/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "internal-fn.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "tree-chkp.h"
#include "gomp-constants.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Set up an array of builtin_info_type; make sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int) END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
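/* A worked example (illustrative, not from the original sources): for an
   access at byte offset 2 within an object whose start is known to be
   8-byte aligned, this computes M = 64 and N = 16, both in bits --
   (&EXP - 16 bits) is divisible by 64 bits, so *ALIGNP gets 64 and
   *BITPOSP gets 16, even though the access itself is only 16-bit
   aligned.  */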
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
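/* For example, if the known low bits give align == 256 and bitpos == 64
   (both in bits), the only alignment that can actually be guaranteed is
   the lowest set bit of the misalignment: 64 & -64 == 64, i.e. 8-byte
   alignment.  */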
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
static tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
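/* For example, given the string constant "hello" and a known offset of 2,
   the code above returns ssize_int (3), i.e. strlen ("llo"); whereas for
   "foo\0bar" with a non-constant offset it must give up and return
   NULL_TREE, since the answer depends on which side of the embedded null
   the offset lands.  */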
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
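/* As an illustration (assuming 8-bit units and a 32-bit SImode): reading
   "abcd" yields 0x64636261 on a little-endian target ('a' == 0x61 lands
   in the low byte) and 0x61626364 on a big-endian one -- the same value
   the target itself would see when loading those four bytes as a word.  */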
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
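/* For instance, a source-level call __builtin_return_address (1) reaches
   this expander with COUNT == 1: the loop above follows one link of the
   dynamic chain, and the return address is then fetched relative to that
   caller's frame.  */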
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
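/* To summarize the layout established above, the setjmp buffer looks like:
     word 0:		frame pointer (targetm.builtin_setjmp_frame_value)
     word 1:		address of the receiver label
     word 2 onward:	stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)
   which is why __builtin_setjmp buffers are described elsewhere in this
   file as arrays of five words.  */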
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
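/* At the source level the pair is used roughly as:

     if (__builtin_setjmp (buf) == 0)
       ... normal path ...
     else
       ... reached via __builtin_longjmp (buf, 1) ...

   with the second longjmp argument required to be the literal 1, as
   asserted above.  */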
static bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
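/* A typical use, taken from the expanders below:

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   i.e. exactly two pointer arguments, with the final VOID_TYPE playing
   the role of the terminating endlink.  */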
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
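/* The rounding above is plain alignment arithmetic: e.g. with size == 12
   and an 8-byte-aligned mode, CEIL (12, 8) * 8 == 16, so the register's
   slot is placed at the next 8-byte boundary before its size is added.  */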
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
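/* These three builtins cooperate at the source level roughly as:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (ret);

   where target_fn and the argument-block bound 64 are hypothetical
   values chosen purely for illustration.  */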
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}
/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
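
/* As an illustration (the numeric values follow the enumeration in
   typeclass.h), a call such as

     int k = __builtin_classify_type (3.14);

   folds to real_type_class, while __builtin_classify_type () with no
   argument yields no_type_class.  */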
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
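
/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   covering the double, float and long double variants of the function
   with a single macro invocation.  */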
/* Return a mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
        return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
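
/* E.g. mathfn_built_in (float_type_node, BUILT_IN_SIN) yields the
   declaration of sinf when the implicit builtin is available, and
   NULL_TREE otherwise.  */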
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
                           NULL_RTX, NULL, lab,
                           /* The jump is very likely.  */
                           REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
          = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
                      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
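
/* The self-comparison above exploits the IEEE rule that a NaN compares
   unequal even to itself: the EQ test of TARGET against TARGET succeeds
   for every ordinary value, so the very-likely branch skips the errno
   store, and only a NaN result (e.g. from sqrt (-1.0)) falls through to
   set errno = EDOM.  */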
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
        break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          if (errno_set)
            expand_errno_check (exp, result);

          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
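
/* On a target whose sqrt<mode>2 pattern is available (for instance the
   scalar SSE sqrt instruction on x86-64), a call y = __builtin_sqrt (x)
   compiled with -fno-math-errno therefore becomes a single insn rather
   than a libcall; with errno honored, the inline expansion is followed
   by the NaN check of expand_errno_check above.  */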
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
                         result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
                              result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int ok;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (ok);
        }
      else
        result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG,
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   a floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
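
/* E.g. a direct call sincos (x, &s, &c) on a target providing a
   sincos<mode>2 pattern expands to one instruction sequence computing
   both results from a single evaluation of X; without the optab the
   call is left for the library, as the early return above shows.  */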
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
        gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                              build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}
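
/* The last fallback relies on the identity cexpi (x) == cexp (I*x):
   the COMPLEX_EXPR built above packages 0 + x*i as the argument passed
   to cexp, and the call's value is returned unchanged.  */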
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_ICEIL:
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_ICEILF:
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_ICEILL:
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_IFLOOR:
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_IFLOORF:
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_IFLOORL:
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
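
/* So on a target without an lfloor<mode><mode>2 pattern, a call

     long l = __builtin_lfloor (x);

   is lowered to the equivalent of l = (long) floor (x): a call to floor
   (built by hand if the target lacks C99 math) followed by an
   expand_fix truncation into the integer result register.  */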
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
         targets, (int) round (x) should never be transformed into
         BUILT_IN_IROUND and if __builtin_iround is called directly, emit
         a call to lround in the hope that the target provides at least some
         C99 functions.  This should result in the best user experience for
         not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
                                                fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode, 2,
                                    op0, mode, op1, mode2);

  return target;
}
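
/* The libgcc routine reached through powi_optab is __powi<mode>2; for
   instance a DFmode call __builtin_powi (x, n) becomes a libcall to
   __powidf2 (x, n).  */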
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
        return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
         integer, but there are side-effects in src, evaluate
         src for side-effects, then return len.
         E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
         can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
        {
          expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return expand_expr (len, target, target_mode, EXPAND_NORMAL);
        }

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
        return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
        {
          icode = optab_handler (strlen_optab, insn_mode);
          if (icode != CODE_FOR_nothing)
            break;

          insn_mode = GET_MODE_WIDER_MODE (insn_mode);
        }
      if (insn_mode == VOIDmode)
        return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
         the actual source until we are sure that the expansion will
         not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
         source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
        return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (pat) != Pmode)
            pat = convert_to_mode (Pmode, pat,
                                   POINTERS_EXTEND_UNSIGNED);
#endif
          emit_move_insn (src_reg, pat);
        }
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
        emit_insn_after (pat, before_strlen);
      else
        emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
        target = ops[0].value;
      else if (target != 0)
        convert_move (target, ops[0].value, 0);
      else
        target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, which
   we store into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
                      unsigned HOST_WIDE_INT *min_size,
                      unsigned HOST_WIDE_INT *max_size,
                      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
        *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
        *min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
        *probable_max_size = *max_size
          = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
        *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
        range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
        {
          if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
            *min_size = min.to_uhwi ();
          if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
            *probable_max_size = *max_size = max.to_uhwi ();
        }
      else if (range_type == VR_ANTI_RANGE)
        {
          /* An anti range 0...N lets us determine that the minimal
             size is N+1.  */
          if (min == 0)
            {
              if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
                *min_size = max.to_uhwi () + 1;
            }
          /* Code like

               int n;
               if (n < 100)
                 memcpy (a, b, n)

             produces an anti range allowing negative values of N.  We
             can still use the information and guess that N is not
             negative.  */
          else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
            *probable_max_size = min.to_uhwi () - 1;
        }
    }
  gcc_checking_assert (*max_size <=
                       (unsigned HOST_WIDE_INT)
                          GET_MODE_MASK (GET_MODE (len_rtx)));
}
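
/* For example, if LEN is an SSA name with value range [8, 24], callers
   see *min_size == 8 and *max_size == 24 and may pick a copy strategy
   that is valid for every length in that interval; an anti-range
   ~[0, 7] instead raises *min_size to 8 while the type's limit still
   bounds *max_size.  */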
/* Helper function to do the actual work for expand_builtin_memcpy.  */

static rtx
expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                              CONST_CAST (char *, src_str),
                              dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                  builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false, 0);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
                                     CALL_EXPR_TAILCALL (exp)
                                     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                     expected_align, expected_size,
                                     min_size, max_size, probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memcpy_args (dest, src, len, target, exp);
    }
}
/* Expand an instrumented call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy_with_bounds (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
                                          target, mode, /*endp=*/ 1,
                                          exp);
    }
}
/* Expand an instrumented call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
                                             mode, 1, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
                             rtx target, machine_mode mode, int endp,
                             tree orig_exp)
{
  tree fndecl = get_callee_fndecl (orig_exp);

  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
      && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                           dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else if (target == const0_rtx
           && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                           dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
         operation in-line.  */
      if (dest_align == 0 || src_align == 0)
        return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
        return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
      if (src_str
          && CONST_INT_P (len_rtx)
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                      builtin_memcpy_read_str,
                                      CONST_CAST (char *, src_str),
                                      dest_align, false, endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      if (CONST_INT_P (len_rtx)
          && can_move_by_pieces (INTVAL (len_rtx),
                                 MIN (dest_align, src_align)))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          src_mem = get_memory_rtx (src, len);
          set_mem_align (src_mem, src_align);
          dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
                                     MIN (dest_align, src_align), endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      return NULL_RTX;
    }
}
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed; the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
         terminator.  If the caller requested a mempcpy-like return value,
         adjust it.  */
      if (endp == 1)
        {
          rtx tem = plus_constant (GET_MODE (target),
                                   gen_lowpart (GET_MODE (target), target), 1);
          emit_move_insn (target, force_operand (tem, NULL_RTX));
        }
    }
  return target;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      return expand_builtin_strcpy_args (dest, src, target);
    }
  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
        return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
                                         target, mode, /*endp=*/2,
                                         exp);

      if (ret)
        return ret;

      if (TREE_CODE (len) == INTEGER_CST)
        {
          rtx len_rtx = expand_normal (len);

          if (CONST_INT_P (len_rtx))
            {
              ret = expand_builtin_strcpy_args (dst, src, target);

              if (ret)
                {
                  if (! target)
                    {
                      if (mode != VOIDmode)
                        target = gen_reg_rtx (mode);
                      else
                        target = gen_reg_rtx (GET_MODE (ret));
                    }
                  if (GET_MODE (target) != GET_MODE (ret))
                    ret = gen_lowpart (GET_MODE (target), ret);

                  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
                  gcc_assert (ret);

                  return target;
                }
            }
        }

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
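
/* With a constant source, p = stpcpy (d, "abcd") is thus expanded as
   mempcpy with ENDP == 2: the copy moves the five bytes "abcd\0" and
   the returned pointer is d + 4, addressing the NUL terminator rather
   than one past it.  */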
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
        return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align = get_pointer_alignment (dest);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
              || !can_store_by_pieces (tree_to_uhwi (len),
                                       builtin_strncpy_read_str,
                                       CONST_CAST (char *, p),
                                       dest_align, false))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_strncpy_read_str,
                           CONST_CAST (char *, p), dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
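
/* Concretely, for a four-byte mode the coefficient read from the
   "\1\1\1\1" buffer is 0x01010101, so a byte value of 0xAB is widened
   to 0xAB * 0x01010101 == 0xABABABAB: one register holding the byte
   replicated across the whole word.  */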
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}
/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 3);
      rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
                            rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        {
          val_rtx = force_reg (val_mode, val_rtx);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_memset_gen_str, val_rtx, dest_align,
                           true, 0);
        }
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
          && can_store_by_pieces (tree_to_uhwi (len),
                                  builtin_memset_read_str, &c, dest_align,
                                  true))
        store_by_pieces (dest_mem, tree_to_uhwi (len),
                         builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
                                        gen_int_mode (c, val_mode),
                                        dest_align, expected_align,
                                        expected_size, min_size, max_size,
                                        probable_max_size))
        goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
                                   CALL_EXPR_TAILCALL (orig_exp)
                                   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                   expected_align, expected_size,
                                   min_size, max_size,
                                   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
                                dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
                                dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
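
/* For illustration, a user-level call such as

     memset (buf, 0, sizeof buf);

   with a constant length may be expanded above via store_by_pieces or
   clear_storage_hints rather than a library call; when neither strategy
   applies, the do_libcall path rebuilds and emits the original memset
   (or bzero) call.  */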
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
                                     fold_convert_loc (loc,
                                                       size_type_node, size),
                                     const0_rtx, VOIDmode, exp);
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                       ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    machine_mode insn_mode;

    insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
           && REG_P (result) && GET_MODE (result) == insn_mode
           && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
        set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
        set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                         GEN_INT (MIN (arg1_align, arg2_align)));

    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
                               TYPE_MODE (integer_type_node), 3,
                               XEXP (arg1_rtx, 0), Pmode,
                               XEXP (arg2_rtx, 0), Pmode,
                               convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
                                                TYPE_UNSIGNED (sizetype)),
                               TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
        convert_move (target, result, 0);
        return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
        return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
        {
          machine_mode insn_mode
            = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && REG_P (result) && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
                               GEN_INT (MIN (arg1_align, arg2_align)));
        }
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
        {
          tree len;
          rtx arg3_rtx;

          machine_mode insn_mode
            = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
          tree len1 = c_strlen (arg1, 1);
          tree len2 = c_strlen (arg2, 1);

          if (len1)
            len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
          if (len2)
            len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

          /* If we don't have a constant length for the first, use the length
             of the second, if we know it.  We don't require a constant for
             this case; some cost analysis could be done if both are available
             but neither is constant.  For now, assume they're equally cheap,
             unless one has side effects.  If both strings have constant
             lengths, use the smaller.  */

          if (!len1)
            len = len2;
          else if (!len2)
            len = len1;
          else if (TREE_SIDE_EFFECTS (len1))
            len = len2;
          else if (TREE_SIDE_EFFECTS (len2))
            len = len1;
          else if (TREE_CODE (len1) != INTEGER_CST)
            len = len2;
          else if (TREE_CODE (len2) != INTEGER_CST)
            len = len1;
          else if (tree_int_cst_lt (len1, len2))
            len = len1;
          else
            len = len2;

          /* If both arguments have side effects, we cannot optimize.  */
          if (!len || TREE_SIDE_EFFECTS (len))
            goto do_libcall;

          arg3_rtx = expand_normal (len);

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && REG_P (result) && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                                GEN_INT (MIN (arg1_align, arg2_align)));
        }
#endif

      if (insn)
        {
          machine_mode mode;
          emit_insn (insn);

          /* Return the value in the proper mode for this function.  */
          mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          if (target == 0)
            return convert_to_mode (mode, result, 0);
          convert_move (target, result, 0);
          return target;
        }

      /* Expand the library call ourselves using a stabilized argument
         list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                        ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
  {
    tree len, len1, len2;
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result, insn;
    tree fndecl, fn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree arg3 = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

    len1 = c_strlen (arg1, 1);
    len2 = c_strlen (arg2, 1);

    if (len1)
      len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
    if (len2)
      len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap,
       unless one has side effects.  If both strings have constant
       lengths, use the smaller.  */

    if (!len1)
      len = len2;
    else if (!len2)
      len = len1;
    else if (TREE_SIDE_EFFECTS (len1))
      len = len2;
    else if (TREE_SIDE_EFFECTS (len2))
      len = len1;
    else if (TREE_CODE (len1) != INTEGER_CST)
      len = len2;
    else if (TREE_CODE (len2) != INTEGER_CST)
      len = len1;
    else if (tree_int_cst_lt (len1, len2))
      len = len1;
    else
      len = len2;

    /* If both arguments have side effects, we cannot optimize.  */
    if (!len || TREE_SIDE_EFFECTS (len))
      return NULL_RTX;

    /* The actual new length parameter is MIN(len,arg3).  */
    len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
                           fold_convert_loc (loc, TREE_TYPE (len), arg3));

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
           && REG_P (result) && GET_MODE (result) == insn_mode
           && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
    arg1 = builtin_save_expr (arg1);
    arg2 = builtin_save_expr (arg2);
    len = builtin_save_expr (len);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);
    insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                          GEN_INT (MIN (arg1_align, arg2_align)));
    if (insn)
      {
        emit_insn (insn);

        /* Return the value in the proper mode for this function.  */
        mode = TYPE_MODE (TREE_TYPE (exp));
        if (GET_MODE (result) == mode)
          return result;
        if (target == 0)
          return convert_to_mode (mode, result, 0);
        convert_move (target, result, 0);
        return target;
      }

    /* Expand the library call ourselves using a stabilized argument
       list to avoid re-evaluating the function's arguments twice.  */
    fndecl = get_callee_fndecl (exp);
    fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
                                arg1, arg2, len);
    gcc_assert (TREE_CODE (fn) == CALL_EXPR);
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
    return expand_call (fn, target, target == const0_rtx);
  }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
                       crtl->args.internal_arg_pointer,
                       crtl->args.arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
         vatype, but it's possible we've actually been given an array
         (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
         So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
        {
          tree p1 = build_pointer_type (TREE_TYPE (vatype));
          valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
        }
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
        {
          if (! TREE_SIDE_EFFECTS (valist))
            return valist;

          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;
        }

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
                                vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
         to a pointer type, e.g. by being passed to another function.
         In that case, unwrap both types so that we can compare the
         underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
          || POINTER_TYPE_P (htype))
        {
          wtype = TREE_TYPE (wtype);
          htype = TREE_TYPE (htype);
        }
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
                                      make_tree (TREE_TYPE (valist),
                                                 nextarg));
}
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
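
/* For reference, this expands the use of va_start in user code such as

     void f (const char *fmt, ...)
     {
       va_list ap;
       va_start (ap, fmt);
       ...
       va_end (ap);
     }  */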
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
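
/* For reference, this implements va_copy in user code such as

     va_list ap2;
     va_copy (ap2, ap);
     ...
     va_end (ap2);

   taking either the assignment path or the block-move path above,
   depending on whether the ABI's va_list is an array type.  */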
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
        error ("invalid argument to %<__builtin_frame_address%>");
      else
        error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
        = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
                                      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
        {
          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
            warning (0, "unsupported argument to %<__builtin_frame_address%>");
          else
            warning (0, "unsupported argument to %<__builtin_return_address%>");
          return const0_rtx;
        }

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
        return tem;

      if (!REG_P (tem)
          && ! CONSTANT_P (tem))
        tem = copy_addr_to_reg (tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
                            == BUILT_IN_ALLOCA_WITH_ALIGN);
  bool valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
           ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
           : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
                      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
                     subtarget && GET_MODE (subtarget) == target_mode
                     ? subtarget : NULL_RTX,
                     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
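
/* For example, __builtin_bswap32 (0x12345678) yields 0x78563412; the
   bswap_optab expansion above reverses the byte order of the operand
   in TARGET_MODE.  */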
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
                     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
                     (subtarget
                      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
                          == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
                     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
                        op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
  return target;
}
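
/* For reference, user code typically writes

     if (__builtin_expect (ptr == NULL, 0))
       handle_error ();

   The second argument is the expected value of the first; by expansion
   time the hint has already been consumed by branch prediction, so only
   the first argument is evaluated here.  */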
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by earlier passes.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
                        EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
              && (call_expr_nargs (exp) < 3
                  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
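
/* For reference, user code such as

     double *q = __builtin_assume_aligned (p, 16);

   simply returns P at expansion time; the 16-byte alignment guarantee
   was already recorded by earlier tree passes.  */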
/* Expand a call to __builtin_trap.  */

static void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
         REG_ARGS_SIZE note to prevent crossjumping of calls with
         different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
        add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does something.  Just do the default expansion to a call to
         __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
        return const0_rtx;
    }
  return const0_rtx;
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
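
/* A worked example of the rounding above: with TRAMPOLINE_ALIGNMENT of
   128 bits (16 bytes), ADDEND is 15 and MASK is -16, so an address of
   0x1003 becomes (0x1003 + 15) & -16 == 0x1010.  */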
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                         POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
                  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
        return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
        return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
                             build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
        return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
        word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
        word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
        temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
                           immed_wide_int_const (mask, rmode),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
         significant bit, then truncate the result to the desired mode
         and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
                     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
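
/* Worked example: for __sync_fetch_and_add_4, FCODE_DIFF is 2
   (the _1/_2/_4/_8/_16 variants are enumerated consecutively), giving
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets.  */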
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
                           get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
                               enum rtx_code code, bool after,
                               rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
        {
        case BUILT_IN_SYNC_FETCH_AND_NAND_1:
        case BUILT_IN_SYNC_FETCH_AND_NAND_2:
        case BUILT_IN_SYNC_FETCH_AND_NAND_4:
        case BUILT_IN_SYNC_FETCH_AND_NAND_8:
        case BUILT_IN_SYNC_FETCH_AND_NAND_16:
          if (warned_f_a_n)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_f_a_n = true;
          break;

        case BUILT_IN_SYNC_NAND_AND_FETCH_1:
        case BUILT_IN_SYNC_NAND_AND_FETCH_2:
        case BUILT_IN_SYNC_NAND_AND_FETCH_4:
        case BUILT_IN_SYNC_NAND_AND_FETCH_8:
        case BUILT_IN_SYNC_NAND_AND_FETCH_16:
          if (warned_n_a_f)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_n_a_f = true;
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
                                 after);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
                                 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
        pbool = &target;
      else
        poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
                                       false, MEMMODEL_SYNC_SEQ_CST,
                                       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
                                       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
               "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memmodel, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
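
/* For reference, the constants checked above correspond to the
   user-visible macros __ATOMIC_RELAXED (0), __ATOMIC_CONSUME (1),
   __ATOMIC_ACQUIRE (2), __ATOMIC_RELEASE (3), __ATOMIC_ACQ_REL (4)
   and __ATOMIC_SEQ_CST (5).  */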
/* Expand the __atomic_exchange intrinsic:
        TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
        bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
                                        TYPE desired, BOOL weak,
                                        enum memmodel success,
                                        enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
                                        rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning (OPT_Winvalid_memory_model,
               "failure memory model cannot be stronger than success memory "
               "model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid failure memory model for "
               "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
                           GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
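
/* For reference, this expands user code such as

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&v, &expected, 1, false,
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   On failure the current value of V is written back to EXPECTED, which
   is why the conditional store above is needed.  */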
/* Expand the __atomic_load intrinsic:
        TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
        void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
        || is_mm_release (model)))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
        TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
                                enum rtx_code code, bool fetch_after,
                                bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
        return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
        {
          ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
                                     OPTAB_LIB_WIDEN);
          ret = expand_simple_unop (mode, NOT, ret, target, true);
        }
      else
        ret = expand_simple_binop (mode, code, ret, val, target, true,
                                   OPTAB_LIB_WIDEN);
    }
  return ret;
}
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
        void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
               "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
        bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
         end before anything else has a chance to look at it.  The pointer
         parameter at this point is usually cast to a void *, so check for that
         and look past the cast.  */
      if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
          && VOID_TYPE_P (TREE_TYPE (ttype)))
        arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;

  return boolean_false_node;
}
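
/* For example, __atomic_always_lock_free (sizeof (int), 0) folds to
   true on targets that provide a 4-byte compare-and-swap pattern with
   at least natural alignment, and to false otherwise.  */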
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
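
/* Usage sketch (editorial, not part of GCC): the three expanders above map
   source-level fences straight onto the RTL fence emitters with the decoded
   memory model.

     data = 42;
     __atomic_thread_fence (__ATOMIC_RELEASE);   // orders the store above
     __atomic_signal_fence (__ATOMIC_SEQ_CST);   // compiler barrier only
     __sync_synchronize ();                      // full MEMMODEL_SYNC_SEQ_CST fence
  */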
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
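
/* Usage sketch (editorial): on targets that provide the get/set thread
   pointer optabs, each call below expands to a single instruction;
   otherwise the errors above are emitted at expansion time.

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);
  */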
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
/* Expand OpenACC acc_on_device.

   This has to happen late (that is, not in early folding; expand_builtin_*,
   rather than fold_builtin_*), as we have to act differently for host and
   acceleration device (ACCEL_COMPILER conditional).  */

static rtx
expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
			      rtx target ATTRIBUTE_UNUSED)
{
#ifdef ACCEL_COMPILER
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg = CALL_EXPR_ARG (exp, 0);

  /* Return (arg == v1 || arg == v2) ? 1 : 0.  */
  machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
  rtx v = expand_normal (arg), v1, v2;
  v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
  v2 = GEN_INT (ACCEL_COMPILER_acc_device);
  machine_mode target_mode = TYPE_MODE (integer_type_node);
  if (!target || !register_operand (target, target_mode))
    target = gen_reg_rtx (target_mode);
  emit_move_insn (target, const1_rtx);
  rtx_code_label *done_label = gen_label_rtx ();
  do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
			   NULL, done_label, PROB_EVEN);
  do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
			   NULL, done_label, PROB_EVEN);
  emit_move_insn (target, const0_rtx);
  emit_label (done_label);

  return target;
#else
  return NULL;
#endif
}
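
/* Illustrative sketch (editorial): under ACCEL_COMPILER the RTL emitted
   above is equivalent to

     int acc_on_device (int dev)
     {
       return dev == GOMP_DEVICE_NOT_HOST || dev == ACCEL_COMPILER_acc_device;
     }

   while the host compiler leaves the call alone and falls back to the
   library implementation.  */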
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     number of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}
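
      /* Usage sketch (editorial): the classic call-forwarding idiom served
	 by these builtins.  The wrapped function and the 64-byte worst-case
	 argument size are hypothetical.

	   void wrapper (void)
	   {
	     void *args = __builtin_apply_args ();
	     void *ret = __builtin_apply ((void (*)()) wrapped, args, 64);
	     __builtin_return (ret);
	   }
	*/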
      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
      /* If the allocation stems from the declaration of a variable-sized
	 object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;
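
      /* Illustrative sketch (editorial): each CASE_INT_FN group above maps
	 one builtin family onto one optab, e.g.

	   int bits = __builtin_popcount (x);   // popcount_optab if available

	 expand_builtin_unop returns 0 when no suitable insn pattern exists,
	 and the break then drops through to the normal library call at the
	 bottom of this switch.  */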
    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx_insn *label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx_insn *label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;
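
      /* Usage sketch (editorial): unlike the C library pair, the builtin
	 variants operate on a five-word buffer and the longjmp value must
	 be literally 1, which is what the error check above enforces.

	   intptr_t buf[5];
	   if (__builtin_setjmp (buf) == 0)
	     __builtin_longjmp (buf, 1);   // any value other than 1 is rejected
	*/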
    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;
      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);
    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;
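
      /* Illustrative sketch (editorial): the boolean passed as the fourth
	 argument above ("after") is what distinguishes the two families:

	   old = __sync_fetch_and_add (&v, n);   // PLUS, after == false
	   new = __sync_add_and_fetch (&v, n);   // PLUS, after == true

	 and NAND uses code NOT because the operation is ~(v & n).  */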
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
		(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
		(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode =
	    get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }
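
      /* Illustrative sketch (editorial): the vec surgery above rewrites a
	 call such as

	   __atomic_compare_exchange_4 (ptr, expected, desired, weak, s, f)

	 into the five-argument external form, dropping only the boolean
	 WEAK argument, because the libatomic entry point does not take
	 it.  */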
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;
    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_CILK_DETACH:
      expand_builtin_cilk_detach (exp);
      return const0_rtx;

    case BUILT_IN_CILK_POP_FRAME:
      expand_builtin_cilk_pop_frame (exp);
      return const0_rtx;
    case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
    case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
    case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
    case BUILT_IN_CHKP_SET_PTR_BOUNDS:
    case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
    case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
    case BUILT_IN_CHKP_GET_PTR_LBOUND:
    case BUILT_IN_CHKP_GET_PTR_UBOUND:
      /* We allow user CHKP builtins if Pointer Bounds
	 Checker is off.  */
      if (!chkp_function_instrumented_p (current_function_decl))
	{
	  if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
	    return expand_normal (CALL_EXPR_ARG (exp, 0));
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
	    return expand_normal (size_zero_node);
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
	    return expand_normal (size_int (-1));
	  else
	    return const0_rtx;
	}
      /* FALLTHROUGH */

    case BUILT_IN_CHKP_BNDMK:
    case BUILT_IN_CHKP_BNDSTX:
    case BUILT_IN_CHKP_BNDCL:
    case BUILT_IN_CHKP_BNDCU:
    case BUILT_IN_CHKP_BNDLDX:
    case BUILT_IN_CHKP_BNDRET:
    case BUILT_IN_CHKP_INTERSECT:
    case BUILT_IN_CHKP_NARROW:
    case BUILT_IN_CHKP_EXTRACT_LOWER:
    case BUILT_IN_CHKP_EXTRACT_UPPER:
      /* Software implementation of Pointer Bounds Checker is NYI.
	 Target support is required.  */
      error ("Your target platform does not support -fcheck-pointer-bounds");
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      target = expand_builtin_acc_on_device (exp, target);
      if (target)
	return target;
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Similar to expand_builtin but is used for instrumented calls.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
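
/* Illustrative sketch (editorial) of the three outcomes of the folder
   above:

     __builtin_constant_p (42)    // folds to 1 (CONSTANT_CLASS_P)
     __builtin_constant_p ("x")   // folds to 1 (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (*p)    // folds to 0 only once no further
                                  // optimization can help (see above)
  */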
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
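
/* Illustrative sketch (editorial): the TRUTH_ANDIF/TRUTH_ORIF distribution
   above rewrites

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each short-circuit arm carries its own prediction.  */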
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());

	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
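
/* Illustrative sketch (editorial) of the identities applied above:

     cabs (x + 0i)   -> fabs (x)
     cabs (x + xi)   -> fabs (x) * sqrt (2)   // -funsafe-math-optimizations
     cabs (-z)       -> cabs (z)
     cabs (conj (z)) -> cabs (z)
     cabs (z)        -> sqrt (re*re + im*im)  // unsafe math, optimizing for speed
  */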
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}

/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (type))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
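
/* Example of the exponent fold above: with -funsafe-math-optimizations,
   a source expression such as

     sqrt (exp (x))

   is rebuilt here as exp (x * 0.5), since sqrt(e^x) == e^(x/2); the
   sqrt call disappears entirely.  */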
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type),
						    dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }

  return NULL_TREE;
}
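
/* Example of the nested-root folds above: under
   -funsafe-math-optimizations, cbrt (sqrt (x)) becomes pow (x, 1.0/6.0).
   The 1/6 constant is produced by taking dconst_third () and
   decrementing its binary exponent, i.e. halving 1/3.  */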
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}
/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}
/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
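
/* Shape of the sincos canonicalization above: on a target whose libc
   provides the C99 complex functions,

     sincos (x, &s, &c);

   is rewritten roughly as

     __complex__ double tmp = cexpi (x);
     s = __imag__ tmp, c = __real__ tmp;

   since cexpi computes cos(x) + i*sin(x); later phases can expand or
   CSE the single call more easily.  */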
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  REAL_VALUE_TYPE r;
	  bool fail = false;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
	  if (!fail)
	    return wide_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
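
/* Example of the constant fold above: lround (2.5) evaluates real_round
   on the REAL_CST (rounding halfway cases away from zero, as lround
   requires) and converts via real_to_integer, so the call folds to the
   integer constant 3; if the value does not fit the return type,
   real_to_integer sets FAIL and no folding occurs.  */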
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  result = wi::ffs (arg);
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::clz (arg);
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::ctz (arg);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  result = wi::clrsb (arg);
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = wi::popcount (arg);
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = wi::parity (arg);
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
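
/* Example of the bit-op folds above: __builtin_popcount (0xff) becomes
   the constant 8 via wi::popcount.  For a zero argument,
   __builtin_clz (0) folds to the target's defined value when
   CLZ_DEFINED_VALUE_AT_ZERO provides one, and to the type precision
   otherwise.  */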
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    signop sgn = TYPE_SIGN (type);
	    tree result =
	      wide_int_to_tree (type,
				wide_int::from (arg, TYPE_PRECISION (type),
						sgn).bswap ());
	    return result;
	  }
	default:
	  gcc_unreachable ();
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				narg1 ? narg1 : arg1);

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
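
/* Examples of the hypot folds above: hypot (x, 0.0) folds to fabs (x)
   unconditionally, while hypot (x, x) folds to fabs (x) * sqrt(2) only
   under -funsafe-math-optimizations, with sqrt(2) truncated to the
   argument type.  */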
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
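
/* Examples of the pow folds above: pow (x, 1.0) folds to x and
   pow (x, -1.0) to 1.0/x unconditionally; pow (x, 0.5) becomes
   sqrt (x) only under -funsafe-math-optimizations.  An integral
   constant exponent such as pow (2.0, 10.0) can be evaluated here at
   compile time via real_powi, yielding 1024.0 when the result is
   exact.  */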
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (tree_fits_shwi_p (arg1))
    {
      HOST_WIDE_INT c = tree_to_shwi (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;
	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
	  || !tree_fits_uhwi_p (len))
	return NULL_TREE;

      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
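
/* Example of the memcmp folds above: with both strings constant and
   LEN covering them, memcmp ("ab", "ac", 2) folds to -1 at compile
   time; for LEN == 1 the call is rewritten as the byte difference

     *(const unsigned char *) arg1 - *(const unsigned char *) arg2

   which needs no library call at all.  */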
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
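
/* Example of the strncmp folds above: strncmp (s, "", n) with a
   positive constant N folds to *(const unsigned char *) s, and
   strncmp (s1, s2, 1) folds to the difference of the first bytes,
   mirroring the memcmp case.  */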
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (arg))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
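
/* The isdigit rewrite above turns isdigit (c) into
   ((unsigned) c - '0' <= 9) using the *target* character set's value
   for '0', so the fold stays correct when cross-compiling to a
   non-ASCII execution character set (e.g. EBCDIC).  */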
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
	  tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}
/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE))
    {
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
	return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
	return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}
/* Fold a call to builtin fmin or fmax.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (arg0)
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (arg1)
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
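
/* Examples of the fmin/fmax folds above: fmax (x, __builtin_nan (""))
   folds to x, honoring the C99 rule that a quiet NaN argument is
   ignored; fmin (x, x) folds to x; and with -ffinite-math-only the
   calls collapse to MIN_EXPR/MAX_EXPR, which do not themselves honor
   NaNs.  */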
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin logb/ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
      {
      case rvc_nan:
      case rvc_inf:
	/* If arg is Inf or NaN and we're logb, return it.  */
	if (TREE_CODE (rettype) == REAL_TYPE)
	  {
	    /* For logb(-Inf) we have to return +Inf.  */
	    if (real_isinf (value) && real_isneg (value))
	      {
		REAL_VALUE_TYPE tem;
		real_inf (&tem);
		return build_real (rettype, tem);
	      }
	    return fold_convert_loc (loc, rettype, arg);
	  }
	/* Fall through... */
      case rvc_zero:
	/* Zero may set errno and/or raise an exception for logb, also
	   for ilogb we don't know FP_ILOGB0.  */
	return NULL_TREE;
      case rvc_normal:
	/* For normal numbers, proceed iff radix == 2.  In GCC,
	   normalized significands are in the range [0.5, 1.0).  We
	   want the exponent as if they were [1.0, 2.0) so get the
	   exponent and subtract 1.  */
	if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	  return fold_convert_loc (loc, rettype,
				   build_int_cst (integer_type_node,
						  REAL_EXP (value)-1));
	break;
      }
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
      {
      case rvc_zero:
      case rvc_nan:
      case rvc_inf:
	/* If arg is +-0, +-Inf or +-NaN, then return it.  */
	return fold_convert_loc (loc, rettype, arg);
      case rvc_normal:
	/* For normal numbers, proceed iff radix == 2.  */
	if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	  {
	    REAL_VALUE_TYPE result = *value;
	    /* In GCC, normalized significands are in the range [0.5,
	       1.0).  We want them to be [1.0, 2.0) so set the
	       exponent to 1.  */
	    SET_REAL_EXP (&result, 1);
	    return build_real (rettype, result);
	  }
	break;
      }
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
      {
      case rvc_zero:
	/* For +-0, return (*exp = 0, +-0).  */
	exp = integer_zero_node;
	frac = arg0;
	break;
      case rvc_nan:
      case rvc_inf:
	/* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	return omit_one_operand_loc (loc, rettype, arg0, arg1);
      case rvc_normal:
	{
	  /* Since the frexp function always expects base 2, and in
	     GCC normalized significands are already in the range
	     [0.5, 1.0), we have exactly what frexp wants.  */
	  REAL_VALUE_TYPE frac_rvt = *value;
	  SET_REAL_EXP (&frac_rvt, 0);
	  frac = build_real (rettype, frac_rvt);
	  exp = build_int_cst (integer_type_node, REAL_EXP (value));
	}
	break;
      default:
	gcc_unreachable ();
      }

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && tree_fits_shwi_p (arg1))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
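
/* Example of the ldexp/scalbn fold above: ldexp (1.0, 10) folds to
   1024.0 via real_ldexp, provided the requested exponent adjustment
   stays within twice the mode's exponent range and the result is
   finite and exactly representable in the target mode.  */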
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
      {
      case rvc_nan:
      case rvc_zero:
	/* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	trunc = frac = *value;
	break;
      case rvc_inf:
	/* For +-Inf, return (*arg1 = arg0, +-0).  */
	frac = dconst0;
	frac.sign = value->sign;
	trunc = *value;
	break;
      case rvc_normal:
	/* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	real_trunc (&trunc, VOIDmode, value);
	real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	/* If the original number was negative and already
	   integral, then the fractional part is -0.0.  */
	if (value->sign && frac.cl == rvc_zero)
	  frac.sign = value->sign;
	break;
      }

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
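
/* The fold above emits fpclassify as a chain of nested conditionals,
   roughly:

     isnan (x) ? FP_NAN
       : fabs (x) == Inf ? FP_INFINITE
       : fabs (x) >= DBL_MIN ? FP_NORMAL
       : x == 0.0 ? FP_ZERO : FP_SUBNORMAL

   built innermost-first, with the NaN and infinity tests omitted when
   the mode honors neither.  */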
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  tree type = TREE_TYPE (TREE_TYPE (arg2));
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }
  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
					    2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
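
/* Shape of the overflow fold above: a call such as

     __builtin_add_overflow (a, b, &r)

   becomes, in effect,

     __complex__ int t = .ADD_OVERFLOW (a, b);
     r = __real__ t, (_Bool) __imag__ t;

   i.e. the internal function returns the result and the overflow flag
   packed in a complex integer; the real part is stored through the
   pointer and the imaginary part becomes the boolean value of the
   whole expression.  */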
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
        tree val = fold_builtin_constant_p (arg0);

        /* Gimplification will pull the CALL_EXPR for the builtin out of
           an if condition.  When not optimizing, we'll not CSE it back.
           To avoid link error types of regressions, return false now.  */
        if (!val && !optimize)
          val = integer_zero_node;

        return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc,
                               fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc,
                               fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_sin);
      break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_sinh);
      break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_tan);
      break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_tanh);
      break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_log);
      break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_sqrt);
      break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_asin);
      break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_acos);
      break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_atan);
      break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_asinh);
      break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_acosh);
      break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_atanh);
      break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_asin,
                             &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_acos,
                             &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_acosh,
                             &dconst1, NULL, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_atanh,
                             &dconstm1, &dconst1, false);
      break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
      break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_LOG):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG2):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG10):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log1p,
                             &dconstm1, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_j0, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_j1, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_y0,
                             &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_y1,
                             &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_CLRSB):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
        return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
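
/* Illustrative sketch (editorial example, not original GCC text):
   fold_builtin_1 covers constant folds such as

     __builtin_fabs (-3.0)  ->  3.0
     __builtin_creal (z)    ->  REALPART_EXPR <z>
     __builtin_sin (1.0)    ->  a REAL_CST computed via MPFR

   where the MPFR-based cases go through do_mpfr_arg1 defined below.  */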
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
          && validate_arg (arg1, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
        return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
                                         type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
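
/* Illustrative sketch (editorial example, not original GCC text):
   two-argument folds include

     __builtin_strcmp ("abc", "abc")  ->  0
     __builtin_atan2 (1.0, 1.0)       ->  a REAL_CST computed via MPFR
     isless (x, y)                    ->  !UNGE_EXPR (x, y) under NaNs

   each case dispatching to the matching fold_builtin_* helper.  */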
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
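
/* Illustrative sketch (editorial example, not original GCC text): with
   fully constant operands,

     __builtin_memcmp ("ab", "ab", 2)  ->  0

   and the twenty-one *_overflow cases above all funnel into
   fold_builtin_arith_overflow.  */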
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

static bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
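
/* Illustrative sketch (hypothetical glibc-style wrapper, editorial
   example, not original GCC text):

     extern __inline __attribute__((always_inline, gnu_inline)) void *
     memcpy (void *d, const void *s, size_t n)
     {
       return __builtin___memcpy_chk (d, s, n, __builtin_object_size (d, 0));
     }

   Folding the memcpy call before this wrapper is inlined would bypass
   the _FORTIFY_SOURCE check, hence the deferral above.  */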
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          tree *args = CALL_EXPR_ARGP (exp);
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}
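
/* Illustrative sketch (hypothetical wrapper, editorial example, not
   original GCC text): in

     extern __inline __attribute__((always_inline)) int
     my_printf (const char *fmt, ...)
     {
       return __builtin_printf (fmt, __builtin_va_arg_pack ());
     }

   the trailing __builtin_va_arg_pack () call is what the check above
   detects; folding waits until inlining has substituted the real
   argument list.  */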
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
                         tree fn,
                         int n,
                         tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
         function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, n, argarray, false);
      else
        return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
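
/* Illustrative sketch (editorial example; strcpy_decl is a hypothetical
   name for the strcpy FUNCTION_DECL): a varargs-to-varargs transform
   such as turning sprintf (dest, "%s", str) into strcpy (dest, str)
   can be written as

     rewrite_call_expr (loc, exp, 2, strcpy_decl, 1, dest);

   which drops the first two arguments of EXP (dest and the format),
   prepends the one new argument, and keeps the remaining tail (str).  */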
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipses, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}
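
/* Illustrative sketch (editorial example, not original GCC text): a
   caller checks a memcpy-like signature with

     validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                              INTEGER_TYPE, VOID_TYPE);

   where the trailing VOID_TYPE marks the end of the expected list and a
   trailing 0 would instead allow arbitrary further arguments.  */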
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strstr (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      /* The argument is const char *, and the result is char *, so we need
         a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
        return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
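
/* Illustrative sketch (editorial example, not original GCC text): the
   transforms above give

     strstr (s, "")   ->  (char *) s
     strstr (s, "c")  ->  strchr (s, 'c')

   and fully constant operands fold to an offset into the string.  */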
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strrchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (! integer_zerop (s2))
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strcspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand_loc (loc, size_type_node,
                                       size_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 1, s1);
        }
      return NULL_TREE;
    }
}
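
/* Illustrative sketch (editorial example, not original GCC text): the
   transforms above give

     strcspn ("hello", "l")  ->  2          (folded at compile time)
     strcspn (s, "")         ->  strlen (s)
     strcspn ("", s)         ->  0          (s still evaluated)  */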
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
        {
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "%<__builtin_next_arg%> called without an argument");
          return true;
        }
      else if (nargs > 1)
        {
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes with the tree optimizers we can get the
             not the last argument even though the user used the last
             argument.  We just warn and set the arg to be the last
             argument so that we will get wrong-code because of
             it.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behaviour when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); } */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
                           enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
        {
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %D will always overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return NULL_RTX;
        }

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (! fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
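
/* Illustrative sketch (editorial example, not original GCC text): when
   the length is a known constant that provably fits, e.g.

     __builtin___memcpy_chk (d, s, 8, 16)

   the code above expands a plain memcpy (d, s, 8); had the known object
   size been smaller than the length, we would instead have warned that
   the call always overflows and punted to the library call.  */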
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
        return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
        return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
        {
          warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return;
        }
      else if (tree_int_cst_lt (src, size))
        return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
              exp, get_callee_fndecl (exp));
}
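
/* Illustrative sketch (editorial example, not original GCC text): for

     char buf[4];
     __builtin___strcpy_chk (buf, "hello", __builtin_object_size (buf, 0));

   c_strlen gives a source length of 5 against an object size of 4, so
   the "will always overflow destination buffer" warning above fires.  */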
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
                0, "%Kcall to %D will always overflow destination buffer",
                exp, get_callee_fndecl (exp));
}
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
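
/* Illustrative sketch (editorial example, not original GCC text):

     char buf[64];
     size_t n = __builtin_object_size (&buf[16], 0);

   folds via the ADDR_EXPR path above to 48, while a pointer whose target
   is unknown folds to (size_t) -1 for types 0/1 and to 0 for types 2/3.  */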
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
              const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
              bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
          && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
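
/* Illustrative sketch (editorial example, not original GCC text):
   folding __builtin_asin (0.5) reaches this helper as

     do_mpfr_arg1 (arg, type, mpfr_asin, &dconstm1, &dconst1, true);

   so the constant is evaluated by MPFR at the precision of TYPE and is
   only accepted when the argument lies in [-1, 1] and the result
   round-trips through do_mpfr_ckconv.  */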
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2;

          mpfr_inits2 (prec, m1, m2, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, NULL);
        }
    }

  return result;
}
/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the
   three-argument mpfr function FUNC on them and return the resulting
   value as a tree with type TYPE.  The mpfr precision is set to the
   precision of TYPE.  We assume that function FUNC returns zero if the
   result could be calculated exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr,
                          mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}
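/* Illustration (a sketch): fused multiply-add is the natural
   three-argument client,

       CASE_FLT_FN (BUILT_IN_FMA):
         return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   mpfr_fma computes arg1 * arg2 + arg3 with a single rounding, which is
   exactly the fma contract.  */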
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return in a complex value do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed iff valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp))
                    == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp))
                       == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }

  return result;
}
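/* Illustration: for sincos (x, &s, &c) with constant x the fold above
   builds, in effect, the compound expression

       (*(&s) = sin_x, *(&c) = cos_x)

   while a caller passing NULL for both pointers (a sketch: this is how
   cexpi folding can use it) receives the complex constant
   cos_x + sin_x * I instead.  */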
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
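/* Illustration (a sketch of typical callers): the Bessel folds pass the
   order as ARG1 and the evaluation point as ARG2,

       CASE_FLT_FN (BUILT_IN_JN):
         return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
       CASE_FLT_FN (BUILT_IN_YN):
         // yn is only defined for strictly positive arguments.
         return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                  &dconst0, false);
*/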
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }

  return result;
}
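/* Illustration of the quotient reduction above: with a 64-bit host long
   and a 32-bit target int (INT_TYPE_SIZE == 32) it becomes

       integer_quo %= (long) (1UL << 31);

   keeping only the low-order magnitude bits.  C99 permits this: remquo
   is only specified to store a value congruent to the integral quotient
   modulo 2^n for some implementation-defined n >= 3.  */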
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1, 1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg)))
           == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
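/* Illustration: lgamma_r (x, &sg) returns log |Gamma (x)| and stores the
   sign of Gamma (x) in *sg, so the compound expression built above
   behaves like

       (*arg_sg = sign_of_gamma, lgamma_value)

   E.g. Gamma (-2.5) is negative, so folding lgamma_r (-2.5, &sg)
   assigns -1 to sg.  */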
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re
        = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im
        = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd
            = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
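/* Illustration (a sketch of a typical caller): one-argument complex
   folds route through here, e.g.

       CASE_FLT_FN (BUILT_IN_CCOS):
         return do_mpc_arg1 (arg0, type, mpc_cos);

   mpc carries the real and imaginary parts at the precision of the
   component type; do_mpc_ckconv then accepts the result only if both
   parts survive the round trip into the target format.  */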
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0
        = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0
        = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1
        = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1
        = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd
            = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
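/* Illustration (a sketch; the precise call sites live outside this
   file): this entry point is exported because constant folding of
   complex arithmetic in fold-const.c also funnels through it, roughly

       return do_mpc_arg2 (arg1, arg2, type,
                           folding_initializer,  // do_nonfinite
                           mpc_mul);

   With DO_NONFINITE set, operands containing Inf or NaN are folded as
   well instead of being left for runtime evaluation.  */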
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
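/* Illustration (a hypothetical caller, for exposition only): a GIMPLE
   pass walking statements would use this roughly as

       gcall *call = as_a <gcall *> (gsi_stmt (gsi));
       tree folded
         = fold_call_stmt (call, gimple_call_lhs (call) == NULL_TREE);
       if (folded)
         // replace the call with the folded expression

   A NULL_TREE return means "not foldable here"; the call is kept.  */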
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}
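/* Illustration: the renaming above is triggered by a declaration that
   redirects a builtin's assembler name, e.g.

       void *memcpy (void *, const void *, __SIZE_TYPE__)
         __asm__ ("my_memcpy");

   After that, both direct expansion of the builtin and the memcpy
   libfunc used by block-move expansion must emit calls to my_memcpy
   rather than memcpy.  */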
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
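/* Illustration (a sketch, not a specific call site): these predicates
   feed size and cost heuristics in callers, e.g.

       if (is_gimple_call (stmt)
           && is_inexpensive_builtin (gimple_call_fndecl (stmt)))
         cost = 1;  // treat like a single cheap statement
*/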