/* Expand builtin functions.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
31 #include "hard-reg-set.h"
34 #include "insn-config.h"
40 #include "typeclass.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
47 #include "value-prof.h"
48 #include "diagnostic-core.h"
53 static tree
do_mpc_arg1 (tree
, tree
, int (*)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
));
55 struct target_builtins default_target_builtins
;
57 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
60 /* Define the names of the builtin function types and codes. */
61 const char *const built_in_class_names
[BUILT_IN_LAST
]
62 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
64 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
65 const char * built_in_names
[(int) END_BUILTINS
] =
67 #include "builtins.def"
71 /* Setup an array of _DECL trees, make sure each element is
72 initialized to NULL_TREE. */
73 builtin_info_type builtin_info
;
75 /* Non-zero if __builtin_constant_p should be folded right away. */
76 bool force_folding_builtin_constant_p
;
78 static const char *c_getstr (tree
);
79 static rtx
c_readstr (const char *, enum machine_mode
);
80 static int target_char_cast (tree
, char *);
81 static rtx
get_memory_rtx (tree
, tree
);
82 static int apply_args_size (void);
83 static int apply_result_size (void);
84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
85 static rtx
result_vector (int, rtx
);
87 static void expand_builtin_update_setjmp_buf (rtx
);
88 static void expand_builtin_prefetch (tree
);
89 static rtx
expand_builtin_apply_args (void);
90 static rtx
expand_builtin_apply_args_1 (void);
91 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
92 static void expand_builtin_return (rtx
);
93 static enum type_class
type_to_class (tree
);
94 static rtx
expand_builtin_classify_type (tree
);
95 static void expand_errno_check (tree
, rtx
);
96 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
97 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
98 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
99 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
100 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
101 static rtx
expand_builtin_sincos (tree
);
102 static rtx
expand_builtin_cexpi (tree
, rtx
);
103 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
104 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
105 static rtx
expand_builtin_next_arg (void);
106 static rtx
expand_builtin_va_start (tree
);
107 static rtx
expand_builtin_va_end (tree
);
108 static rtx
expand_builtin_va_copy (tree
);
109 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
110 static rtx
expand_builtin_strcmp (tree
, rtx
);
111 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
112 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
113 static rtx
expand_builtin_memcpy (tree
, rtx
);
114 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
115 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
,
116 enum machine_mode
, int);
117 static rtx
expand_builtin_strcpy (tree
, rtx
);
118 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
119 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
120 static rtx
expand_builtin_strncpy (tree
, rtx
);
121 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
122 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
123 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
124 static rtx
expand_builtin_bzero (tree
);
125 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
126 static rtx
expand_builtin_alloca (tree
, bool);
127 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
128 static rtx
expand_builtin_frame_address (tree
, tree
);
129 static tree
stabilize_va_list_loc (location_t
, tree
, int);
130 static rtx
expand_builtin_expect (tree
, rtx
);
131 static tree
fold_builtin_constant_p (tree
);
132 static tree
fold_builtin_expect (location_t
, tree
, tree
);
133 static tree
fold_builtin_classify_type (tree
);
134 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
135 static tree
fold_builtin_inf (location_t
, tree
, int);
136 static tree
fold_builtin_nan (tree
, tree
, int);
137 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
138 static bool validate_arg (const_tree
, enum tree_code code
);
139 static bool integer_valued_real_p (tree
);
140 static tree
fold_trunc_transparent_mathfn (location_t
, tree
, tree
);
141 static bool readonly_data_expr (tree
);
142 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
143 static rtx
expand_builtin_signbit (tree
, rtx
);
144 static tree
fold_builtin_sqrt (location_t
, tree
, tree
);
145 static tree
fold_builtin_cbrt (location_t
, tree
, tree
);
146 static tree
fold_builtin_pow (location_t
, tree
, tree
, tree
, tree
);
147 static tree
fold_builtin_powi (location_t
, tree
, tree
, tree
, tree
);
148 static tree
fold_builtin_cos (location_t
, tree
, tree
, tree
);
149 static tree
fold_builtin_cosh (location_t
, tree
, tree
, tree
);
150 static tree
fold_builtin_tan (tree
, tree
);
151 static tree
fold_builtin_trunc (location_t
, tree
, tree
);
152 static tree
fold_builtin_floor (location_t
, tree
, tree
);
153 static tree
fold_builtin_ceil (location_t
, tree
, tree
);
154 static tree
fold_builtin_round (location_t
, tree
, tree
);
155 static tree
fold_builtin_int_roundingfn (location_t
, tree
, tree
);
156 static tree
fold_builtin_bitop (tree
, tree
);
157 static tree
fold_builtin_memory_op (location_t
, tree
, tree
, tree
, tree
, bool, int);
158 static tree
fold_builtin_strchr (location_t
, tree
, tree
, tree
);
159 static tree
fold_builtin_memchr (location_t
, tree
, tree
, tree
, tree
);
160 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
161 static tree
fold_builtin_strcmp (location_t
, tree
, tree
);
162 static tree
fold_builtin_strncmp (location_t
, tree
, tree
, tree
);
163 static tree
fold_builtin_signbit (location_t
, tree
, tree
);
164 static tree
fold_builtin_copysign (location_t
, tree
, tree
, tree
, tree
);
165 static tree
fold_builtin_isascii (location_t
, tree
);
166 static tree
fold_builtin_toascii (location_t
, tree
);
167 static tree
fold_builtin_isdigit (location_t
, tree
);
168 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
169 static tree
fold_builtin_abs (location_t
, tree
, tree
);
170 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
172 static tree
fold_builtin_n (location_t
, tree
, tree
*, int, bool);
173 static tree
fold_builtin_0 (location_t
, tree
, bool);
174 static tree
fold_builtin_1 (location_t
, tree
, tree
, bool);
175 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
, bool);
176 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
, bool);
177 static tree
fold_builtin_4 (location_t
, tree
, tree
, tree
, tree
, tree
, bool);
178 static tree
fold_builtin_varargs (location_t
, tree
, tree
, bool);
180 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
181 static tree
fold_builtin_strstr (location_t
, tree
, tree
, tree
);
182 static tree
fold_builtin_strrchr (location_t
, tree
, tree
, tree
);
183 static tree
fold_builtin_strcat (location_t
, tree
, tree
);
184 static tree
fold_builtin_strncat (location_t
, tree
, tree
, tree
);
185 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
186 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
187 static tree
fold_builtin_sprintf (location_t
, tree
, tree
, tree
, int);
188 static tree
fold_builtin_snprintf (location_t
, tree
, tree
, tree
, tree
, int);
190 static rtx
expand_builtin_object_size (tree
);
191 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
192 enum built_in_function
);
193 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
194 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
195 static void maybe_emit_free_warning (tree
);
196 static tree
fold_builtin_object_size (tree
, tree
);
197 static tree
fold_builtin_strcat_chk (location_t
, tree
, tree
, tree
, tree
);
198 static tree
fold_builtin_strncat_chk (location_t
, tree
, tree
, tree
, tree
, tree
);
199 static tree
fold_builtin_sprintf_chk (location_t
, tree
, enum built_in_function
);
200 static tree
fold_builtin_printf (location_t
, tree
, tree
, tree
, bool, enum built_in_function
);
201 static tree
fold_builtin_fprintf (location_t
, tree
, tree
, tree
, tree
, bool,
202 enum built_in_function
);
203 static bool init_target_chars (void);
205 static unsigned HOST_WIDE_INT target_newline
;
206 static unsigned HOST_WIDE_INT target_percent
;
207 static unsigned HOST_WIDE_INT target_c
;
208 static unsigned HOST_WIDE_INT target_s
;
209 static char target_percent_c
[3];
210 static char target_percent_s
[3];
211 static char target_percent_s_newline
[4];
212 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
213 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
214 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
215 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
216 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
217 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
218 static tree
do_mpfr_sincos (tree
, tree
, tree
);
219 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
220 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
221 const REAL_VALUE_TYPE
*, bool);
222 static tree
do_mpfr_remquo (tree
, tree
, tree
);
223 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
224 static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with one of the reserved built-in
   prefixes: "__builtin_", "__sync_" or "__atomic_".  The extraction
   had dropped the 'return' statements and braces; restored here.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
241 /* Return true if DECL is a function symbol representing a built-in. */
244 is_builtin_fn (tree decl
)
246 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
);
249 /* By default we assume that c99 functions are present at the runtime,
250 but sincos is not. */
252 default_libc_has_function (enum function_class fn_class
)
254 if (fn_class
== function_c94
255 || fn_class
== function_c99_misc
256 || fn_class
== function_c99_math_complex
)
263 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED
)
269 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED
)
274 /* Return true if NODE should be considered for inline expansion regardless
275 of the optimization level. This means whenever a function is invoked with
276 its "internal" name, which normally contains the prefix "__builtin". */
279 called_as_built_in (tree node
)
281 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
282 we want the name used to call the function, not the name it
284 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
285 return is_builtin_name (name
);
288 /* Compute values M and N such that M divides (address of EXP - N) and such
289 that N < M. If these numbers can be determined, store M in alignp and N in
290 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
291 *alignp and any bit-offset to *bitposp.
293 Note that the address (and thus the alignment) computed here is based
294 on the address to which a symbol resolves, whereas DECL_ALIGN is based
295 on the address at which an object is actually located. These two
296 addresses are not always the same. For example, on ARM targets,
297 the address &foo of a Thumb function foo() has the lowest bit set,
298 whereas foo() itself starts on an even address.
300 If ADDR_P is true we are taking the address of the memory reference EXP
301 and thus cannot rely on the access taking place. */
304 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
305 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
307 HOST_WIDE_INT bitsize
, bitpos
;
309 enum machine_mode mode
;
310 int unsignedp
, volatilep
;
311 unsigned int inner
, align
= BITS_PER_UNIT
;
312 bool known_alignment
= false;
314 /* Get the innermost object and the constant (bitpos) and possibly
315 variable (offset) offset of the access. */
316 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
317 &mode
, &unsignedp
, &volatilep
, true);
319 /* Extract alignment information from the innermost object and
320 possibly adjust bitpos and offset. */
321 if (TREE_CODE (exp
) == FUNCTION_DECL
)
323 /* Function addresses can encode extra information besides their
324 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
325 allows the low bit to be used as a virtual bit, we know
326 that the address itself must be at least 2-byte aligned. */
327 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
328 align
= 2 * BITS_PER_UNIT
;
330 else if (TREE_CODE (exp
) == LABEL_DECL
)
332 else if (TREE_CODE (exp
) == CONST_DECL
)
334 /* The alignment of a CONST_DECL is determined by its initializer. */
335 exp
= DECL_INITIAL (exp
);
336 align
= TYPE_ALIGN (TREE_TYPE (exp
));
337 #ifdef CONSTANT_ALIGNMENT
338 if (CONSTANT_CLASS_P (exp
))
339 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
341 known_alignment
= true;
343 else if (DECL_P (exp
))
345 align
= DECL_ALIGN (exp
);
346 known_alignment
= true;
348 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
350 align
= TYPE_ALIGN (TREE_TYPE (exp
));
352 else if (TREE_CODE (exp
) == INDIRECT_REF
353 || TREE_CODE (exp
) == MEM_REF
354 || TREE_CODE (exp
) == TARGET_MEM_REF
)
356 tree addr
= TREE_OPERAND (exp
, 0);
358 unsigned HOST_WIDE_INT ptr_bitpos
;
360 if (TREE_CODE (addr
) == BIT_AND_EXPR
361 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
363 align
= (tree_to_hwi (TREE_OPERAND (addr
, 1))
364 & -tree_to_hwi (TREE_OPERAND (addr
, 1)));
365 align
*= BITS_PER_UNIT
;
366 addr
= TREE_OPERAND (addr
, 0);
370 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
371 align
= MAX (ptr_align
, align
);
373 /* The alignment of the pointer operand in a TARGET_MEM_REF
374 has to take the variable offset parts into account. */
375 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
379 unsigned HOST_WIDE_INT step
= 1;
381 step
= tree_to_hwi (TMR_STEP (exp
));
382 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
384 if (TMR_INDEX2 (exp
))
385 align
= BITS_PER_UNIT
;
386 known_alignment
= false;
389 /* When EXP is an actual memory reference then we can use
390 TYPE_ALIGN of a pointer indirection to derive alignment.
391 Do so only if get_pointer_alignment_1 did not reveal absolute
392 alignment knowledge and if using that alignment would
393 improve the situation. */
394 if (!addr_p
&& !known_alignment
395 && TYPE_ALIGN (TREE_TYPE (exp
)) > align
)
396 align
= TYPE_ALIGN (TREE_TYPE (exp
));
399 /* Else adjust bitpos accordingly. */
400 bitpos
+= ptr_bitpos
;
401 if (TREE_CODE (exp
) == MEM_REF
402 || TREE_CODE (exp
) == TARGET_MEM_REF
)
403 bitpos
+= mem_ref_offset (exp
).to_short_addr () * BITS_PER_UNIT
;
406 else if (TREE_CODE (exp
) == STRING_CST
)
408 /* STRING_CST are the only constant objects we allow to be not
409 wrapped inside a CONST_DECL. */
410 align
= TYPE_ALIGN (TREE_TYPE (exp
));
411 #ifdef CONSTANT_ALIGNMENT
412 if (CONSTANT_CLASS_P (exp
))
413 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
415 known_alignment
= true;
418 /* If there is a non-constant offset part extract the maximum
419 alignment that can prevail. */
425 if (TREE_CODE (offset
) == PLUS_EXPR
)
427 next_offset
= TREE_OPERAND (offset
, 0);
428 offset
= TREE_OPERAND (offset
, 1);
432 if (tree_fits_uhwi_p (offset
))
434 /* Any overflow in calculating offset_bits won't change
437 = ((unsigned) tree_to_uhwi (offset
) * BITS_PER_UNIT
);
440 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
442 else if (TREE_CODE (offset
) == MULT_EXPR
443 && tree_fits_uhwi_p (TREE_OPERAND (offset
, 1)))
445 /* Any overflow in calculating offset_factor won't change
447 unsigned offset_factor
448 = ((unsigned) tree_to_uhwi (TREE_OPERAND (offset
, 1))
452 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
456 inner
= MIN (inner
, BITS_PER_UNIT
);
459 offset
= next_offset
;
461 /* Alignment is innermost object alignment adjusted by the constant
462 and non-constant offset parts. */
463 align
= MIN (align
, inner
);
466 *bitposp
= bitpos
& (*alignp
- 1);
467 return known_alignment
;
470 /* For a memory reference expression EXP compute values M and N such that M
471 divides (&EXP - N) and such that N < M. If these numbers can be determined,
472 store M in alignp and N in *BITPOSP and return true. Otherwise return false
473 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
476 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
477 unsigned HOST_WIDE_INT
*bitposp
)
479 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
482 /* Return the alignment in bits of EXP, an object. */
485 get_object_alignment (tree exp
)
487 unsigned HOST_WIDE_INT bitpos
= 0;
490 get_object_alignment_1 (exp
, &align
, &bitpos
);
492 /* align and bitpos now specify known low bits of the pointer.
493 ptr & (align - 1) == bitpos. */
496 align
= (bitpos
& -bitpos
);
500 /* For a pointer valued expression EXP compute values M and N such that M
501 divides (EXP - N) and such that N < M. If these numbers can be determined,
502 store M in alignp and N in *BITPOSP and return true. Return false if
503 the results are just a conservative approximation.
505 If EXP is not a pointer, false is returned too. */
508 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
509 unsigned HOST_WIDE_INT
*bitposp
)
513 if (TREE_CODE (exp
) == ADDR_EXPR
)
514 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
515 alignp
, bitposp
, true);
516 else if (TREE_CODE (exp
) == SSA_NAME
517 && POINTER_TYPE_P (TREE_TYPE (exp
)))
519 unsigned int ptr_align
, ptr_misalign
;
520 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
522 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
524 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
525 *alignp
= ptr_align
* BITS_PER_UNIT
;
526 /* We cannot really tell whether this result is an approximation. */
532 *alignp
= BITS_PER_UNIT
;
536 else if (TREE_CODE (exp
) == INTEGER_CST
)
538 *alignp
= BIGGEST_ALIGNMENT
;
539 *bitposp
= ((tree_to_hwi (exp
) * BITS_PER_UNIT
)
540 & (BIGGEST_ALIGNMENT
- 1));
545 *alignp
= BITS_PER_UNIT
;
549 /* Return the alignment in bits of EXP, a pointer valued expression.
550 The alignment returned is, by default, the alignment of the thing that
551 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
553 Otherwise, look at the expression to see if we can do better, i.e., if the
554 expression is actually pointing at an object whose alignment is tighter. */
557 get_pointer_alignment (tree exp
)
559 unsigned HOST_WIDE_INT bitpos
= 0;
562 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
564 /* align and bitpos now specify known low bits of the pointer.
565 ptr & (align - 1) == bitpos. */
568 align
= (bitpos
& -bitpos
);
573 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
574 way, because it could contain a zero byte in the middle.
575 TREE_STRING_LENGTH is the size of the character array, not the string.
577 ONLY_VALUE should be nonzero if the result is not going to be emitted
578 into the instruction stream and zero if it is going to be expanded.
579 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
580 is returned, otherwise NULL, since
581 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
582 evaluate the side-effects.
584 The value returned is of type `ssizetype'.
586 Unfortunately, string_constant can't access the values of const char
587 arrays with initializers, so neither can we do so here. */
590 c_strlen (tree src
, int only_value
)
593 HOST_WIDE_INT offset
;
599 if (TREE_CODE (src
) == COND_EXPR
600 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
604 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
605 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
606 if (tree_int_cst_equal (len1
, len2
))
610 if (TREE_CODE (src
) == COMPOUND_EXPR
611 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
612 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
614 loc
= EXPR_LOC_OR_HERE (src
);
616 src
= string_constant (src
, &offset_node
);
620 max
= TREE_STRING_LENGTH (src
) - 1;
621 ptr
= TREE_STRING_POINTER (src
);
623 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
625 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
626 compute the offset to the following null if we don't know where to
627 start searching for it. */
630 for (i
= 0; i
< max
; i
++)
634 /* We don't know the starting offset, but we do know that the string
635 has no internal zero bytes. We can assume that the offset falls
636 within the bounds of the string; otherwise, the programmer deserves
637 what he gets. Subtract the offset from the length of the string,
638 and return that. This would perhaps not be valid if we were dealing
639 with named arrays in addition to literal string constants. */
641 return size_diffop_loc (loc
, size_int (max
), offset_node
);
644 /* We have a known offset into the string. Start searching there for
645 a null character if we can represent it as a single HOST_WIDE_INT. */
646 if (offset_node
== 0)
648 else if (!tree_fits_shwi_p (offset_node
))
651 offset
= tree_to_shwi (offset_node
);
653 /* If the offset is known to be out of bounds, warn, and call strlen at
655 if (offset
< 0 || offset
> max
)
657 /* Suppress multiple warnings for propagated constant strings. */
658 if (! TREE_NO_WARNING (src
))
660 warning_at (loc
, 0, "offset outside bounds of constant string");
661 TREE_NO_WARNING (src
) = 1;
666 /* Use strlen to search for the first zero byte. Since any strings
667 constructed with build_string will have nulls appended, we win even
668 if we get handed something like (char[4])"abcd".
670 Since OFFSET is our starting index into the string, no further
671 calculation is needed. */
672 return ssize_int (strlen (ptr
+ offset
));
675 /* Return a char pointer for a C string if it is a string constant
676 or sum of string constant and integer constant. */
683 src
= string_constant (src
, &offset_node
);
687 if (offset_node
== 0)
688 return TREE_STRING_POINTER (src
);
689 else if (!tree_fits_uhwi_p (offset_node
)
690 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
693 return TREE_STRING_POINTER (src
) + tree_to_uhwi (offset_node
);
696 /* Return a constant integer corresponding to target reading
697 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
700 c_readstr (const char *str
, enum machine_mode mode
)
705 HOST_WIDE_INT tmp
[MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
];
706 unsigned int len
= (GET_MODE_PRECISION (mode
) + HOST_BITS_PER_WIDE_INT
- 1)
707 / HOST_BITS_PER_WIDE_INT
;
709 for (i
= 0; i
< len
; i
++)
712 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
715 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
718 if (WORDS_BIG_ENDIAN
)
719 j
= GET_MODE_SIZE (mode
) - i
- 1;
720 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
721 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
722 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
726 ch
= (unsigned char) str
[i
];
727 tmp
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
730 c
= wide_int::from_array (tmp
, len
, GET_MODE_PRECISION (mode
));
731 return immed_wide_int_const (c
, mode
);
734 /* Cast a target constant CST to target CHAR and if that value fits into
735 host char type, return zero and put that value into variable pointed to by
739 target_char_cast (tree cst
, char *p
)
741 unsigned HOST_WIDE_INT val
, hostval
;
743 if (TREE_CODE (cst
) != INTEGER_CST
744 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
747 /* Do not care if it fits or not right here. */
748 val
= tree_to_hwi (cst
);
750 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
751 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
754 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
755 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
764 /* Similar to save_expr, but assumes that arbitrary code is not executed
765 in between the multiple evaluations. In particular, we assume that a
766 non-addressable local variable will not be modified. */
769 builtin_save_expr (tree exp
)
771 if (TREE_CODE (exp
) == SSA_NAME
772 || (TREE_ADDRESSABLE (exp
) == 0
773 && (TREE_CODE (exp
) == PARM_DECL
774 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
)))))
777 return save_expr (exp
);
780 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
781 times to get the address of either a higher stack frame, or a return
782 address located within it (depending on FNDECL_CODE). */
785 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
789 #ifdef INITIAL_FRAME_ADDRESS_RTX
790 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
794 /* For a zero count with __builtin_return_address, we don't care what
795 frame address we return, because target-specific definitions will
796 override us. Therefore frame pointer elimination is OK, and using
797 the soft frame pointer is OK.
799 For a nonzero count, or a zero count with __builtin_frame_address,
800 we require a stable offset from the current frame pointer to the
801 previous one, so we must use the hard frame pointer, and
802 we must disable frame pointer elimination. */
803 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
804 tem
= frame_pointer_rtx
;
807 tem
= hard_frame_pointer_rtx
;
809 /* Tell reload not to eliminate the frame pointer. */
810 crtl
->accesses_prior_frames
= 1;
814 /* Some machines need special handling before we can access
815 arbitrary frames. For example, on the SPARC, we must first flush
816 all register windows to the stack. */
817 #ifdef SETUP_FRAME_ADDRESSES
819 SETUP_FRAME_ADDRESSES ();
822 /* On the SPARC, the return address is not in the frame, it is in a
823 register. There is no way to access it off of the current frame
824 pointer, but it can be accessed off the previous frame pointer by
825 reading the value from the register window save area. */
826 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
827 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
831 /* Scan back COUNT frames to the specified frame. */
832 for (i
= 0; i
< count
; i
++)
834 /* Assume the dynamic chain pointer is in the word that the
835 frame address points to, unless otherwise specified. */
836 #ifdef DYNAMIC_CHAIN_ADDRESS
837 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
839 tem
= memory_address (Pmode
, tem
);
840 tem
= gen_frame_mem (Pmode
, tem
);
841 tem
= copy_to_reg (tem
);
844 /* For __builtin_frame_address, return what we've got. But, on
845 the SPARC for example, we may have to add a bias. */
846 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
847 #ifdef FRAME_ADDR_RTX
848 return FRAME_ADDR_RTX (tem
);
853 /* For __builtin_return_address, get the return address from that frame. */
854 #ifdef RETURN_ADDR_RTX
855 tem
= RETURN_ADDR_RTX (count
, tem
);
857 tem
= memory_address (Pmode
,
858 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
859 tem
= gen_frame_mem (Pmode
, tem
);
864 /* Alias set used for setjmp buffer. */
865 static alias_set_type setjmp_alias_set
= -1;
867 /* Construct the leading half of a __builtin_setjmp call. Control will
868 return to RECEIVER_LABEL. This is also called directly by the SJLJ
869 exception handling code. */
872 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
874 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
878 if (setjmp_alias_set
== -1)
879 setjmp_alias_set
= new_alias_set ();
881 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
883 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
885 /* We store the frame pointer and the address of receiver_label in
886 the buffer and use the rest of it for the stack save area, which
887 is machine-dependent. */
889 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
890 set_mem_alias_set (mem
, setjmp_alias_set
);
891 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
893 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
894 GET_MODE_SIZE (Pmode
))),
895 set_mem_alias_set (mem
, setjmp_alias_set
);
897 emit_move_insn (validize_mem (mem
),
898 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
900 stack_save
= gen_rtx_MEM (sa_mode
,
901 plus_constant (Pmode
, buf_addr
,
902 2 * GET_MODE_SIZE (Pmode
)));
903 set_mem_alias_set (stack_save
, setjmp_alias_set
);
904 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
906 /* If there is further processing to do, do it. */
907 #ifdef HAVE_builtin_setjmp_setup
908 if (HAVE_builtin_setjmp_setup
)
909 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
912 /* We have a nonlocal label. */
913 cfun
->has_nonlocal_label
= 1;
916 /* Construct the trailing part of a __builtin_setjmp call. This is
917 also called directly by the SJLJ exception handling code.
918 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
/* Emit the receiver-side fixup code for a __builtin_setjmp target:
   re-establish the frame pointer (and, where distinct, the argument
   pointer), clobber the static chain, and emit any target-specific
   receiver pattern.  RECEIVER_LABEL is handed to the target's
   builtin_setjmp_receiver pattern when one exists.
   NOTE(review): extraction has elided the declaration line(s) before
   this point and the braces within (original line numbers jump);
   the code below is kept verbatim.  */
921 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
925 /* Mark the FP as used when we get here, so we have to make sure it's
926 marked as used by this function. */
927 emit_use (hard_frame_pointer_rtx
)
929 /* Mark the static chain as clobbered here so life information
930 doesn't get messed up for it. */
931 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
932 if (chain
&& REG_P (chain
))
933 emit_clobber (chain
);
935 /* Now put in the code to restore the frame pointer, and argument
936 pointer, if needed. */
937 #ifdef HAVE_nonlocal_goto
938 if (! HAVE_nonlocal_goto
)
940 /* First adjust our frame pointer to its actual value. It was
941 previously set to the start of the virtual area corresponding to
942 the stacked variables when we branched here and now needs to be
943 adjusted to the actual hardware fp value.
945 Assignments to virtual registers are converted by
946 instantiate_virtual_regs into the corresponding assignment
947 to the underlying register (fp in this case) that makes
948 the original assignment true.
949 So the following insn will actually be decrementing fp by
950 STARTING_FRAME_OFFSET. */
951 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
953 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
954 if (fixed_regs
[ARG_POINTER_REGNUM
])
956 #ifdef ELIMINABLE_REGS
957 /* If the argument pointer can be eliminated in favor of the
958 frame pointer, we don't need to restore it. We assume here
959 that if such an elimination is present, it can always be used.
960 This is the case on all known machines; if we don't make this
961 assumption, we do unnecessary saving on many machines. */
963 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
/* Scan the target's elimination table for an
   arg-pointer -> hard-frame-pointer elimination.  */
965 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
966 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
967 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
/* No such elimination: the arg pointer must be reloaded
   from its stack-frame save slot.  */
970 if (i
== ARRAY_SIZE (elim_regs
))
973 /* Now restore our arg pointer from the address at which it
974 was saved in our stack frame. */
975 emit_move_insn (crtl
->args
.internal_arg_pointer
,
976 copy_to_reg (get_arg_pointer_save_area ()));
981 #ifdef HAVE_builtin_setjmp_receiver
982 if (receiver_label
!= NULL
&& HAVE_builtin_setjmp_receiver
)
983 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
986 #ifdef HAVE_nonlocal_goto_receiver
987 if (HAVE_nonlocal_goto_receiver
)
988 emit_insn (gen_nonlocal_goto_receiver ());
993 /* We must not allow the code we just generated to be reordered by
994 scheduling. Specifically, the update of the frame pointer must
995 happen immediately, not later. Similarly, we must block
996 (frame-related) register values to be used across this code. */
997 emit_insn (gen_blockage ())
1000 /* __builtin_longjmp is passed a pointer to an array of five words (not
1001 all will be used on all machines). It operates similarly to the C
1002 library function of the same name, but is more efficient. Much of
1003 the code below is copied from the handling of non-local gotos. */
/* NOTE(review): extraction has elided braces and some lines here
   (original line numbers jump); code text kept verbatim.  */
1006 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
1008 rtx fp
, lab
, stack
, insn
, last
;
1009 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1011 /* DRAP is needed for stack realign if longjmp is expanded to current
1013 if (SUPPORTS_STACK_ALIGNMENT
)
1014 crtl
->need_drap
= true;
/* Lazily create the alias set used for the setjmp buffer slots.  */
1016 if (setjmp_alias_set
== -1)
1017 setjmp_alias_set
= new_alias_set ();
1019 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1021 buf_addr
= force_reg (Pmode
, buf_addr
);
1023 /* We require that the user must pass a second argument of 1, because
1024 that is what builtin_setjmp will return. */
1025 gcc_assert (value
== const1_rtx
);
1027 last
= get_last_insn ();
1028 #ifdef HAVE_builtin_longjmp
1029 if (HAVE_builtin_longjmp
)
1030 emit_insn (gen_builtin_longjmp (buf_addr
));
/* Generic path: pull FP, the label address, and the saved SP
   out of the three-word buffer at BUF_ADDR.  */
1034 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
1035 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
1036 GET_MODE_SIZE (Pmode
)));
1038 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
1039 2 * GET_MODE_SIZE (Pmode
)));
1040 set_mem_alias_set (fp
, setjmp_alias_set
);
1041 set_mem_alias_set (lab
, setjmp_alias_set
);
1042 set_mem_alias_set (stack
, setjmp_alias_set
);
1044 /* Pick up FP, label, and SP from the block and jump. This code is
1045 from expand_goto in stmt.c; see there for detailed comments. */
1046 #ifdef HAVE_nonlocal_goto
1047 if (HAVE_nonlocal_goto
)
1048 /* We have to pass a value to the nonlocal_goto pattern that will
1049 get copied into the static_chain pointer, but it does not matter
1050 what that value is, because builtin_setjmp does not use it. */
1051 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
/* Fallback when the target has no nonlocal_goto pattern:
   restore FP and SP by hand, then jump indirectly.  */
1055 lab
= copy_to_reg (lab
);
1057 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1058 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1060 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1061 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1063 emit_use (hard_frame_pointer_rtx
);
1064 emit_use (stack_pointer_rtx
);
1065 emit_indirect_jump (lab
);
1069 /* Search backwards and mark the jump insn as a non-local goto.
1070 Note that this precludes the use of __builtin_longjmp to a
1071 __builtin_setjmp target in the same function. However, we've
1072 already cautioned the user that these functions are for
1073 internal exception handling use only. */
1074 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
/* We must find a jump before reaching the insns that were
   already present when we started (LAST).  */
1076 gcc_assert (insn
!= last
);
1080 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1083 else if (CALL_P (insn
))
1088 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1089 and the address of the save area. */
/* NOTE(review): extraction has elided braces and some lines here
   (original line numbers jump); code text kept verbatim.  */
1092 expand_builtin_nonlocal_goto (tree exp
)
1094 tree t_label
, t_save_area
;
1095 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
/* Expect exactly (void *label, void *save_area).  */
1097 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1100 t_label
= CALL_EXPR_ARG (exp
, 0);
1101 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1103 r_label
= expand_normal (t_label
);
1104 r_label
= convert_memory_address (Pmode
, r_label
);
1105 r_save_area
= expand_normal (t_save_area
);
1106 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1107 /* Copy the address of the save location to a register just in case it was
1108 based on the frame pointer. */
1109 r_save_area
= copy_to_reg (r_save_area
);
/* Save area layout: word 0 = frame pointer, word 1 = stack pointer.  */
1110 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1111 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1112 plus_constant (Pmode
, r_save_area
,
1113 GET_MODE_SIZE (Pmode
)));
1115 crtl
->has_nonlocal_goto
= 1;
1117 #ifdef HAVE_nonlocal_goto
1118 /* ??? We no longer need to pass the static chain value, afaik. */
1119 if (HAVE_nonlocal_goto
)
1120 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
/* Fallback: restore FP/SP manually and jump indirectly.  */
1124 r_label
= copy_to_reg (r_label
);
1126 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1127 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1129 /* Restore frame pointer for containing function. */
1130 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1131 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1133 /* USE of hard_frame_pointer_rtx added for consistency;
1134 not clear if really needed. */
1135 emit_use (hard_frame_pointer_rtx
);
1136 emit_use (stack_pointer_rtx
);
1138 /* If the architecture is using a GP register, we must
1139 conservatively assume that the target function makes use of it.
1140 The prologue of functions with nonlocal gotos must therefore
1141 initialize the GP register to the appropriate value, and we
1142 must then make sure that this value is live at the point
1143 of the jump. (Note that this doesn't necessarily apply
1144 to targets with a nonlocal_goto pattern; they are free
1145 to implement it in their own way. Note also that this is
1146 a no-op if the GP register is a global invariant.) */
1147 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1148 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1149 emit_use (pic_offset_table_rtx
);
1151 emit_indirect_jump (r_label
);
1154 /* Search backwards to the jump insn and mark it as a
1156 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1160 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1163 else if (CALL_P (insn
))
1170 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1171 (not all will be used on all machines) that was passed to __builtin_setjmp.
1172 It updates the stack pointer in that block to correspond to the current
1176 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1178 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
/* NOTE(review): the declaration of the save slot (presumably
   "rtx stack_save") was elided by extraction — original line
   numbers jump from 1178 to 1180.  The slot addressed below is the
   third word of the buffer (offset 2 * word size).  */
1180 = gen_rtx_MEM (sa_mode
,
1183 plus_constant (Pmode
, buf_addr
,
1184 2 * GET_MODE_SIZE (Pmode
))));
/* Re-save the current stack pointer into that slot.  */
1186 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1189 /* Expand a call to __builtin_prefetch. For a target that does not support
1190 data prefetch, evaluate the memory address argument in case it has side
/* NOTE(review): braces and some lines elided by extraction;
   code text kept verbatim.  */
1194 expand_builtin_prefetch (tree exp
)
1196 tree arg0
, arg1
, arg2
;
/* First argument must be a pointer; trailing args are optional.  */
1200 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1203 arg0
= CALL_EXPR_ARG (exp
, 0);
1205 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1206 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1208 nargs
= call_expr_nargs (exp
);
1210 arg1
= CALL_EXPR_ARG (exp
, 1);
1212 arg1
= integer_zero_node
;
1214 arg2
= CALL_EXPR_ARG (exp
, 2);
1216 arg2
= integer_three_node
;
1218 /* Argument 0 is an address. */
1219 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1221 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1222 if (TREE_CODE (arg1
) != INTEGER_CST
)
1224 error ("second argument to %<__builtin_prefetch%> must be a constant");
1225 arg1
= integer_zero_node
;
1227 op1
= expand_normal (arg1
);
1228 /* Argument 1 must be either zero or one. */
1229 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1231 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1236 /* Argument 2 (locality) must be a compile-time constant int. */
1237 if (TREE_CODE (arg2
) != INTEGER_CST
)
1239 error ("third argument to %<__builtin_prefetch%> must be a constant");
1240 arg2
= integer_zero_node
;
1242 op2
= expand_normal (arg2
);
1243 /* Argument 2 must be 0, 1, 2, or 3. */
1244 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1246 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
/* Emit the target prefetch pattern if one is available.  */
1250 #ifdef HAVE_prefetch
1253 struct expand_operand ops
[3];
1255 create_address_operand (&ops
[0], op0
);
1256 create_integer_operand (&ops
[1], INTVAL (op1
));
1257 create_integer_operand (&ops
[2], INTVAL (op2
));
1258 if (maybe_expand_insn (CODE_FOR_prefetch
, 3, ops
))
1263 /* Don't do anything with direct references to volatile memory, but
1264 generate code to handle other side effects. */
1265 if (!MEM_P (op0
) && side_effects_p (op0
))
1269 /* Get a MEM rtx for expression EXP which is the address of an operand
1270 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1271 the maximum length of the block of memory that might be accessed or
/* NOTE(review): braces and some lines elided by extraction;
   code text kept verbatim.  */
1275 get_memory_rtx (tree exp
, tree len
)
1277 tree orig_exp
= exp
;
1280 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1281 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1282 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1283 exp
= TREE_OPERAND (exp
, 0);
/* Expand the (original) address and wrap it in a BLKmode MEM.  */
1285 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1286 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1288 /* Get an expression we can use to find the attributes to assign to MEM.
1289 First remove any nops. */
1290 while (CONVERT_EXPR_P (exp
)
1291 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1292 exp
= TREE_OPERAND (exp
, 0);
1294 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1295 (as builtin stringops may alias with anything). */
1296 exp
= fold_build2 (MEM_REF
,
1297 build_array_type (char_type_node
,
1298 build_range_type (sizetype
,
1299 size_one_node
, len
)),
1300 exp
, build_int_cst (ptr_type_node
, 0));
1302 /* If the MEM_REF has no acceptable address, try to get the base object
1303 from the original address we got, and build an all-aliasing
1304 unknown-sized access to that one. */
1305 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1306 set_mem_attributes (mem
, exp
, 0);
1307 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1308 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1311 exp
= build_fold_addr_expr (exp
);
1312 exp
= fold_build2 (MEM_REF
,
1313 build_array_type (char_type_node
,
1314 build_range_type (sizetype
,
1317 exp
, build_int_cst (ptr_type_node
, 0));
1318 set_mem_attributes (mem
, exp
, 0);
/* Otherwise fall back to alias set 0 (aliases everything).  */
1320 set_mem_alias_set (mem
, 0);
1324 /* Built-in functions to perform an untyped call and return. */
/* Per-target caches of the mode in which each hard register is
   saved/restored by __builtin_apply_args / __builtin_apply; they
   live in this_target_builtins so switching targets switches the
   tables (see the default_target_builtins definition above).  */
1326 #define apply_args_mode \
1327 (this_target_builtins->x_apply_args_mode)
1328 #define apply_result_mode \
1329 (this_target_builtins->x_apply_result_mode)
1331 /* Return the size required for the block returned by __builtin_apply_args,
1332 and initialize apply_args_mode. */
/* NOTE(review): braces and the regno/align declarations were elided
   by extraction; code text kept verbatim.  */
1335 apply_args_size (void)
1337 static int size
= -1;
1340 enum machine_mode mode
;
1342 /* The values computed by this function never change. */
1345 /* The first value is the incoming arg-pointer. */
1346 size
= GET_MODE_SIZE (Pmode
);
1348 /* The second value is the structure value address unless this is
1349 passed as an "invisible" first argument. */
1350 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1351 size
+= GET_MODE_SIZE (Pmode
);
/* Accumulate an aligned slot for every register that can carry
   a function argument; record its save mode in apply_args_mode.  */
1353 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1354 if (FUNCTION_ARG_REGNO_P (regno
))
1356 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1358 gcc_assert (mode
!= VOIDmode
);
1360 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1361 if (size
% align
!= 0)
1362 size
= CEIL (size
, align
) * align
;
1363 size
+= GET_MODE_SIZE (mode
);
1364 apply_args_mode
[regno
] = mode
;
/* Non-argument registers get VOIDmode, i.e. "not saved".  */
1368 apply_args_mode
[regno
] = VOIDmode
;
1374 /* Return the size required for the block returned by __builtin_apply,
1375 and initialize apply_result_mode. */
/* NOTE(review): braces and the regno/align declarations were elided
   by extraction; code text kept verbatim.  */
1378 apply_result_size (void)
1380 static int size
= -1;
1382 enum machine_mode mode
;
1384 /* The values computed by this function never change. */
/* Accumulate an aligned slot for every register that can hold a
   function return value; record its mode in apply_result_mode.  */
1389 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1390 if (targetm
.calls
.function_value_regno_p (regno
))
1392 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1394 gcc_assert (mode
!= VOIDmode
);
1396 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1397 if (size
% align
!= 0)
1398 size
= CEIL (size
, align
) * align
;
1399 size
+= GET_MODE_SIZE (mode
);
1400 apply_result_mode
[regno
] = mode
;
1403 apply_result_mode
[regno
] = VOIDmode
;
1405 /* Allow targets that use untyped_call and untyped_return to override
1406 the size so that machine-specific information can be stored here. */
1407 #ifdef APPLY_RESULT_SIZE
1408 size
= APPLY_RESULT_SIZE
;
1414 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1415 /* Create a vector describing the result block RESULT. If SAVEP is true,
1416 the result block is used to save the values; otherwise it is used to
1417 restore the values. */
/* NOTE(review): braces and the reg/mem declarations were elided by
   extraction; code text kept verbatim.  Builds a PARALLEL of SETs,
   one per return register recorded in apply_result_mode.  */
1420 result_vector (int savep
, rtx result
)
1422 int regno
, size
, align
, nelts
;
1423 enum machine_mode mode
;
1425 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
1428 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1429 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1431 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1432 if (size
% align
!= 0)
1433 size
= CEIL (size
, align
) * align
;
/* When saving, use the outgoing regno; when restoring, map to the
   register the value arrives in (INCOMING_REGNO).  */
1434 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1435 mem
= adjust_address (result
, mode
, size
);
1436 savevec
[nelts
++] = (savep
1437 ? gen_rtx_SET (VOIDmode
, mem
, reg
)
1438 : gen_rtx_SET (VOIDmode
, reg
, mem
));
1439 size
+= GET_MODE_SIZE (mode
);
1441 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1443 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1445 /* Save the state required to perform an untyped call with the same
1446 arguments as were passed to the current function. */
/* NOTE(review): braces and some declarations (registers, tem) elided
   by extraction; code text kept verbatim.  Returns the address of a
   stack block laid out as: arg pointer, [struct value address],
   then each argument register per apply_args_mode.  */
1449 expand_builtin_apply_args_1 (void)
1452 int size
, align
, regno
;
1453 enum machine_mode mode
;
1454 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1456 /* Create a block where the arg-pointer, structure value address,
1457 and argument registers can be saved. */
1458 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1460 /* Walk past the arg-pointer and structure value address. */
1461 size
= GET_MODE_SIZE (Pmode
);
1462 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1463 size
+= GET_MODE_SIZE (Pmode
);
1465 /* Save each register used in calling a function to the block. */
1466 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1467 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1469 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1470 if (size
% align
!= 0)
1471 size
= CEIL (size
, align
) * align
;
1473 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1475 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1476 size
+= GET_MODE_SIZE (mode
);
1479 /* Save the arg pointer to the block. */
1480 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1481 #ifdef STACK_GROWS_DOWNWARD
1482 /* We need the pointer as the caller actually passed them to us, not
1483 as we might have pretended they were passed. Make sure it's a valid
1484 operand, as emit_move_insn isn't expected to handle a PLUS. */
1486 = force_operand (plus_constant (Pmode
, tem
, crtl
->args
.pretend_args_size
),
1489 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1491 size
= GET_MODE_SIZE (Pmode
);
1493 /* Save the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 if (struct_incoming_value
)
1497 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1498 copy_to_reg (struct_incoming_value
));
1499 size
+= GET_MODE_SIZE (Pmode
);
1502 /* Return the address of the block. */
1503 return copy_addr_to_reg (XEXP (registers
, 0));
1506 /* __builtin_apply_args returns block of memory allocated on
1507 the stack into which is stored the arg pointer, structure
1508 value address, static chain, and all the registers that might
1509 possibly be used in performing a function call. The code is
1510 moved to the start of the function so the incoming values are
/* NOTE(review): braces and the temp/seq declarations were elided by
   extraction; code text kept verbatim.  */
1514 expand_builtin_apply_args (void)
1516 /* Don't do __builtin_apply_args more than once in a function.
1517 Save the result of the first call and reuse it. */
1518 if (apply_args_value
!= 0)
1519 return apply_args_value
;
1521 /* When this function is called, it means that registers must be
1522 saved on entry to this function. So we migrate the
1523 call to the first insn of this function. */
1528 temp
= expand_builtin_apply_args_1 ();
1532 apply_args_value
= temp
;
1534 /* Put the insns after the NOTE that starts the function.
1535 If this is inside a start_sequence, make the outer-level insn
1536 chain current, so the code is placed at the start of the
1537 function. If internal_arg_pointer is a non-virtual pseudo,
1538 it needs to be placed after the function that initializes
1540 push_topmost_sequence ();
1541 if (REG_P (crtl
->args
.internal_arg_pointer
)
1542 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1543 emit_insn_before (seq
, parm_birth_insn
);
1545 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1546 pop_topmost_sequence ();
1551 /* Perform an untyped call and save the state required to perform an
1552 untyped return of whatever value was returned by the given function. */
/* NOTE(review): braces, #else/#endif lines, and some declarations
   (valreg among them) were elided by extraction; code text kept
   verbatim.  FUNCTION is the callee address, ARGUMENTS the block
   built by __builtin_apply_args, ARGSIZE the bytes of stack
   arguments to copy; returns the address of a block holding the
   callee's raw return registers.  */
1555 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1557 int size
, align
, regno
;
1558 enum machine_mode mode
;
1559 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1560 rtx old_stack_level
= 0;
1561 rtx call_fusage
= 0;
1562 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1564 arguments
= convert_memory_address (Pmode
, arguments
);
1566 /* Create a block where the return registers can be saved. */
1567 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1569 /* Fetch the arg pointer from the ARGUMENTS block. */
1570 incoming_args
= gen_reg_rtx (Pmode
);
1571 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1572 #ifndef STACK_GROWS_DOWNWARD
1573 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1574 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1577 /* Push a new argument block and copy the arguments. Do not allow
1578 the (potential) memcpy call below to interfere with our stack
1580 do_pending_stack_adjust ();
1583 /* Save the stack with nonlocal if available. */
1584 #ifdef HAVE_save_stack_nonlocal
1585 if (HAVE_save_stack_nonlocal
)
1586 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1589 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1591 /* Allocate a block of memory onto the stack and copy the memory
1592 arguments to the outgoing arguments address. We can pass TRUE
1593 as the 4th argument because we just saved the stack pointer
1594 and will restore it right after the call. */
1595 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1597 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1598 may have already set current_function_calls_alloca to true.
1599 current_function_calls_alloca won't be set if argsize is zero,
1600 so we have to guarantee need_drap is true here. */
1601 if (SUPPORTS_STACK_ALIGNMENT
)
1602 crtl
->need_drap
= true;
1604 dest
= virtual_outgoing_args_rtx
;
1605 #ifndef STACK_GROWS_DOWNWARD
1606 if (CONST_INT_P (argsize
))
1607 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1609 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
/* Copy the caller's stack arguments into the new argument block.  */
1611 dest
= gen_rtx_MEM (BLKmode
, dest
);
1612 set_mem_align (dest
, PARM_BOUNDARY
);
1613 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1614 set_mem_align (src
, PARM_BOUNDARY
);
1615 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1617 /* Refer to the argument block. */
1619 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1620 set_mem_align (arguments
, PARM_BOUNDARY
);
1622 /* Walk past the arg-pointer and structure value address. */
1623 size
= GET_MODE_SIZE (Pmode
);
1625 size
+= GET_MODE_SIZE (Pmode
);
1627 /* Restore each of the registers previously saved. Make USE insns
1628 for each of these registers for use in making the call. */
1629 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1630 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1632 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1633 if (size
% align
!= 0)
1634 size
= CEIL (size
, align
) * align
;
1635 reg
= gen_rtx_REG (mode
, regno
);
1636 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1637 use_reg (&call_fusage
, reg
);
1638 size
+= GET_MODE_SIZE (mode
);
1641 /* Restore the structure value address unless this is passed as an
1642 "invisible" first argument. */
1643 size
= GET_MODE_SIZE (Pmode
);
1646 rtx value
= gen_reg_rtx (Pmode
);
1647 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1648 emit_move_insn (struct_value
, value
);
1649 if (REG_P (struct_value
))
1650 use_reg (&call_fusage
, struct_value
);
1651 size
+= GET_MODE_SIZE (Pmode
);
1654 /* All arguments and registers used for the call are set up by now! */
1655 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1657 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1658 and we don't want to load it into a register as an optimization,
1659 because prepare_call_address already did it if it should be done. */
1660 if (GET_CODE (function
) != SYMBOL_REF
)
1661 function
= memory_address (FUNCTION_MODE
, function
);
1663 /* Generate the actual call instruction and save the return value. */
1664 #ifdef HAVE_untyped_call
1665 if (HAVE_untyped_call
)
1666 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1667 result
, result_vector (1, result
)));
1670 #ifdef HAVE_call_value
1671 if (HAVE_call_value
)
1675 /* Locate the unique return register. It is not possible to
1676 express a call that sets more than one return register using
1677 call_value; use untyped_call for that. In fact, untyped_call
1678 only needs to save the return registers in the given block. */
1679 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1680 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1682 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1684 valreg
= gen_rtx_REG (mode
, regno
);
1687 emit_call_insn (GEN_CALL_VALUE (valreg
,
1688 gen_rtx_MEM (FUNCTION_MODE
, function
),
1689 const0_rtx
, NULL_RTX
, const0_rtx
));
1691 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1697 /* Find the CALL insn we just emitted, and attach the register usage
1699 call_insn
= last_call_insn ();
1700 add_function_usage_to (call_insn
, call_fusage
);
1702 /* Restore the stack. */
1703 #ifdef HAVE_save_stack_nonlocal
1704 if (HAVE_save_stack_nonlocal
)
1705 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1708 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1709 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1713 /* Return the address of the result block. */
1714 result
= copy_addr_to_reg (XEXP (result
, 0));
1715 return convert_memory_address (ptr_mode
, result
);
1718 /* Perform an untyped return. */
/* NOTE(review): braces and the reg declaration were elided by
   extraction; code text kept verbatim.  RESULT is the address of
   the block produced by __builtin_apply; the return registers are
   reloaded from it and the function returns immediately.  */
1721 expand_builtin_return (rtx result
)
1723 int size
, align
, regno
;
1724 enum machine_mode mode
;
1726 rtx call_fusage
= 0;
1728 result
= convert_memory_address (Pmode
, result
);
/* Ensure apply_result_mode is initialized.  */
1730 apply_result_size ();
1731 result
= gen_rtx_MEM (BLKmode
, result
);
1733 #ifdef HAVE_untyped_return
1734 if (HAVE_untyped_return
)
1736 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1742 /* Restore the return value and note that each value is used. */
1744 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1745 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1747 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1748 if (size
% align
!= 0)
1749 size
= CEIL (size
, align
) * align
;
1750 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1751 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
/* Collect USE insns for the restored registers in a side
   sequence so they can be emitted just before the return.  */
1753 push_to_sequence (call_fusage
);
1755 call_fusage
= get_insns ();
1757 size
+= GET_MODE_SIZE (mode
);
1760 /* Put the USE insns before the return. */
1761 emit_insn (call_fusage
);
1763 /* Return whatever values was restored by jumping directly to the end
1765 expand_naked_return ();
1768 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code to the corresponding __builtin_classify_type
   enum type_class value.  NOTE(review): the RECORD/UNION cases
   between original lines 1786 and 1788 were partly elided by
   extraction; code text kept verbatim.  */
1770 static enum type_class
1771 type_to_class (tree type
)
1773 switch (TREE_CODE (type
))
1775 case VOID_TYPE
: return void_type_class
;
1776 case INTEGER_TYPE
: return integer_type_class
;
1777 case ENUMERAL_TYPE
: return enumeral_type_class
;
1778 case BOOLEAN_TYPE
: return boolean_type_class
;
1779 case POINTER_TYPE
: return pointer_type_class
;
1780 case REFERENCE_TYPE
: return reference_type_class
;
1781 case OFFSET_TYPE
: return offset_type_class
;
1782 case REAL_TYPE
: return real_type_class
;
1783 case COMPLEX_TYPE
: return complex_type_class
;
1784 case FUNCTION_TYPE
: return function_type_class
;
1785 case METHOD_TYPE
: return method_type_class
;
1786 case RECORD_TYPE
: return record_type_class
;
1788 case QUAL_UNION_TYPE
: return union_type_class
;
1789 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1790 ? string_type_class
: array_type_class
);
1791 case LANG_TYPE
: return lang_type_class
;
1792 default: return no_type_class
;
1796 /* Expand a call EXP to __builtin_classify_type. */
/* Returns the class of the first argument's type as a CONST_INT,
   or no_type_class when the call has no arguments.  */
1799 expand_builtin_classify_type (tree exp
)
1801 if (call_expr_nargs (exp
))
1802 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1803 return GEN_INT (no_type_class
);
1806 /* This helper macro, meant to be used in mathfn_built_in below,
1807 determines which among a set of three builtin math functions is
1808 appropriate for a given type mode. The `F' and `L' cases are
1809 automatically generated from the `double' case. */
1810 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1811 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1812 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1813 fcodel = BUILT_IN_MATHFN##L ; break;
1814 /* Similar to above, but appends _R after any F/L suffix. */
1815 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1816 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1817 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1818 fcodel = BUILT_IN_MATHFN##L_R ; break;
1820 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1821 if available. If IMPLICIT is true use the implicit builtin declaration,
1822 otherwise use the explicit declaration. If we can't do the conversion,
/* NOTE(review): the switch statement's header/braces and the
   NULL_TREE fall-through returns were elided by extraction;
   code text kept verbatim.  The CASE_MATHFN list sets
   fcode/fcodef/fcodel for the double/float/long-double variants;
   TYPE then selects which of the three goes into fcode2.  */
1826 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit_p
)
1828 enum built_in_function fcode
, fcodef
, fcodel
, fcode2
;
1832 CASE_MATHFN (BUILT_IN_ACOS
)
1833 CASE_MATHFN (BUILT_IN_ACOSH
)
1834 CASE_MATHFN (BUILT_IN_ASIN
)
1835 CASE_MATHFN (BUILT_IN_ASINH
)
1836 CASE_MATHFN (BUILT_IN_ATAN
)
1837 CASE_MATHFN (BUILT_IN_ATAN2
)
1838 CASE_MATHFN (BUILT_IN_ATANH
)
1839 CASE_MATHFN (BUILT_IN_CBRT
)
1840 CASE_MATHFN (BUILT_IN_CEIL
)
1841 CASE_MATHFN (BUILT_IN_CEXPI
)
1842 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1843 CASE_MATHFN (BUILT_IN_COS
)
1844 CASE_MATHFN (BUILT_IN_COSH
)
1845 CASE_MATHFN (BUILT_IN_DREM
)
1846 CASE_MATHFN (BUILT_IN_ERF
)
1847 CASE_MATHFN (BUILT_IN_ERFC
)
1848 CASE_MATHFN (BUILT_IN_EXP
)
1849 CASE_MATHFN (BUILT_IN_EXP10
)
1850 CASE_MATHFN (BUILT_IN_EXP2
)
1851 CASE_MATHFN (BUILT_IN_EXPM1
)
1852 CASE_MATHFN (BUILT_IN_FABS
)
1853 CASE_MATHFN (BUILT_IN_FDIM
)
1854 CASE_MATHFN (BUILT_IN_FLOOR
)
1855 CASE_MATHFN (BUILT_IN_FMA
)
1856 CASE_MATHFN (BUILT_IN_FMAX
)
1857 CASE_MATHFN (BUILT_IN_FMIN
)
1858 CASE_MATHFN (BUILT_IN_FMOD
)
1859 CASE_MATHFN (BUILT_IN_FREXP
)
1860 CASE_MATHFN (BUILT_IN_GAMMA
)
1861 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1862 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1863 CASE_MATHFN (BUILT_IN_HYPOT
)
1864 CASE_MATHFN (BUILT_IN_ILOGB
)
1865 CASE_MATHFN (BUILT_IN_ICEIL
)
1866 CASE_MATHFN (BUILT_IN_IFLOOR
)
1867 CASE_MATHFN (BUILT_IN_INF
)
1868 CASE_MATHFN (BUILT_IN_IRINT
)
1869 CASE_MATHFN (BUILT_IN_IROUND
)
1870 CASE_MATHFN (BUILT_IN_ISINF
)
1871 CASE_MATHFN (BUILT_IN_J0
)
1872 CASE_MATHFN (BUILT_IN_J1
)
1873 CASE_MATHFN (BUILT_IN_JN
)
1874 CASE_MATHFN (BUILT_IN_LCEIL
)
1875 CASE_MATHFN (BUILT_IN_LDEXP
)
1876 CASE_MATHFN (BUILT_IN_LFLOOR
)
1877 CASE_MATHFN (BUILT_IN_LGAMMA
)
1878 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1879 CASE_MATHFN (BUILT_IN_LLCEIL
)
1880 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1881 CASE_MATHFN (BUILT_IN_LLRINT
)
1882 CASE_MATHFN (BUILT_IN_LLROUND
)
1883 CASE_MATHFN (BUILT_IN_LOG
)
1884 CASE_MATHFN (BUILT_IN_LOG10
)
1885 CASE_MATHFN (BUILT_IN_LOG1P
)
1886 CASE_MATHFN (BUILT_IN_LOG2
)
1887 CASE_MATHFN (BUILT_IN_LOGB
)
1888 CASE_MATHFN (BUILT_IN_LRINT
)
1889 CASE_MATHFN (BUILT_IN_LROUND
)
1890 CASE_MATHFN (BUILT_IN_MODF
)
1891 CASE_MATHFN (BUILT_IN_NAN
)
1892 CASE_MATHFN (BUILT_IN_NANS
)
1893 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1894 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1895 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1896 CASE_MATHFN (BUILT_IN_POW
)
1897 CASE_MATHFN (BUILT_IN_POWI
)
1898 CASE_MATHFN (BUILT_IN_POW10
)
1899 CASE_MATHFN (BUILT_IN_REMAINDER
)
1900 CASE_MATHFN (BUILT_IN_REMQUO
)
1901 CASE_MATHFN (BUILT_IN_RINT
)
1902 CASE_MATHFN (BUILT_IN_ROUND
)
1903 CASE_MATHFN (BUILT_IN_SCALB
)
1904 CASE_MATHFN (BUILT_IN_SCALBLN
)
1905 CASE_MATHFN (BUILT_IN_SCALBN
)
1906 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1907 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1908 CASE_MATHFN (BUILT_IN_SIN
)
1909 CASE_MATHFN (BUILT_IN_SINCOS
)
1910 CASE_MATHFN (BUILT_IN_SINH
)
1911 CASE_MATHFN (BUILT_IN_SQRT
)
1912 CASE_MATHFN (BUILT_IN_TAN
)
1913 CASE_MATHFN (BUILT_IN_TANH
)
1914 CASE_MATHFN (BUILT_IN_TGAMMA
)
1915 CASE_MATHFN (BUILT_IN_TRUNC
)
1916 CASE_MATHFN (BUILT_IN_Y0
)
1917 CASE_MATHFN (BUILT_IN_Y1
)
1918 CASE_MATHFN (BUILT_IN_YN
)
/* Select the variant that matches TYPE's main variant.  */
1924 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1926 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1928 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1933 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
1936 return builtin_decl_explicit (fcode2
);
1939 /* Like mathfn_built_in_1(), but always use the implicit array. */
/* Convenience wrapper: resolve FN for TYPE using implicit
   builtin declarations only.  */
1942 mathfn_built_in (tree type
, enum built_in_function fn
)
1944 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1947 /* If errno must be maintained, expand the RTL to check if the result,
1948 TARGET, of a built-in function call, EXP, is NaN, and if so set
/* NOTE(review): braces and some lines elided by extraction;
   code text kept verbatim.  Uses the self-inequality test
   (x != x) to detect NaN, then either stores TARGET_EDOM into the
   target's errno location directly or re-issues the library call.  */
1952 expand_errno_check (tree exp
, rtx target
)
1954 rtx lab
= gen_label_rtx ();
1956 /* Test the result; if it is NaN, set errno=EDOM because
1957 the argument was not in the domain. */
1958 do_compare_rtx_and_jump (target
, target
, EQ
, 0, GET_MODE (target
),
1959 NULL_RTX
, NULL_RTX
, lab
,
1960 /* The jump is very likely. */
1961 REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1));
1964 /* If this built-in doesn't throw an exception, set errno directly. */
1965 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1967 #ifdef GEN_ERRNO_RTX
1968 rtx errno_rtx
= GEN_ERRNO_RTX
;
/* Fallback when the target defines no GEN_ERRNO_RTX: address
   errno through its symbol.  */
1971 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1973 emit_move_insn (errno_rtx
,
1974 gen_int_mode (TARGET_EDOM
, GET_MODE (errno_rtx
)));
1980 /* Make sure the library call isn't expanded as a tail call. */
1981 CALL_EXPR_TAILCALL (exp
) = 0;
1983 /* We can't set errno=EDOM directly; let the library call do it.
1984 Pop the arguments right away in case the call gets deleted. */
1986 expand_call (exp
, target
, 0);
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab for this builtin, and record whether the library
     version would set errno (in which case extra checking is emitted).  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt sets EDOM only for negative arguments.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, result);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns, result;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* scalbn, scalbln and ldexp take a floating point first argument and
     an integer second argument; everything else takes two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only expandable when the radix is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Expanding an errno check costs size; prefer the library call.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
			 result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns, result;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list so arguments are not expanded
     (and their side-effects performed) more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* sincos_optab computes both values; pick the one we need by
	     passing 0 for the other output.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx last = get_last_insn ();
      /* Keep the original argument so it can be restored on failure.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: undo any insns emitted and restore the
	 original argument so the normal call sees it unchanged.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs for the sin and cos output pointers so the stores
     below carry proper alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      /* Emit a call to the sincos library function, writing into two
	 stack temporaries that are read back below.  */
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      /* No sincos available at all: lower to cexp (0 + arg*i).  */
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cexpi(x) = cos(x) + sin(x)*i,
     so the real part is op2 (cos) and the imaginary part op1 (sin).  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2656 /* Conveniently construct a function call expression. FNDECL names the
2657 function to be called, N is the number of arguments, and the "..."
2658 parameters are the argument expressions. Unlike build_call_exr
2659 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2662 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2665 tree fntype
= TREE_TYPE (fndecl
);
2666 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2669 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2671 SET_EXPR_LOCATION (fn
, loc
);
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* Narrow/widen the long result to the mode of the builtin.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2913 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2914 a normal call should be emitted rather than expanding the function
2915 in-line. EXP is the expression that is a call to the builtin
2916 function; if convenient, the result should be placed in TARGET. */
2919 expand_builtin_powi (tree exp
, rtx target
)
2923 enum machine_mode mode
;
2924 enum machine_mode mode2
;
2926 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2929 arg0
= CALL_EXPR_ARG (exp
, 0);
2930 arg1
= CALL_EXPR_ARG (exp
, 1);
2931 mode
= TYPE_MODE (TREE_TYPE (exp
));
2933 /* Emit a libcall to libgcc. */
2935 /* Mode of the 2nd argument must match that of an int. */
2936 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2938 if (target
== NULL_RTX
)
2939 target
= gen_reg_rtx (mode
);
2941 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2942 if (GET_MODE (op0
) != mode
)
2943 op0
= convert_to_mode (mode
, op0
, 0);
2944 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2945 if (GET_MODE (op1
) != mode2
)
2946 op1
= convert_to_mode (mode2
, op1
, 0);
2948 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2949 target
, LCT_CONST
, mode
, 2,
2950 op0
, mode
, op1
, mode2
);
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      /* Emit the source-setup insns ahead of the strlen insn recorded
	 above (or at the very beginning if there were no prior insns).  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
3059 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3060 bytes from constant string DATA + OFFSET and return it as target
3064 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3065 enum machine_mode mode
)
3067 const char *str
= (const char *) data
;
3069 gcc_assert (offset
>= 0
3070 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3071 <= strlen (str
) + 1));
3073 return c_readstr (str
+ offset
, mode
);
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Use profile feedback, when available, to pick better
	 alignment/size hints for the block move.  */
      if (currently_expanding_gimple_stmt)
	stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3156 /* Expand a call EXP to the mempcpy builtin.
3157 Return NULL_RTX if we failed; the caller should emit a normal call,
3158 otherwise try to get the result in TARGET, if convenient (and in
3159 mode MODE if that's convenient). If ENDP is 0 return the
3160 destination pointer, if ENDP is 1 return the end pointer ala
3161 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3165 expand_builtin_mempcpy (tree exp
, rtx target
, enum machine_mode mode
)
3167 if (!validate_arglist (exp
,
3168 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3172 tree dest
= CALL_EXPR_ARG (exp
, 0);
3173 tree src
= CALL_EXPR_ARG (exp
, 1);
3174 tree len
= CALL_EXPR_ARG (exp
, 2);
3175 return expand_builtin_mempcpy_args (dest
, src
, len
,
3176 target
, mode
, /*endp=*/ 1);
3180 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3181 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3182 so that this can also be called without constructing an actual CALL_EXPR.
3183 The other arguments and return value are the same as for
3184 expand_builtin_mempcpy. */
3187 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3188 rtx target
, enum machine_mode mode
, int endp
)
3190 /* If return value is ignored, transform mempcpy into memcpy. */
3191 if (target
== const0_rtx
&& builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3193 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3194 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3196 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3200 const char *src_str
;
3201 unsigned int src_align
= get_pointer_alignment (src
);
3202 unsigned int dest_align
= get_pointer_alignment (dest
);
3203 rtx dest_mem
, src_mem
, len_rtx
;
3205 /* If either SRC or DEST is not a pointer type, don't do this
3206 operation in-line. */
3207 if (dest_align
== 0 || src_align
== 0)
3210 /* If LEN is not constant, call the normal function. */
3211 if (! tree_fits_uhwi_p (len
))
3214 len_rtx
= expand_normal (len
);
3215 src_str
= c_getstr (src
);
3217 /* If SRC is a string constant and block move would be done
3218 by pieces, we can avoid loading the string from memory
3219 and only stored the computed constants. */
3221 && CONST_INT_P (len_rtx
)
3222 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3223 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3224 CONST_CAST (char *, src_str
),
3227 dest_mem
= get_memory_rtx (dest
, len
);
3228 set_mem_align (dest_mem
, dest_align
);
3229 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3230 builtin_memcpy_read_str
,
3231 CONST_CAST (char *, src_str
),
3232 dest_align
, false, endp
);
3233 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3234 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3238 if (CONST_INT_P (len_rtx
)
3239 && can_move_by_pieces (INTVAL (len_rtx
),
3240 MIN (dest_align
, src_align
)))
3242 dest_mem
= get_memory_rtx (dest
, len
);
3243 set_mem_align (dest_mem
, dest_align
);
3244 src_mem
= get_memory_rtx (src
, len
);
3245 set_mem_align (src_mem
, src_align
);
3246 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3247 MIN (dest_align
, src_align
), endp
);
3248 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3249 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3258 # define HAVE_movstr 0
3259 # define CODE_FOR_movstr CODE_FOR_nothing
3262 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3263 we failed, the caller should emit a normal call, otherwise try to
3264 get the result in TARGET, if convenient. If ENDP is 0 return the
3265 destination pointer, if ENDP is 1 return the end pointer ala
3266 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3270 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3272 struct expand_operand ops
[3];
3279 dest_mem
= get_memory_rtx (dest
, NULL
);
3280 src_mem
= get_memory_rtx (src
, NULL
);
3283 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3284 dest_mem
= replace_equiv_address (dest_mem
, target
);
3287 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3288 create_fixed_operand (&ops
[1], dest_mem
);
3289 create_fixed_operand (&ops
[2], src_mem
);
3290 expand_insn (CODE_FOR_movstr
, 3, ops
);
3292 if (endp
&& target
!= const0_rtx
)
3294 target
= ops
[0].value
;
3295 /* movstr is supposed to set end to the address of the NUL
3296 terminator. If the caller requested a mempcpy-like return value,
3300 rtx tem
= plus_constant (GET_MODE (target
),
3301 gen_lowpart (GET_MODE (target
), target
), 1);
3302 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3308 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3309 NULL_RTX if we failed the caller should emit a normal call, otherwise
3310 try to get the result in TARGET, if convenient (and in mode MODE if that's
3314 expand_builtin_strcpy (tree exp
, rtx target
)
3316 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3318 tree dest
= CALL_EXPR_ARG (exp
, 0);
3319 tree src
= CALL_EXPR_ARG (exp
, 1);
3320 return expand_builtin_strcpy_args (dest
, src
, target
);
3325 /* Helper function to do the actual work for expand_builtin_strcpy. The
3326 arguments to the builtin_strcpy call DEST and SRC are broken out
3327 so that this can also be called without constructing an actual CALL_EXPR.
3328 The other arguments and return value are the same as for
3329 expand_builtin_strcpy. */
3332 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3334 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3337 /* Expand a call EXP to the stpcpy builtin.
3338 Return NULL_RTX if we failed the caller should emit a normal call,
3339 otherwise try to get the result in TARGET, if convenient (and in
3340 mode MODE if that's convenient). */
3343 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3346 location_t loc
= EXPR_LOCATION (exp
);
3348 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3351 dst
= CALL_EXPR_ARG (exp
, 0);
3352 src
= CALL_EXPR_ARG (exp
, 1);
3354 /* If return value is ignored, transform stpcpy into strcpy. */
3355 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3357 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3358 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3359 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3366 /* Ensure we get an actual string whose length can be evaluated at
3367 compile-time, not an expression containing a string. This is
3368 because the latter will potentially produce pessimized code
3369 when used to produce the return value. */
3370 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3371 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3373 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3374 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3375 target
, mode
, /*endp=*/2);
3380 if (TREE_CODE (len
) == INTEGER_CST
)
3382 rtx len_rtx
= expand_normal (len
);
3384 if (CONST_INT_P (len_rtx
))
3386 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3392 if (mode
!= VOIDmode
)
3393 target
= gen_reg_rtx (mode
);
3395 target
= gen_reg_rtx (GET_MODE (ret
));
3397 if (GET_MODE (target
) != GET_MODE (ret
))
3398 ret
= gen_lowpart (GET_MODE (target
), ret
);
3400 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3401 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3409 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3413 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3414 bytes from constant string DATA + OFFSET and return it as target
3418 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3419 enum machine_mode mode
)
3421 const char *str
= (const char *) data
;
3423 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3426 return c_readstr (str
+ offset
, mode
);
3429 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3430 NULL_RTX if we failed the caller should emit a normal call. */
3433 expand_builtin_strncpy (tree exp
, rtx target
)
3435 location_t loc
= EXPR_LOCATION (exp
);
3437 if (validate_arglist (exp
,
3438 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3440 tree dest
= CALL_EXPR_ARG (exp
, 0);
3441 tree src
= CALL_EXPR_ARG (exp
, 1);
3442 tree len
= CALL_EXPR_ARG (exp
, 2);
3443 tree slen
= c_strlen (src
, 1);
3445 /* We must be passed a constant len and src parameter. */
3446 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
3449 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3451 /* We're required to pad with trailing zeros if the requested
3452 len is greater than strlen(s2)+1. In that case try to
3453 use store_by_pieces, if it fails, punt. */
3454 if (tree_int_cst_lt (slen
, len
))
3456 unsigned int dest_align
= get_pointer_alignment (dest
);
3457 const char *p
= c_getstr (src
);
3460 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
3461 || !can_store_by_pieces (tree_to_uhwi (len
),
3462 builtin_strncpy_read_str
,
3463 CONST_CAST (char *, p
),
3467 dest_mem
= get_memory_rtx (dest
, len
);
3468 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3469 builtin_strncpy_read_str
,
3470 CONST_CAST (char *, p
), dest_align
, false, 0);
3471 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3472 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3479 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3480 bytes from constant string DATA + OFFSET and return it as target
3484 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3485 enum machine_mode mode
)
3487 const char *c
= (const char *) data
;
3488 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3490 memset (p
, *c
, GET_MODE_SIZE (mode
));
3492 return c_readstr (p
, mode
);
3495 /* Callback routine for store_by_pieces. Return the RTL of a register
3496 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3497 char value given in the RTL register data. For example, if mode is
3498 4 bytes wide, return the RTL for 0x01010101*data. */
3501 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3502 enum machine_mode mode
)
3508 size
= GET_MODE_SIZE (mode
);
3512 p
= XALLOCAVEC (char, size
);
3513 memset (p
, 1, size
);
3514 coeff
= c_readstr (p
, mode
);
3516 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3517 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3518 return force_reg (mode
, target
);
3521 /* Expand expression EXP, which is a call to the memset builtin. Return
3522 NULL_RTX if we failed the caller should emit a normal call, otherwise
3523 try to get the result in TARGET, if convenient (and in mode MODE if that's
3527 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3529 if (!validate_arglist (exp
,
3530 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3534 tree dest
= CALL_EXPR_ARG (exp
, 0);
3535 tree val
= CALL_EXPR_ARG (exp
, 1);
3536 tree len
= CALL_EXPR_ARG (exp
, 2);
3537 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3541 /* Helper function to do the actual work for expand_builtin_memset. The
3542 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3543 so that this can also be called without constructing an actual CALL_EXPR.
3544 The other arguments and return value are the same as for
3545 expand_builtin_memset. */
3548 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3549 rtx target
, enum machine_mode mode
, tree orig_exp
)
3552 enum built_in_function fcode
;
3553 enum machine_mode val_mode
;
3555 unsigned int dest_align
;
3556 rtx dest_mem
, dest_addr
, len_rtx
;
3557 HOST_WIDE_INT expected_size
= -1;
3558 unsigned int expected_align
= 0;
3560 dest_align
= get_pointer_alignment (dest
);
3562 /* If DEST is not a pointer type, don't do this operation in-line. */
3563 if (dest_align
== 0)
3566 if (currently_expanding_gimple_stmt
)
3567 stringop_block_profile (currently_expanding_gimple_stmt
,
3568 &expected_align
, &expected_size
);
3570 if (expected_align
< dest_align
)
3571 expected_align
= dest_align
;
3573 /* If the LEN parameter is zero, return DEST. */
3574 if (integer_zerop (len
))
3576 /* Evaluate and ignore VAL in case it has side-effects. */
3577 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3578 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3581 /* Stabilize the arguments in case we fail. */
3582 dest
= builtin_save_expr (dest
);
3583 val
= builtin_save_expr (val
);
3584 len
= builtin_save_expr (len
);
3586 len_rtx
= expand_normal (len
);
3587 dest_mem
= get_memory_rtx (dest
, len
);
3588 val_mode
= TYPE_MODE (unsigned_char_type_node
);
3590 if (TREE_CODE (val
) != INTEGER_CST
)
3594 val_rtx
= expand_normal (val
);
3595 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
3597 /* Assume that we can memset by pieces if we can store
3598 * the coefficients by pieces (in the required modes).
3599 * We can't pass builtin_memset_gen_str as that emits RTL. */
3601 if (tree_fits_uhwi_p (len
)
3602 && can_store_by_pieces (tree_to_uhwi (len
),
3603 builtin_memset_read_str
, &c
, dest_align
,
3606 val_rtx
= force_reg (val_mode
, val_rtx
);
3607 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3608 builtin_memset_gen_str
, val_rtx
, dest_align
,
3611 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3612 dest_align
, expected_align
,
3616 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3617 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3621 if (target_char_cast (val
, &c
))
3626 if (tree_fits_uhwi_p (len
)
3627 && can_store_by_pieces (tree_to_uhwi (len
),
3628 builtin_memset_read_str
, &c
, dest_align
,
3630 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3631 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3632 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
3633 gen_int_mode (c
, val_mode
),
3634 dest_align
, expected_align
,
3638 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3639 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3643 set_mem_align (dest_mem
, dest_align
);
3644 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3645 CALL_EXPR_TAILCALL (orig_exp
)
3646 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3647 expected_align
, expected_size
);
3651 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3652 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3658 fndecl
= get_callee_fndecl (orig_exp
);
3659 fcode
= DECL_FUNCTION_CODE (fndecl
);
3660 if (fcode
== BUILT_IN_MEMSET
)
3661 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
3663 else if (fcode
== BUILT_IN_BZERO
)
3664 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
3668 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3669 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3670 return expand_call (fn
, target
, target
== const0_rtx
);
3673 /* Expand expression EXP, which is a call to the bzero builtin. Return
3674 NULL_RTX if we failed the caller should emit a normal call. */
3677 expand_builtin_bzero (tree exp
)
3680 location_t loc
= EXPR_LOCATION (exp
);
3682 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3685 dest
= CALL_EXPR_ARG (exp
, 0);
3686 size
= CALL_EXPR_ARG (exp
, 1);
3688 /* New argument list transforming bzero(ptr x, int y) to
3689 memset(ptr x, int 0, size_t y). This is done this way
3690 so that if it isn't expanded inline, we fallback to
3691 calling bzero instead of memset. */
3693 return expand_builtin_memset_args (dest
, integer_zero_node
,
3694 fold_convert_loc (loc
,
3695 size_type_node
, size
),
3696 const0_rtx
, VOIDmode
, exp
);
3699 /* Expand expression EXP, which is a call to the memcmp built-in function.
3700 Return NULL_RTX if we failed and the caller should emit a normal call,
3701 otherwise try to get the result in TARGET, if convenient (and in mode
3702 MODE, if that's convenient). */
3705 expand_builtin_memcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3706 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3708 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3710 if (!validate_arglist (exp
,
3711 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3714 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3715 implementing memcmp because it will stop if it encounters two
3717 #if defined HAVE_cmpmemsi
3719 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3722 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3723 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3724 tree len
= CALL_EXPR_ARG (exp
, 2);
3726 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3727 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3728 enum machine_mode insn_mode
;
3731 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
3735 /* If we don't have POINTER_TYPE, call the function. */
3736 if (arg1_align
== 0 || arg2_align
== 0)
3739 /* Make a place to write the result of the instruction. */
3742 && REG_P (result
) && GET_MODE (result
) == insn_mode
3743 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3744 result
= gen_reg_rtx (insn_mode
);
3746 arg1_rtx
= get_memory_rtx (arg1
, len
);
3747 arg2_rtx
= get_memory_rtx (arg2
, len
);
3748 arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
3750 /* Set MEM_SIZE as appropriate. */
3751 if (CONST_INT_P (arg3_rtx
))
3753 set_mem_size (arg1_rtx
, INTVAL (arg3_rtx
));
3754 set_mem_size (arg2_rtx
, INTVAL (arg3_rtx
));
3758 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3759 GEN_INT (MIN (arg1_align
, arg2_align
)));
3766 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
3767 TYPE_MODE (integer_type_node
), 3,
3768 XEXP (arg1_rtx
, 0), Pmode
,
3769 XEXP (arg2_rtx
, 0), Pmode
,
3770 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
3771 TYPE_UNSIGNED (sizetype
)),
3772 TYPE_MODE (sizetype
));
3774 /* Return the value in the proper mode for this function. */
3775 mode
= TYPE_MODE (TREE_TYPE (exp
));
3776 if (GET_MODE (result
) == mode
)
3778 else if (target
!= 0)
3780 convert_move (target
, result
, 0);
3784 return convert_to_mode (mode
, result
, 0);
3786 #endif /* HAVE_cmpmemsi. */
3791 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3792 if we failed the caller should emit a normal call, otherwise try to get
3793 the result in TARGET, if convenient. */
3796 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
3798 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3801 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3802 if (direct_optab_handler (cmpstr_optab
, SImode
) != CODE_FOR_nothing
3803 || direct_optab_handler (cmpstrn_optab
, SImode
) != CODE_FOR_nothing
)
3805 rtx arg1_rtx
, arg2_rtx
;
3806 rtx result
, insn
= NULL_RTX
;
3808 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3809 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3811 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3812 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3814 /* If we don't have POINTER_TYPE, call the function. */
3815 if (arg1_align
== 0 || arg2_align
== 0)
3818 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3819 arg1
= builtin_save_expr (arg1
);
3820 arg2
= builtin_save_expr (arg2
);
3822 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
3823 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
3825 #ifdef HAVE_cmpstrsi
3826 /* Try to call cmpstrsi. */
3829 enum machine_mode insn_mode
3830 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3832 /* Make a place to write the result of the instruction. */
3835 && REG_P (result
) && GET_MODE (result
) == insn_mode
3836 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3837 result
= gen_reg_rtx (insn_mode
);
3839 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
3840 GEN_INT (MIN (arg1_align
, arg2_align
)));
3843 #ifdef HAVE_cmpstrnsi
3844 /* Try to determine at least one length and call cmpstrnsi. */
3845 if (!insn
&& HAVE_cmpstrnsi
)
3850 enum machine_mode insn_mode
3851 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3852 tree len1
= c_strlen (arg1
, 1);
3853 tree len2
= c_strlen (arg2
, 1);
3856 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3858 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3860 /* If we don't have a constant length for the first, use the length
3861 of the second, if we know it. We don't require a constant for
3862 this case; some cost analysis could be done if both are available
3863 but neither is constant. For now, assume they're equally cheap,
3864 unless one has side effects. If both strings have constant lengths,
3871 else if (TREE_SIDE_EFFECTS (len1
))
3873 else if (TREE_SIDE_EFFECTS (len2
))
3875 else if (TREE_CODE (len1
) != INTEGER_CST
)
3877 else if (TREE_CODE (len2
) != INTEGER_CST
)
3879 else if (tree_int_cst_lt (len1
, len2
))
3884 /* If both arguments have side effects, we cannot optimize. */
3885 if (!len
|| TREE_SIDE_EFFECTS (len
))
3888 arg3_rtx
= expand_normal (len
);
3890 /* Make a place to write the result of the instruction. */
3893 && REG_P (result
) && GET_MODE (result
) == insn_mode
3894 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3895 result
= gen_reg_rtx (insn_mode
);
3897 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3898 GEN_INT (MIN (arg1_align
, arg2_align
)));
3904 enum machine_mode mode
;
3907 /* Return the value in the proper mode for this function. */
3908 mode
= TYPE_MODE (TREE_TYPE (exp
));
3909 if (GET_MODE (result
) == mode
)
3912 return convert_to_mode (mode
, result
, 0);
3913 convert_move (target
, result
, 0);
3917 /* Expand the library call ourselves using a stabilized argument
3918 list to avoid re-evaluating the function's arguments twice. */
3919 #ifdef HAVE_cmpstrnsi
3922 fndecl
= get_callee_fndecl (exp
);
3923 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
3924 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3925 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
3926 return expand_call (fn
, target
, target
== const0_rtx
);
3932 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3933 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3934 the result in TARGET, if convenient. */
3937 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3938 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3940 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3942 if (!validate_arglist (exp
,
3943 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3946 /* If c_strlen can determine an expression for one of the string
3947 lengths, and it doesn't have side effects, then emit cmpstrnsi
3948 using length MIN(strlen(string)+1, arg3). */
3949 #ifdef HAVE_cmpstrnsi
3952 tree len
, len1
, len2
;
3953 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3956 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3957 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3958 tree arg3
= CALL_EXPR_ARG (exp
, 2);
3960 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3961 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3962 enum machine_mode insn_mode
3963 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3965 len1
= c_strlen (arg1
, 1);
3966 len2
= c_strlen (arg2
, 1);
3969 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
3971 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
3973 /* If we don't have a constant length for the first, use the length
3974 of the second, if we know it. We don't require a constant for
3975 this case; some cost analysis could be done if both are available
3976 but neither is constant. For now, assume they're equally cheap,
3977 unless one has side effects. If both strings have constant lengths,
3984 else if (TREE_SIDE_EFFECTS (len1
))
3986 else if (TREE_SIDE_EFFECTS (len2
))
3988 else if (TREE_CODE (len1
) != INTEGER_CST
)
3990 else if (TREE_CODE (len2
) != INTEGER_CST
)
3992 else if (tree_int_cst_lt (len1
, len2
))
3997 /* If both arguments have side effects, we cannot optimize. */
3998 if (!len
|| TREE_SIDE_EFFECTS (len
))
4001 /* The actual new length parameter is MIN(len,arg3). */
4002 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
4003 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
4005 /* If we don't have POINTER_TYPE, call the function. */
4006 if (arg1_align
== 0 || arg2_align
== 0)
4009 /* Make a place to write the result of the instruction. */
4012 && REG_P (result
) && GET_MODE (result
) == insn_mode
4013 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4014 result
= gen_reg_rtx (insn_mode
);
4016 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4017 arg1
= builtin_save_expr (arg1
);
4018 arg2
= builtin_save_expr (arg2
);
4019 len
= builtin_save_expr (len
);
4021 arg1_rtx
= get_memory_rtx (arg1
, len
);
4022 arg2_rtx
= get_memory_rtx (arg2
, len
);
4023 arg3_rtx
= expand_normal (len
);
4024 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4025 GEN_INT (MIN (arg1_align
, arg2_align
)));
4030 /* Return the value in the proper mode for this function. */
4031 mode
= TYPE_MODE (TREE_TYPE (exp
));
4032 if (GET_MODE (result
) == mode
)
4035 return convert_to_mode (mode
, result
, 0);
4036 convert_move (target
, result
, 0);
4040 /* Expand the library call ourselves using a stabilized argument
4041 list to avoid re-evaluating the function's arguments twice. */
4042 fndecl
= get_callee_fndecl (exp
);
4043 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4045 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4046 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4047 return expand_call (fn
, target
, target
== const0_rtx
);
4053 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4054 if that's convenient. */
4057 expand_builtin_saveregs (void)
4061 /* Don't do __builtin_saveregs more than once in a function.
4062 Save the result of the first call and reuse it. */
4063 if (saveregs_value
!= 0)
4064 return saveregs_value
;
4066 /* When this function is called, it means that registers must be
4067 saved on entry to this function. So we migrate the call to the
4068 first insn of this function. */
4072 /* Do whatever the machine needs done in this case. */
4073 val
= targetm
.calls
.expand_builtin_saveregs ();
4078 saveregs_value
= val
;
4080 /* Put the insns after the NOTE that starts the function. If this
4081 is inside a start_sequence, make the outer-level insn chain current, so
4082 the code is placed at the start of the function. */
4083 push_topmost_sequence ();
4084 emit_insn_after (seq
, entry_of_function ());
4085 pop_topmost_sequence ();
4090 /* Expand a call to __builtin_next_arg. */
4093 expand_builtin_next_arg (void)
4095 /* Checking arguments is already done in fold_builtin_next_arg
4096 that must be called before this function. */
4097 return expand_binop (ptr_mode
, add_optab
,
4098 crtl
->args
.internal_arg_pointer
,
4099 crtl
->args
.arg_offset_rtx
,
4100 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4103 /* Make it easier for the backends by protecting the valist argument
4104 from multiple evaluations. */
4107 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4109 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4111 /* The current way of determining the type of valist is completely
4112 bogus. We should have the information on the va builtin instead. */
4114 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4116 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4118 if (TREE_SIDE_EFFECTS (valist
))
4119 valist
= save_expr (valist
);
4121 /* For this case, the backends will be expecting a pointer to
4122 vatype, but it's possible we've actually been given an array
4123 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4125 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4127 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4128 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4133 tree pt
= build_pointer_type (vatype
);
4137 if (! TREE_SIDE_EFFECTS (valist
))
4140 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4141 TREE_SIDE_EFFECTS (valist
) = 1;
4144 if (TREE_SIDE_EFFECTS (valist
))
4145 valist
= save_expr (valist
);
4146 valist
= fold_build2_loc (loc
, MEM_REF
,
4147 vatype
, valist
, build_int_cst (pt
, 0));
4153 /* The "standard" definition of va_list is void*. */
4156 std_build_builtin_va_list (void)
4158 return ptr_type_node
;
4161 /* The "standard" abi va_list is va_list_type_node. */
4164 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4166 return va_list_type_node
;
4169 /* The "standard" type of va_list is va_list_type_node. */
4172 std_canonical_va_list_type (tree type
)
4176 if (INDIRECT_REF_P (type
))
4177 type
= TREE_TYPE (type
);
4178 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE (type
)))
4179 type
= TREE_TYPE (type
);
4180 wtype
= va_list_type_node
;
4182 /* Treat structure va_list types. */
4183 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4184 htype
= TREE_TYPE (htype
);
4185 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4187 /* If va_list is an array type, the argument may have decayed
4188 to a pointer type, e.g. by being passed to another function.
4189 In that case, unwrap both types so that we can compare the
4190 underlying records. */
4191 if (TREE_CODE (htype
) == ARRAY_TYPE
4192 || POINTER_TYPE_P (htype
))
4194 wtype
= TREE_TYPE (wtype
);
4195 htype
= TREE_TYPE (htype
);
4198 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4199 return va_list_type_node
;
4204 /* The "standard" implementation of va_start: just assign `nextarg' to
4208 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4210 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4211 convert_move (va_r
, nextarg
, 0);
4214 /* Expand EXP, a call to __builtin_va_start. */
4217 expand_builtin_va_start (tree exp
)
4221 location_t loc
= EXPR_LOCATION (exp
);
4223 if (call_expr_nargs (exp
) < 2)
4225 error_at (loc
, "too few arguments to function %<va_start%>");
4229 if (fold_builtin_next_arg (exp
, true))
4232 nextarg
= expand_builtin_next_arg ();
4233 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4235 if (targetm
.expand_builtin_va_start
)
4236 targetm
.expand_builtin_va_start (valist
, nextarg
);
4238 std_expand_builtin_va_start (valist
, nextarg
);
4244 /* Return a dummy expression of type TYPE in order to keep going after an
4248 dummy_object (tree type
)
4250 tree t
= build_int_cst (build_pointer_type (type
), 0);
4251 return build2 (MEM_REF
, type
, t
, t
);
4254 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4255 builtin function, but a very special sort of operator. */
4257 enum gimplify_status
4258 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
4260 tree promoted_type
, have_va_type
;
4261 tree valist
= TREE_OPERAND (*expr_p
, 0);
4262 tree type
= TREE_TYPE (*expr_p
);
4264 location_t loc
= EXPR_LOCATION (*expr_p
);
4266 /* Verify that valist is of the proper type. */
4267 have_va_type
= TREE_TYPE (valist
);
4268 if (have_va_type
== error_mark_node
)
4270 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
4272 if (have_va_type
== NULL_TREE
)
4274 error_at (loc
, "first argument to %<va_arg%> not of type %<va_list%>");
4278 /* Generate a diagnostic for requesting data of a type that cannot
4279 be passed through `...' due to type promotion at the call site. */
4280 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4283 static bool gave_help
;
4286 /* Unfortunately, this is merely undefined, rather than a constraint
4287 violation, so we cannot make this an error. If this call is never
4288 executed, the program is still strictly conforming. */
4289 warned
= warning_at (loc
, 0,
4290 "%qT is promoted to %qT when passed through %<...%>",
4291 type
, promoted_type
);
4292 if (!gave_help
&& warned
)
4295 inform (loc
, "(so you should pass %qT not %qT to %<va_arg%>)",
4296 promoted_type
, type
);
4299 /* We can, however, treat "undefined" any way we please.
4300 Call abort to encourage the user to fix the program. */
4302 inform (loc
, "if this code is reached, the program will abort");
4303 /* Before the abort, allow the evaluation of the va_list
4304 expression to exit or longjmp. */
4305 gimplify_and_add (valist
, pre_p
);
4306 t
= build_call_expr_loc (loc
,
4307 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
4308 gimplify_and_add (t
, pre_p
);
4310 /* This is dead code, but go ahead and finish so that the
4311 mode of the result comes out right. */
4312 *expr_p
= dummy_object (type
);
4317 /* Make it easier for the backends by protecting the valist argument
4318 from multiple evaluations. */
4319 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
)
4321 /* For this case, the backends will be expecting a pointer to
4322 TREE_TYPE (abi), but it's possible we've
4323 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4325 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4327 tree p1
= build_pointer_type (TREE_TYPE (have_va_type
));
4328 valist
= fold_convert_loc (loc
, p1
,
4329 build_fold_addr_expr_loc (loc
, valist
));
4332 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4335 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4337 if (!targetm
.gimplify_va_arg_expr
)
4338 /* FIXME: Once most targets are converted we should merely
4339 assert this is non-null. */
4342 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
4347 /* Expand EXP, a call to __builtin_va_end. */
4350 expand_builtin_va_end (tree exp
)
4352 tree valist
= CALL_EXPR_ARG (exp
, 0);
4354 /* Evaluate for side effects, if needed. I hate macros that don't
4356 if (TREE_SIDE_EFFECTS (valist
))
4357 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4362 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4363 builtin rather than just as an assignment in stdarg.h because of the
4364 nastiness of array-type va_list types. */
4367 expand_builtin_va_copy (tree exp
)
4370 location_t loc
= EXPR_LOCATION (exp
);
4372 dst
= CALL_EXPR_ARG (exp
, 0);
4373 src
= CALL_EXPR_ARG (exp
, 1);
4375 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4376 src
= stabilize_va_list_loc (loc
, src
, 0);
4378 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4380 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4382 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4383 TREE_SIDE_EFFECTS (t
) = 1;
4384 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4388 rtx dstb
, srcb
, size
;
4390 /* Evaluate to pointers. */
4391 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4392 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4393 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4394 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4396 dstb
= convert_memory_address (Pmode
, dstb
);
4397 srcb
= convert_memory_address (Pmode
, srcb
);
4399 /* "Dereference" to BLKmode memories. */
4400 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4401 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4402 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4403 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4404 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4405 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4408 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4414 /* Expand a call to one of the builtin functions __builtin_frame_address or
4415 __builtin_return_address. */
4418 expand_builtin_frame_address (tree fndecl
, tree exp
)
4420 /* The argument must be a nonnegative integer constant.
4421 It counts the number of frames to scan up the stack.
4422 The value is the return address saved in that frame. */
4423 if (call_expr_nargs (exp
) == 0)
4424 /* Warning about missing arg was already issued. */
4426 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
4428 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4429 error ("invalid argument to %<__builtin_frame_address%>");
4431 error ("invalid argument to %<__builtin_return_address%>");
4437 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4438 tree_to_uhwi (CALL_EXPR_ARG (exp
, 0)));
4440 /* Some ports cannot access arbitrary stack frames. */
4443 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4444 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4446 warning (0, "unsupported argument to %<__builtin_return_address%>");
4450 /* For __builtin_frame_address, return what we've got. */
4451 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4455 && ! CONSTANT_P (tem
))
4456 tem
= copy_addr_to_reg (tem
);
4461 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4462 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4463 is the same as for allocate_dynamic_stack_space. */
4466 expand_builtin_alloca (tree exp
, bool cannot_accumulate
)
4472 bool alloca_with_align
= (DECL_FUNCTION_CODE (get_callee_fndecl (exp
))
4473 == BUILT_IN_ALLOCA_WITH_ALIGN
);
4475 /* Emit normal call if we use mudflap. */
4480 = (alloca_with_align
4481 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4482 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4487 /* Compute the argument. */
4488 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4490 /* Compute the alignment. */
4491 align
= (alloca_with_align
4492 ? tree_to_hwi (CALL_EXPR_ARG (exp
, 1))
4493 : BIGGEST_ALIGNMENT
);
4495 /* Allocate the desired space. */
4496 result
= allocate_dynamic_stack_space (op0
, 0, align
, cannot_accumulate
);
4497 result
= convert_memory_address (ptr_mode
, result
);
4502 /* Expand a call to bswap builtin in EXP.
4503 Return NULL_RTX if a normal call should be emitted rather than expanding the
4504 function in-line. If convenient, the result should be placed in TARGET.
4505 SUBTARGET may be used as the target for computing one of EXP's operands. */
4508 expand_builtin_bswap (enum machine_mode target_mode
, tree exp
, rtx target
,
4514 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4517 arg
= CALL_EXPR_ARG (exp
, 0);
4518 op0
= expand_expr (arg
,
4519 subtarget
&& GET_MODE (subtarget
) == target_mode
4520 ? subtarget
: NULL_RTX
,
4521 target_mode
, EXPAND_NORMAL
);
4522 if (GET_MODE (op0
) != target_mode
)
4523 op0
= convert_to_mode (target_mode
, op0
, 1);
4525 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
4527 gcc_assert (target
);
4529 return convert_to_mode (target_mode
, target
, 1);
4532 /* Expand a call to a unary builtin in EXP.
4533 Return NULL_RTX if a normal call should be emitted rather than expanding the
4534 function in-line. If convenient, the result should be placed in TARGET.
4535 SUBTARGET may be used as the target for computing one of EXP's operands. */
4538 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
4539 rtx subtarget
, optab op_optab
)
4543 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4546 /* Compute the argument. */
4547 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
4549 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
4550 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
4551 VOIDmode
, EXPAND_NORMAL
);
4552 /* Compute op, into TARGET if possible.
4553 Set TARGET to wherever the result comes back. */
4554 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
4555 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
4556 gcc_assert (target
);
4558 return convert_to_mode (target_mode
, target
, 0);
4561 /* Expand a call to __builtin_expect. We just return our argument
4562 as the builtin_expect semantic should've been already executed by
4563 tree branch prediction pass. */
4566 expand_builtin_expect (tree exp
, rtx target
)
4570 if (call_expr_nargs (exp
) < 2)
4572 arg
= CALL_EXPR_ARG (exp
, 0);
4574 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4575 /* When guessing was done, the hints should be already stripped away. */
4576 gcc_assert (!flag_guess_branch_prob
4577 || optimize
== 0 || seen_error ());
4581 /* Expand a call to __builtin_assume_aligned. We just return our first
4582 argument as the builtin_assume_aligned semantic should've been already
4586 expand_builtin_assume_aligned (tree exp
, rtx target
)
4588 if (call_expr_nargs (exp
) < 2)
4590 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
4592 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
4593 && (call_expr_nargs (exp
) < 3
4594 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
4599 expand_builtin_trap (void)
4604 rtx insn
= emit_insn (gen_trap ());
4605 /* For trap insns when not accumulating outgoing args force
4606 REG_ARGS_SIZE note to prevent crossjumping of calls with
4607 different args sizes. */
4608 if (!ACCUMULATE_OUTGOING_ARGS
)
4609 add_reg_note (insn
, REG_ARGS_SIZE
, GEN_INT (stack_pointer_delta
));
4613 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
4617 /* Expand a call to __builtin_unreachable. We do nothing except emit
4618 a barrier saying that control flow will not pass here.
4620 It is the responsibility of the program being compiled to ensure
4621 that control flow does never reach __builtin_unreachable. */
4623 expand_builtin_unreachable (void)
4628 /* Expand EXP, a call to fabs, fabsf or fabsl.
4629 Return NULL_RTX if a normal call should be emitted rather than expanding
4630 the function inline. If convenient, the result should be placed
4631 in TARGET. SUBTARGET may be used as the target for computing
4635 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
4637 enum machine_mode mode
;
4641 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4644 arg
= CALL_EXPR_ARG (exp
, 0);
4645 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
4646 mode
= TYPE_MODE (TREE_TYPE (arg
));
4647 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4648 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
4651 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4652 Return NULL is a normal call should be emitted rather than expanding the
4653 function inline. If convenient, the result should be placed in TARGET.
4654 SUBTARGET may be used as the target for computing the operand. */
4657 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
4662 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
4665 arg
= CALL_EXPR_ARG (exp
, 0);
4666 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4668 arg
= CALL_EXPR_ARG (exp
, 1);
4669 op1
= expand_normal (arg
);
4671 return expand_copysign (op0
, op1
, target
);
4674 /* Create a new constant string literal and return a char* pointer to it.
4675 The STRING_CST value is the LEN characters at STR. */
4677 build_string_literal (int len
, const char *str
)
4679 tree t
, elem
, index
, type
;
4681 t
= build_string (len
, str
);
4682 elem
= build_type_variant (char_type_node
, 1, 0);
4683 index
= build_index_type (size_int (len
- 1));
4684 type
= build_array_type (elem
, index
);
4685 TREE_TYPE (t
) = type
;
4686 TREE_CONSTANT (t
) = 1;
4687 TREE_READONLY (t
) = 1;
4688 TREE_STATIC (t
) = 1;
4690 type
= build_pointer_type (elem
);
4691 t
= build1 (ADDR_EXPR
, type
,
4692 build4 (ARRAY_REF
, elem
,
4693 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
4697 /* Expand a call to __builtin___clear_cache. */
4700 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
4702 #ifndef HAVE_clear_cache
4703 #ifdef CLEAR_INSN_CACHE
4704 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4705 does something. Just do the default expansion to a call to
4709 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4710 does nothing. There is no need to call it. Do nothing. */
4712 #endif /* CLEAR_INSN_CACHE */
4714 /* We have a "clear_cache" insn, and it will handle everything. */
4716 rtx begin_rtx
, end_rtx
;
4718 /* We must not expand to a library call. If we did, any
4719 fallback library function in libgcc that might contain a call to
4720 __builtin___clear_cache() would recurse infinitely. */
4721 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4723 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4727 if (HAVE_clear_cache
)
4729 struct expand_operand ops
[2];
4731 begin
= CALL_EXPR_ARG (exp
, 0);
4732 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4734 end
= CALL_EXPR_ARG (exp
, 1);
4735 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4737 create_address_operand (&ops
[0], begin_rtx
);
4738 create_address_operand (&ops
[1], end_rtx
);
4739 if (maybe_expand_insn (CODE_FOR_clear_cache
, 2, ops
))
4743 #endif /* HAVE_clear_cache */
4746 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4749 round_trampoline_addr (rtx tramp
)
4751 rtx temp
, addend
, mask
;
4753 /* If we don't need too much alignment, we'll have been guaranteed
4754 proper alignment by get_trampoline_type. */
4755 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
4758 /* Round address up to desired boundary. */
4759 temp
= gen_reg_rtx (Pmode
);
4760 addend
= gen_int_mode (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1, Pmode
);
4761 mask
= gen_int_mode (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
, Pmode
);
4763 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
4764 temp
, 0, OPTAB_LIB_WIDEN
);
4765 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
4766 temp
, 0, OPTAB_LIB_WIDEN
);
4772 expand_builtin_init_trampoline (tree exp
, bool onstack
)
4774 tree t_tramp
, t_func
, t_chain
;
4775 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
4777 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
4778 POINTER_TYPE
, VOID_TYPE
))
4781 t_tramp
= CALL_EXPR_ARG (exp
, 0);
4782 t_func
= CALL_EXPR_ARG (exp
, 1);
4783 t_chain
= CALL_EXPR_ARG (exp
, 2);
4785 r_tramp
= expand_normal (t_tramp
);
4786 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
4787 MEM_NOTRAP_P (m_tramp
) = 1;
4789 /* If ONSTACK, the TRAMP argument should be the address of a field
4790 within the local function's FRAME decl. Either way, let's see if
4791 we can fill in the MEM_ATTRs for this memory. */
4792 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
4793 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
4795 /* Creator of a heap trampoline is responsible for making sure the
4796 address is aligned to at least STACK_BOUNDARY. Normally malloc
4797 will ensure this anyhow. */
4798 tmp
= round_trampoline_addr (r_tramp
);
4801 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
4802 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
4803 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
4806 /* The FUNC argument should be the address of the nested function.
4807 Extract the actual function decl to pass to the hook. */
4808 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
4809 t_func
= TREE_OPERAND (t_func
, 0);
4810 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
4812 r_chain
= expand_normal (t_chain
);
4814 /* Generate insns to initialize the trampoline. */
4815 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
4819 trampolines_created
= 1;
4821 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
4822 "trampoline generated for nested function %qD", t_func
);
4829 expand_builtin_adjust_trampoline (tree exp
)
4833 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
4836 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4837 tramp
= round_trampoline_addr (tramp
);
4838 if (targetm
.calls
.trampoline_adjust_address
)
4839 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
4844 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4845 function. The function first checks whether the back end provides
4846 an insn to implement signbit for the respective mode. If not, it
4847 checks whether the floating point format of the value is such that
4848 the sign bit can be extracted. If that is not the case, the
4849 function returns NULL_RTX to indicate that a normal call should be
4850 emitted rather than expanding the function in-line. EXP is the
4851 expression that is a call to the builtin function; if convenient,
4852 the result should be placed in TARGET. */
4854 expand_builtin_signbit (tree exp
, rtx target
)
4856 const struct real_format
*fmt
;
4857 enum machine_mode fmode
, imode
, rmode
;
4860 enum insn_code icode
;
4862 location_t loc
= EXPR_LOCATION (exp
);
4864 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4867 arg
= CALL_EXPR_ARG (exp
, 0);
4868 fmode
= TYPE_MODE (TREE_TYPE (arg
));
4869 rmode
= TYPE_MODE (TREE_TYPE (exp
));
4870 fmt
= REAL_MODE_FORMAT (fmode
);
4872 arg
= builtin_save_expr (arg
);
4874 /* Expand the argument yielding a RTX expression. */
4875 temp
= expand_normal (arg
);
4877 /* Check if the back end provides an insn that handles signbit for the
4879 icode
= optab_handler (signbit_optab
, fmode
);
4880 if (icode
!= CODE_FOR_nothing
)
4882 rtx last
= get_last_insn ();
4883 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
4884 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
4886 delete_insns_since (last
);
4889 /* For floating point formats without a sign bit, implement signbit
4891 bitpos
= fmt
->signbit_ro
;
4894 /* But we can't do this if the format supports signed zero. */
4895 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
4898 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
4899 build_real (TREE_TYPE (arg
), dconst0
));
4900 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4903 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
4905 imode
= int_mode_for_mode (fmode
);
4906 if (imode
== BLKmode
)
4908 temp
= gen_lowpart (imode
, temp
);
4913 /* Handle targets with different FP word orders. */
4914 if (FLOAT_WORDS_BIG_ENDIAN
)
4915 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
4917 word
= bitpos
/ BITS_PER_WORD
;
4918 temp
= operand_subword_force (temp
, word
, fmode
);
4919 bitpos
= bitpos
% BITS_PER_WORD
;
4922 /* Force the intermediate word_mode (or narrower) result into a
4923 register. This avoids attempting to create paradoxical SUBREGs
4924 of floating point modes below. */
4925 temp
= force_reg (imode
, temp
);
4927 /* If the bitpos is within the "result mode" lowpart, the operation
4928 can be implement with a single bitwise AND. Otherwise, we need
4929 a right shift and an AND. */
4931 if (bitpos
< GET_MODE_BITSIZE (rmode
))
4933 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
4935 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
4936 temp
= gen_lowpart (rmode
, temp
);
4937 temp
= expand_binop (rmode
, and_optab
, temp
,
4938 immed_wide_int_const (mask
, rmode
),
4939 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
4943 /* Perform a logical right shift to place the signbit in the least
4944 significant bit, then truncate the result to the desired mode
4945 and mask just this bit. */
4946 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
4947 temp
= gen_lowpart (rmode
, temp
);
4948 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
4949 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
4955 /* Expand fork or exec calls. TARGET is the desired target of the
4956 call. EXP is the call. FN is the
4957 identificator of the actual function. IGNORE is nonzero if the
4958 value is to be ignored. */
4961 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
4966 /* If we are not profiling, just call the function. */
4967 if (!profile_arc_flag
)
4970 /* Otherwise call the wrapper. This should be equivalent for the rest of
4971 compiler, so the code does not diverge, and the wrapper may run the
4972 code necessary for keeping the profiling sane. */
4974 switch (DECL_FUNCTION_CODE (fn
))
4977 id
= get_identifier ("__gcov_fork");
4980 case BUILT_IN_EXECL
:
4981 id
= get_identifier ("__gcov_execl");
4984 case BUILT_IN_EXECV
:
4985 id
= get_identifier ("__gcov_execv");
4988 case BUILT_IN_EXECLP
:
4989 id
= get_identifier ("__gcov_execlp");
4992 case BUILT_IN_EXECLE
:
4993 id
= get_identifier ("__gcov_execle");
4996 case BUILT_IN_EXECVP
:
4997 id
= get_identifier ("__gcov_execvp");
5000 case BUILT_IN_EXECVE
:
5001 id
= get_identifier ("__gcov_execve");
5008 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5009 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5010 DECL_EXTERNAL (decl
) = 1;
5011 TREE_PUBLIC (decl
) = 1;
5012 DECL_ARTIFICIAL (decl
) = 1;
5013 TREE_NOTHROW (decl
) = 1;
5014 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5015 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5016 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5017 return expand_call (call
, target
, ignore
);
5022 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5023 the pointer in these functions is void*, the tree optimizers may remove
5024 casts. The mode computed in expand_builtin isn't reliable either, due
5025 to __sync_bool_compare_and_swap.
5027 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5028 group of builtins. This gives us log2 of the mode size. */
5030 static inline enum machine_mode
5031 get_builtin_sync_mode (int fcode_diff
)
5033 /* The size is not negotiable, so ask not to get BLKmode in return
5034 if the target indicates that a smaller size would be better. */
5035 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5038 /* Expand the memory expression LOC and return the appropriate memory operand
5039 for the builtin_sync operations. */
5042 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5046 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
5047 addr
= convert_memory_address (Pmode
, addr
);
5049 /* Note that we explicitly do not want any alias information for this
5050 memory, so that we kill all other live memories. Otherwise we don't
5051 satisfy the full barrier semantics of the intrinsic. */
5052 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5054 /* The alignment needs to be at least according to that of the mode. */
5055 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5056 get_pointer_alignment (loc
)));
5057 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5058 MEM_VOLATILE_P (mem
) = 1;
5063 /* Make sure an argument is in the right mode.
5064 EXP is the tree argument.
5065 MODE is the mode it should be in. */
5068 expand_expr_force_mode (tree exp
, enum machine_mode mode
)
5071 enum machine_mode old_mode
;
5073 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
5074 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5075 of CONST_INTs, where we know the old_mode only from the call argument. */
5077 old_mode
= GET_MODE (val
);
5078 if (old_mode
== VOIDmode
)
5079 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
5080 val
= convert_modes (mode
, old_mode
, val
, 1);
5085 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5086 EXP is the CALL_EXPR. CODE is the rtx code
5087 that corresponds to the arithmetic or logical operation from the name;
5088 an exception here is that NOT actually means NAND. TARGET is an optional
5089 place for us to store the results; AFTER is true if this is the
5090 fetch_and_xxx form. */
5093 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5094 enum rtx_code code
, bool after
,
5098 location_t loc
= EXPR_LOCATION (exp
);
5100 if (code
== NOT
&& warn_sync_nand
)
5102 tree fndecl
= get_callee_fndecl (exp
);
5103 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5105 static bool warned_f_a_n
, warned_n_a_f
;
5109 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
5110 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
5111 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
5112 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
5113 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
5117 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
5118 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5119 warned_f_a_n
= true;
5122 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
5123 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
5124 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
5125 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
5126 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
5130 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
5131 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5132 warned_n_a_f
= true;
5140 /* Expand the operands. */
5141 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5142 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5144 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SEQ_CST
,
5148 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5149 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5150 true if this is the boolean form. TARGET is a place for us to store the
5151 results; this is NOT optional if IS_BOOL is true. */
5154 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5155 bool is_bool
, rtx target
)
5157 rtx old_val
, new_val
, mem
;
5160 /* Expand the operands. */
5161 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5162 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5163 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5165 pbool
= poval
= NULL
;
5166 if (target
!= const0_rtx
)
5173 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
5174 false, MEMMODEL_SEQ_CST
,
5181 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5182 general form is actually an atomic exchange, and some targets only
5183 support a reduced form with the second argument being a constant 1.
5184 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5188 expand_builtin_sync_lock_test_and_set (enum machine_mode mode
, tree exp
,
5193 /* Expand the operands. */
5194 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5195 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5197 return expand_sync_lock_test_and_set (target
, mem
, val
);
5200 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5203 expand_builtin_sync_lock_release (enum machine_mode mode
, tree exp
)
5207 /* Expand the operands. */
5208 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5210 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_RELEASE
, true);
5213 /* Given an integer representing an ``enum memmodel'', verify its
5214 correctness and return the memory model enum. */
5216 static enum memmodel
5217 get_memmodel (tree exp
)
5220 unsigned HOST_WIDE_INT val
;
5222 /* If the parameter is not a constant, it's a run time value so we'll just
5223 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5224 if (TREE_CODE (exp
) != INTEGER_CST
)
5225 return MEMMODEL_SEQ_CST
;
5227 op
= expand_normal (exp
);
5230 if (targetm
.memmodel_check
)
5231 val
= targetm
.memmodel_check (val
);
5232 else if (val
& ~MEMMODEL_MASK
)
5234 warning (OPT_Winvalid_memory_model
,
5235 "Unknown architecture specifier in memory model to builtin.");
5236 return MEMMODEL_SEQ_CST
;
5239 if ((INTVAL (op
) & MEMMODEL_MASK
) >= MEMMODEL_LAST
)
5241 warning (OPT_Winvalid_memory_model
,
5242 "invalid memory model argument to builtin");
5243 return MEMMODEL_SEQ_CST
;
5246 return (enum memmodel
) val
;
5249 /* Expand the __atomic_exchange intrinsic:
5250 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5251 EXP is the CALL_EXPR.
5252 TARGET is an optional place for us to store the results. */
5255 expand_builtin_atomic_exchange (enum machine_mode mode
, tree exp
, rtx target
)
5258 enum memmodel model
;
5260 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5261 if ((model
& MEMMODEL_MASK
) == MEMMODEL_CONSUME
)
5263 error ("invalid memory model for %<__atomic_exchange%>");
5267 if (!flag_inline_atomics
)
5270 /* Expand the operands. */
5271 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5272 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5274 return expand_atomic_exchange (target
, mem
, val
, model
);
5277 /* Expand the __atomic_compare_exchange intrinsic:
5278 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5279 TYPE desired, BOOL weak,
5280 enum memmodel success,
5281 enum memmodel failure)
5282 EXP is the CALL_EXPR.
5283 TARGET is an optional place for us to store the results. */
5286 expand_builtin_atomic_compare_exchange (enum machine_mode mode
, tree exp
,
5289 rtx expect
, desired
, mem
, oldval
;
5290 enum memmodel success
, failure
;
5294 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5295 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5297 if ((failure
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5298 || (failure
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5300 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5304 if (failure
> success
)
5306 error ("failure memory model cannot be stronger than success "
5307 "memory model for %<__atomic_compare_exchange%>");
5311 if (!flag_inline_atomics
)
5314 /* Expand the operands. */
5315 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5317 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5318 expect
= convert_memory_address (Pmode
, expect
);
5319 expect
= gen_rtx_MEM (mode
, expect
);
5320 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5322 weak
= CALL_EXPR_ARG (exp
, 3);
5324 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
5328 if (!expand_atomic_compare_and_swap ((target
== const0_rtx
? NULL
: &target
),
5329 &oldval
, mem
, oldval
, desired
,
5330 is_weak
, success
, failure
))
5333 if (oldval
!= expect
)
5334 emit_move_insn (expect
, oldval
);
5339 /* Expand the __atomic_load intrinsic:
5340 TYPE __atomic_load (TYPE *object, enum memmodel)
5341 EXP is the CALL_EXPR.
5342 TARGET is an optional place for us to store the results. */
5345 expand_builtin_atomic_load (enum machine_mode mode
, tree exp
, rtx target
)
5348 enum memmodel model
;
5350 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5351 if ((model
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5352 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5354 error ("invalid memory model for %<__atomic_load%>");
5358 if (!flag_inline_atomics
)
5361 /* Expand the operand. */
5362 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5364 return expand_atomic_load (target
, mem
, model
);
5368 /* Expand the __atomic_store intrinsic:
5369 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5370 EXP is the CALL_EXPR.
5371 TARGET is an optional place for us to store the results. */
5374 expand_builtin_atomic_store (enum machine_mode mode
, tree exp
)
5377 enum memmodel model
;
5379 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5380 if ((model
& MEMMODEL_MASK
) != MEMMODEL_RELAXED
5381 && (model
& MEMMODEL_MASK
) != MEMMODEL_SEQ_CST
5382 && (model
& MEMMODEL_MASK
) != MEMMODEL_RELEASE
)
5384 error ("invalid memory model for %<__atomic_store%>");
5388 if (!flag_inline_atomics
)
5391 /* Expand the operands. */
5392 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5393 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5395 return expand_atomic_store (mem
, val
, model
, false);
5398 /* Expand the __atomic_fetch_XXX intrinsic:
5399 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5400 EXP is the CALL_EXPR.
5401 TARGET is an optional place for us to store the results.
5402 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5403 FETCH_AFTER is true if returning the result of the operation.
5404 FETCH_AFTER is false if returning the value before the operation.
5405 IGNORE is true if the result is not used.
5406 EXT_CALL is the correct builtin for an external call if this cannot be
5407 resolved to an instruction sequence. */
5410 expand_builtin_atomic_fetch_op (enum machine_mode mode
, tree exp
, rtx target
,
5411 enum rtx_code code
, bool fetch_after
,
5412 bool ignore
, enum built_in_function ext_call
)
5415 enum memmodel model
;
5419 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5421 /* Expand the operands. */
5422 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5423 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5425 /* Only try generating instructions if inlining is turned on. */
5426 if (flag_inline_atomics
)
5428 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5433 /* Return if a different routine isn't needed for the library call. */
5434 if (ext_call
== BUILT_IN_NONE
)
5437 /* Change the call to the specified function. */
5438 fndecl
= get_callee_fndecl (exp
);
5439 addr
= CALL_EXPR_FN (exp
);
5442 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5443 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
5445 /* Expand the call here so we can emit trailing code. */
5446 ret
= expand_call (exp
, target
, ignore
);
5448 /* Replace the original function just in case it matters. */
5449 TREE_OPERAND (addr
, 0) = fndecl
;
5451 /* Then issue the arithmetic correction to return the right result. */
5456 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5458 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5461 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
5468 #ifndef HAVE_atomic_clear
5469 # define HAVE_atomic_clear 0
5470 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5473 /* Expand an atomic clear operation.
5474 void _atomic_clear (BOOL *obj, enum memmodel)
5475 EXP is the call expression. */
5478 expand_builtin_atomic_clear (tree exp
)
5480 enum machine_mode mode
;
5482 enum memmodel model
;
5484 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5485 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5486 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5488 if ((model
& MEMMODEL_MASK
) == MEMMODEL_ACQUIRE
5489 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5491 error ("invalid memory model for %<__atomic_store%>");
5495 if (HAVE_atomic_clear
)
5497 emit_insn (gen_atomic_clear (mem
, model
));
5501 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5502 Failing that, a store is issued by __atomic_store. The only way this can
5503 fail is if the bool type is larger than a word size. Unlikely, but
5504 handle it anyway for completeness. Assume a single threaded model since
5505 there is no atomic support in this case, and no barriers are required. */
5506 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5508 emit_move_insn (mem
, const0_rtx
);
5512 /* Expand an atomic test_and_set operation.
5513 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5514 EXP is the call expression. */
5517 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
5520 enum memmodel model
;
5521 enum machine_mode mode
;
5523 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5524 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5525 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5527 return expand_atomic_test_and_set (target
, mem
, model
);
5531 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5532 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5535 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5538 enum machine_mode mode
;
5539 unsigned int mode_align
, type_align
;
5541 if (TREE_CODE (arg0
) != INTEGER_CST
)
5544 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5545 mode
= mode_for_size (size
, MODE_INT
, 0);
5546 mode_align
= GET_MODE_ALIGNMENT (mode
);
5548 if (TREE_CODE (arg1
) == INTEGER_CST
&& INTVAL (expand_normal (arg1
)) == 0)
5549 type_align
= mode_align
;
5552 tree ttype
= TREE_TYPE (arg1
);
5554 /* This function is usually invoked and folded immediately by the front
5555 end before anything else has a chance to look at it. The pointer
5556 parameter at this point is usually cast to a void *, so check for that
5557 and look past the cast. */
5558 if (TREE_CODE (arg1
) == NOP_EXPR
&& POINTER_TYPE_P (ttype
)
5559 && VOID_TYPE_P (TREE_TYPE (ttype
)))
5560 arg1
= TREE_OPERAND (arg1
, 0);
5562 ttype
= TREE_TYPE (arg1
);
5563 gcc_assert (POINTER_TYPE_P (ttype
));
5565 /* Get the underlying type of the object. */
5566 ttype
= TREE_TYPE (ttype
);
5567 type_align
= TYPE_ALIGN (ttype
);
5570 /* If the object has smaller alignment, the the lock free routines cannot
5572 if (type_align
< mode_align
)
5573 return boolean_false_node
;
5575 /* Check if a compare_and_swap pattern exists for the mode which represents
5576 the required size. The pattern is not allowed to fail, so the existence
5577 of the pattern indicates support is present. */
5578 if (can_compare_and_swap_p (mode
, true))
5579 return boolean_true_node
;
5581 return boolean_false_node
;
5584 /* Return true if the parameters to call EXP represent an object which will
5585 always generate lock free instructions. The first argument represents the
5586 size of the object, and the second parameter is a pointer to the object
5587 itself. If NULL is passed for the object, then the result is based on
5588 typical alignment for an object of the specified size. Otherwise return
5592 expand_builtin_atomic_always_lock_free (tree exp
)
5595 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5596 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5598 if (TREE_CODE (arg0
) != INTEGER_CST
)
5600 error ("non-constant argument 1 to __atomic_always_lock_free");
5604 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5605 if (size
== boolean_true_node
)
5610 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5611 is lock free on this architecture. */
5614 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
5616 if (!flag_inline_atomics
)
5619 /* If it isn't always lock free, don't generate a result. */
5620 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5621 return boolean_true_node
;
5626 /* Return true if the parameters to call EXP represent an object which will
5627 always generate lock free instructions. The first argument represents the
5628 size of the object, and the second parameter is a pointer to the object
5629 itself. If NULL is passed for the object, then the result is based on
5630 typical alignment for an object of the specified size. Otherwise return
5634 expand_builtin_atomic_is_lock_free (tree exp
)
5637 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5638 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5640 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5642 error ("non-integer argument 1 to __atomic_is_lock_free");
5646 if (!flag_inline_atomics
)
5649 /* If the value is known at compile time, return the RTX for it. */
5650 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
5651 if (size
== boolean_true_node
)
5657 /* Expand the __atomic_thread_fence intrinsic:
5658 void __atomic_thread_fence (enum memmodel)
5659 EXP is the CALL_EXPR. */
5662 expand_builtin_atomic_thread_fence (tree exp
)
5664 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5665 expand_mem_thread_fence (model
);
5668 /* Expand the __atomic_signal_fence intrinsic:
5669 void __atomic_signal_fence (enum memmodel)
5670 EXP is the CALL_EXPR. */
5673 expand_builtin_atomic_signal_fence (tree exp
)
5675 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5676 expand_mem_signal_fence (model
);
5679 /* Expand the __sync_synchronize intrinsic. */
5682 expand_builtin_sync_synchronize (void)
5684 expand_mem_thread_fence (MEMMODEL_SEQ_CST
);
5688 expand_builtin_thread_pointer (tree exp
, rtx target
)
5690 enum insn_code icode
;
5691 if (!validate_arglist (exp
, VOID_TYPE
))
5693 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
5694 if (icode
!= CODE_FOR_nothing
)
5696 struct expand_operand op
;
5697 if (!REG_P (target
) || GET_MODE (target
) != Pmode
)
5698 target
= gen_reg_rtx (Pmode
);
5699 create_output_operand (&op
, target
, Pmode
);
5700 expand_insn (icode
, 1, &op
);
5703 error ("__builtin_thread_pointer is not supported on this target");
5708 expand_builtin_set_thread_pointer (tree exp
)
5710 enum insn_code icode
;
5711 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5713 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
5714 if (icode
!= CODE_FOR_nothing
)
5716 struct expand_operand op
;
5717 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
5718 Pmode
, EXPAND_NORMAL
);
5719 create_input_operand (&op
, val
, Pmode
);
5720 expand_insn (icode
, 1, &op
);
5723 error ("__builtin_set_thread_pointer is not supported on this target");
5727 /* Expand an expression EXP that calls a built-in function,
5728 with result going to TARGET if that's convenient
5729 (and in mode MODE if that's convenient).
5730 SUBTARGET may be used as the target for computing one of EXP's operands.
5731 IGNORE is nonzero if the value is to be ignored. */
5734 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
5737 tree fndecl
= get_callee_fndecl (exp
);
5738 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5739 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5742 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5743 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5745 /* When not optimizing, generate calls to library functions for a certain
5748 && !called_as_built_in (fndecl
)
5749 && fcode
!= BUILT_IN_FORK
5750 && fcode
!= BUILT_IN_EXECL
5751 && fcode
!= BUILT_IN_EXECV
5752 && fcode
!= BUILT_IN_EXECLP
5753 && fcode
!= BUILT_IN_EXECLE
5754 && fcode
!= BUILT_IN_EXECVP
5755 && fcode
!= BUILT_IN_EXECVE
5756 && fcode
!= BUILT_IN_ALLOCA
5757 && fcode
!= BUILT_IN_ALLOCA_WITH_ALIGN
5758 && fcode
!= BUILT_IN_FREE
)
5759 return expand_call (exp
, target
, ignore
);
5761 /* The built-in function expanders test for target == const0_rtx
5762 to determine whether the function's result will be ignored. */
5764 target
= const0_rtx
;
5766 /* If the result of a pure or const built-in function is ignored, and
5767 none of its arguments are volatile, we can avoid expanding the
5768 built-in call and just evaluate the arguments for side-effects. */
5769 if (target
== const0_rtx
5770 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
5771 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
5773 bool volatilep
= false;
5775 call_expr_arg_iterator iter
;
5777 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5778 if (TREE_THIS_VOLATILE (arg
))
5786 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5787 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5794 CASE_FLT_FN (BUILT_IN_FABS
):
5795 case BUILT_IN_FABSD32
:
5796 case BUILT_IN_FABSD64
:
5797 case BUILT_IN_FABSD128
:
5798 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5803 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5804 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5809 /* Just do a normal library call if we were unable to fold
5811 CASE_FLT_FN (BUILT_IN_CABS
):
5814 CASE_FLT_FN (BUILT_IN_EXP
):
5815 CASE_FLT_FN (BUILT_IN_EXP10
):
5816 CASE_FLT_FN (BUILT_IN_POW10
):
5817 CASE_FLT_FN (BUILT_IN_EXP2
):
5818 CASE_FLT_FN (BUILT_IN_EXPM1
):
5819 CASE_FLT_FN (BUILT_IN_LOGB
):
5820 CASE_FLT_FN (BUILT_IN_LOG
):
5821 CASE_FLT_FN (BUILT_IN_LOG10
):
5822 CASE_FLT_FN (BUILT_IN_LOG2
):
5823 CASE_FLT_FN (BUILT_IN_LOG1P
):
5824 CASE_FLT_FN (BUILT_IN_TAN
):
5825 CASE_FLT_FN (BUILT_IN_ASIN
):
5826 CASE_FLT_FN (BUILT_IN_ACOS
):
5827 CASE_FLT_FN (BUILT_IN_ATAN
):
5828 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5829 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5830 because of possible accuracy problems. */
5831 if (! flag_unsafe_math_optimizations
)
5833 CASE_FLT_FN (BUILT_IN_SQRT
):
5834 CASE_FLT_FN (BUILT_IN_FLOOR
):
5835 CASE_FLT_FN (BUILT_IN_CEIL
):
5836 CASE_FLT_FN (BUILT_IN_TRUNC
):
5837 CASE_FLT_FN (BUILT_IN_ROUND
):
5838 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5839 CASE_FLT_FN (BUILT_IN_RINT
):
5840 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5845 CASE_FLT_FN (BUILT_IN_FMA
):
5846 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5851 CASE_FLT_FN (BUILT_IN_ILOGB
):
5852 if (! flag_unsafe_math_optimizations
)
5854 CASE_FLT_FN (BUILT_IN_ISINF
):
5855 CASE_FLT_FN (BUILT_IN_FINITE
):
5856 case BUILT_IN_ISFINITE
:
5857 case BUILT_IN_ISNORMAL
:
5858 target
= expand_builtin_interclass_mathfn (exp
, target
);
5863 CASE_FLT_FN (BUILT_IN_ICEIL
):
5864 CASE_FLT_FN (BUILT_IN_LCEIL
):
5865 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5866 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5867 CASE_FLT_FN (BUILT_IN_IFLOOR
):
5868 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5869 target
= expand_builtin_int_roundingfn (exp
, target
);
5874 CASE_FLT_FN (BUILT_IN_IRINT
):
5875 CASE_FLT_FN (BUILT_IN_LRINT
):
5876 CASE_FLT_FN (BUILT_IN_LLRINT
):
5877 CASE_FLT_FN (BUILT_IN_IROUND
):
5878 CASE_FLT_FN (BUILT_IN_LROUND
):
5879 CASE_FLT_FN (BUILT_IN_LLROUND
):
5880 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
5885 CASE_FLT_FN (BUILT_IN_POWI
):
5886 target
= expand_builtin_powi (exp
, target
);
5891 CASE_FLT_FN (BUILT_IN_ATAN2
):
5892 CASE_FLT_FN (BUILT_IN_LDEXP
):
5893 CASE_FLT_FN (BUILT_IN_SCALB
):
5894 CASE_FLT_FN (BUILT_IN_SCALBN
):
5895 CASE_FLT_FN (BUILT_IN_SCALBLN
):
5896 if (! flag_unsafe_math_optimizations
)
5899 CASE_FLT_FN (BUILT_IN_FMOD
):
5900 CASE_FLT_FN (BUILT_IN_REMAINDER
):
5901 CASE_FLT_FN (BUILT_IN_DREM
):
5902 CASE_FLT_FN (BUILT_IN_POW
):
5903 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
5908 CASE_FLT_FN (BUILT_IN_CEXPI
):
5909 target
= expand_builtin_cexpi (exp
, target
);
5910 gcc_assert (target
);
5913 CASE_FLT_FN (BUILT_IN_SIN
):
5914 CASE_FLT_FN (BUILT_IN_COS
):
5915 if (! flag_unsafe_math_optimizations
)
5917 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
5922 CASE_FLT_FN (BUILT_IN_SINCOS
):
5923 if (! flag_unsafe_math_optimizations
)
5925 target
= expand_builtin_sincos (exp
);
5930 case BUILT_IN_APPLY_ARGS
:
5931 return expand_builtin_apply_args ();
5933 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5934 FUNCTION with a copy of the parameters described by
5935 ARGUMENTS, and ARGSIZE. It returns a block of memory
5936 allocated on the stack into which is stored all the registers
5937 that might possibly be used for returning the result of a
5938 function. ARGUMENTS is the value returned by
5939 __builtin_apply_args. ARGSIZE is the number of bytes of
5940 arguments that must be copied. ??? How should this value be
5941 computed? We'll also need a safe worst case value for varargs
5943 case BUILT_IN_APPLY
:
5944 if (!validate_arglist (exp
, POINTER_TYPE
,
5945 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
5946 && !validate_arglist (exp
, REFERENCE_TYPE
,
5947 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
5953 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
5954 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
5955 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
5957 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
5960 /* __builtin_return (RESULT) causes the function to return the
5961 value described by RESULT. RESULT is address of the block of
5962 memory returned by __builtin_apply. */
5963 case BUILT_IN_RETURN
:
5964 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5965 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
5968 case BUILT_IN_SAVEREGS
:
5969 return expand_builtin_saveregs ();
5971 case BUILT_IN_VA_ARG_PACK
:
5972 /* All valid uses of __builtin_va_arg_pack () are removed during
5974 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
5977 case BUILT_IN_VA_ARG_PACK_LEN
:
5978 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5980 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
5983 /* Return the address of the first anonymous stack arg. */
5984 case BUILT_IN_NEXT_ARG
:
5985 if (fold_builtin_next_arg (exp
, false))
5987 return expand_builtin_next_arg ();
5989 case BUILT_IN_CLEAR_CACHE
:
5990 target
= expand_builtin___clear_cache (exp
);
5995 case BUILT_IN_CLASSIFY_TYPE
:
5996 return expand_builtin_classify_type (exp
);
5998 case BUILT_IN_CONSTANT_P
:
6001 case BUILT_IN_FRAME_ADDRESS
:
6002 case BUILT_IN_RETURN_ADDRESS
:
6003 return expand_builtin_frame_address (fndecl
, exp
);
6005 /* Returns the address of the area where the structure is returned.
6007 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6008 if (call_expr_nargs (exp
) != 0
6009 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6010 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6013 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6015 case BUILT_IN_ALLOCA
:
6016 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6017 /* If the allocation stems from the declaration of a variable-sized
6018 object, it cannot accumulate. */
6019 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6024 case BUILT_IN_STACK_SAVE
:
6025 return expand_stack_save ();
6027 case BUILT_IN_STACK_RESTORE
:
6028 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6031 case BUILT_IN_BSWAP16
:
6032 case BUILT_IN_BSWAP32
:
6033 case BUILT_IN_BSWAP64
:
6034 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6039 CASE_INT_FN (BUILT_IN_FFS
):
6040 target
= expand_builtin_unop (target_mode
, exp
, target
,
6041 subtarget
, ffs_optab
);
6046 CASE_INT_FN (BUILT_IN_CLZ
):
6047 target
= expand_builtin_unop (target_mode
, exp
, target
,
6048 subtarget
, clz_optab
);
6053 CASE_INT_FN (BUILT_IN_CTZ
):
6054 target
= expand_builtin_unop (target_mode
, exp
, target
,
6055 subtarget
, ctz_optab
);
6060 CASE_INT_FN (BUILT_IN_CLRSB
):
6061 target
= expand_builtin_unop (target_mode
, exp
, target
,
6062 subtarget
, clrsb_optab
);
6067 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6068 target
= expand_builtin_unop (target_mode
, exp
, target
,
6069 subtarget
, popcount_optab
);
6074 CASE_INT_FN (BUILT_IN_PARITY
):
6075 target
= expand_builtin_unop (target_mode
, exp
, target
,
6076 subtarget
, parity_optab
);
6081 case BUILT_IN_STRLEN
:
6082 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6087 case BUILT_IN_STRCPY
:
6088 target
= expand_builtin_strcpy (exp
, target
);
6093 case BUILT_IN_STRNCPY
:
6094 target
= expand_builtin_strncpy (exp
, target
);
6099 case BUILT_IN_STPCPY
:
6100 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6105 case BUILT_IN_MEMCPY
:
6106 target
= expand_builtin_memcpy (exp
, target
);
6111 case BUILT_IN_MEMPCPY
:
6112 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6117 case BUILT_IN_MEMSET
:
6118 target
= expand_builtin_memset (exp
, target
, mode
);
6123 case BUILT_IN_BZERO
:
6124 target
= expand_builtin_bzero (exp
);
6129 case BUILT_IN_STRCMP
:
6130 target
= expand_builtin_strcmp (exp
, target
);
6135 case BUILT_IN_STRNCMP
:
6136 target
= expand_builtin_strncmp (exp
, target
, mode
);
6142 case BUILT_IN_MEMCMP
:
6143 target
= expand_builtin_memcmp (exp
, target
, mode
);
6148 case BUILT_IN_SETJMP
:
6149 /* This should have been lowered to the builtins below. */
6152 case BUILT_IN_SETJMP_SETUP
:
6153 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6154 and the receiver label. */
6155 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6157 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6158 VOIDmode
, EXPAND_NORMAL
);
6159 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6160 rtx label_r
= label_rtx (label
);
6162 /* This is copied from the handling of non-local gotos. */
6163 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6164 nonlocal_goto_handler_labels
6165 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6166 nonlocal_goto_handler_labels
);
6167 /* ??? Do not let expand_label treat us as such since we would
6168 not want to be both on the list of non-local labels and on
6169 the list of forced labels. */
6170 FORCED_LABEL (label
) = 0;
6175 case BUILT_IN_SETJMP_DISPATCHER
:
6176 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6177 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6179 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6180 rtx label_r
= label_rtx (label
);
6182 /* Remove the dispatcher label from the list of non-local labels
6183 since the receiver labels have been added to it above. */
6184 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6189 case BUILT_IN_SETJMP_RECEIVER
:
6190 /* __builtin_setjmp_receiver is passed the receiver label. */
6191 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6193 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6194 rtx label_r
= label_rtx (label
);
6196 expand_builtin_setjmp_receiver (label_r
);
6201 /* __builtin_longjmp is passed a pointer to an array of five words.
6202 It's similar to the C library longjmp function but works with
6203 __builtin_setjmp above. */
6204 case BUILT_IN_LONGJMP
:
6205 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6207 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6208 VOIDmode
, EXPAND_NORMAL
);
6209 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6211 if (value
!= const1_rtx
)
6213 error ("%<__builtin_longjmp%> second argument must be 1");
6217 expand_builtin_longjmp (buf_addr
, value
);
6222 case BUILT_IN_NONLOCAL_GOTO
:
6223 target
= expand_builtin_nonlocal_goto (exp
);
6228 /* This updates the setjmp buffer that is its argument with the value
6229 of the current stack pointer. */
6230 case BUILT_IN_UPDATE_SETJMP_BUF
:
6231 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6234 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6236 expand_builtin_update_setjmp_buf (buf_addr
);
6242 expand_builtin_trap ();
6245 case BUILT_IN_UNREACHABLE
:
6246 expand_builtin_unreachable ();
6249 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6250 case BUILT_IN_SIGNBITD32
:
6251 case BUILT_IN_SIGNBITD64
:
6252 case BUILT_IN_SIGNBITD128
:
6253 target
= expand_builtin_signbit (exp
, target
);
6258 /* Various hooks for the DWARF 2 __throw routine. */
6259 case BUILT_IN_UNWIND_INIT
:
6260 expand_builtin_unwind_init ();
6262 case BUILT_IN_DWARF_CFA
:
6263 return virtual_cfa_rtx
;
6264 #ifdef DWARF2_UNWIND_INFO
6265 case BUILT_IN_DWARF_SP_COLUMN
:
6266 return expand_builtin_dwarf_sp_column ();
6267 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6268 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6271 case BUILT_IN_FROB_RETURN_ADDR
:
6272 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6273 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6274 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6275 case BUILT_IN_EH_RETURN
:
6276 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6277 CALL_EXPR_ARG (exp
, 1));
6279 #ifdef EH_RETURN_DATA_REGNO
6280 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6281 return expand_builtin_eh_return_data_regno (exp
);
6283 case BUILT_IN_EXTEND_POINTER
:
6284 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6285 case BUILT_IN_EH_POINTER
:
6286 return expand_builtin_eh_pointer (exp
);
6287 case BUILT_IN_EH_FILTER
:
6288 return expand_builtin_eh_filter (exp
);
6289 case BUILT_IN_EH_COPY_VALUES
:
6290 return expand_builtin_eh_copy_values (exp
);
6292 case BUILT_IN_VA_START
:
6293 return expand_builtin_va_start (exp
);
6294 case BUILT_IN_VA_END
:
6295 return expand_builtin_va_end (exp
);
6296 case BUILT_IN_VA_COPY
:
6297 return expand_builtin_va_copy (exp
);
6298 case BUILT_IN_EXPECT
:
6299 return expand_builtin_expect (exp
, target
);
6300 case BUILT_IN_ASSUME_ALIGNED
:
6301 return expand_builtin_assume_aligned (exp
, target
);
6302 case BUILT_IN_PREFETCH
:
6303 expand_builtin_prefetch (exp
);
6306 case BUILT_IN_INIT_TRAMPOLINE
:
6307 return expand_builtin_init_trampoline (exp
, true);
6308 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6309 return expand_builtin_init_trampoline (exp
, false);
6310 case BUILT_IN_ADJUST_TRAMPOLINE
:
6311 return expand_builtin_adjust_trampoline (exp
);
6314 case BUILT_IN_EXECL
:
6315 case BUILT_IN_EXECV
:
6316 case BUILT_IN_EXECLP
:
6317 case BUILT_IN_EXECLE
:
6318 case BUILT_IN_EXECVP
:
6319 case BUILT_IN_EXECVE
:
6320 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6325 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6326 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6327 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6328 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6329 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6330 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6331 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6336 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6337 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6338 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6339 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6340 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6341 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6342 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6347 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6348 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6349 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6350 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6351 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6352 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6353 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6358 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6359 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6360 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6361 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6362 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6363 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6364 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6369 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6370 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6371 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6372 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6373 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6374 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6375 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6380 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6381 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6382 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6383 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6384 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6385 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6386 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6391 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6392 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6393 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6394 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6395 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6396 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6397 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6402 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6403 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6404 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6405 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6406 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6407 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6408 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6413 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6414 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6415 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6416 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6417 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6418 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6419 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6424 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6425 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6426 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6427 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6428 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6429 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6430 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6435 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6436 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6437 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6438 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6439 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6440 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6441 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6446 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6447 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6448 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6449 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6450 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6451 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6452 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6457 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6458 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6459 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6460 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6461 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6462 if (mode
== VOIDmode
)
6463 mode
= TYPE_MODE (boolean_type_node
);
6464 if (!target
|| !register_operand (target
, mode
))
6465 target
= gen_reg_rtx (mode
);
6467 mode
= get_builtin_sync_mode
6468 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6469 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6474 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6475 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6476 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6477 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6478 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6479 mode
= get_builtin_sync_mode
6480 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6481 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6486 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6487 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6488 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6489 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6490 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6491 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6492 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6497 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6498 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6499 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6500 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6501 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6502 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6503 expand_builtin_sync_lock_release (mode
, exp
);
6506 case BUILT_IN_SYNC_SYNCHRONIZE
:
6507 expand_builtin_sync_synchronize ();
6510 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6511 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6512 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6513 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6514 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6515 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6516 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6521 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6522 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6523 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6524 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6525 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6527 unsigned int nargs
, z
;
6528 vec
<tree
, va_gc
> *vec
;
6531 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6532 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6536 /* If this is turned into an external library call, the weak parameter
6537 must be dropped to match the expected parameter list. */
6538 nargs
= call_expr_nargs (exp
);
6539 vec_alloc (vec
, nargs
- 1);
6540 for (z
= 0; z
< 3; z
++)
6541 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6542 /* Skip the boolean weak parameter. */
6543 for (z
= 4; z
< 6; z
++)
6544 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6545 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6549 case BUILT_IN_ATOMIC_LOAD_1
:
6550 case BUILT_IN_ATOMIC_LOAD_2
:
6551 case BUILT_IN_ATOMIC_LOAD_4
:
6552 case BUILT_IN_ATOMIC_LOAD_8
:
6553 case BUILT_IN_ATOMIC_LOAD_16
:
6554 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6555 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6560 case BUILT_IN_ATOMIC_STORE_1
:
6561 case BUILT_IN_ATOMIC_STORE_2
:
6562 case BUILT_IN_ATOMIC_STORE_4
:
6563 case BUILT_IN_ATOMIC_STORE_8
:
6564 case BUILT_IN_ATOMIC_STORE_16
:
6565 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6566 target
= expand_builtin_atomic_store (mode
, exp
);
6571 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6572 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6573 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6574 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6575 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6577 enum built_in_function lib
;
6578 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6579 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6580 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6581 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6587 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6588 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6589 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6590 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6591 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6593 enum built_in_function lib
;
6594 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6595 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6596 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6597 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6603 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6604 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6605 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6606 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6607 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6609 enum built_in_function lib
;
6610 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6611 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6612 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6613 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6619 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6620 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6621 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6622 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6623 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6625 enum built_in_function lib
;
6626 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6627 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6628 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6629 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6635 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6636 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6637 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6638 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6639 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6641 enum built_in_function lib
;
6642 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6643 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6644 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6645 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6651 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6652 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6653 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6654 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6655 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6657 enum built_in_function lib
;
6658 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6659 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6660 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6661 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6667 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6668 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6669 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6670 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6671 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6672 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6673 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6674 ignore
, BUILT_IN_NONE
);
6679 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6680 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6681 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6682 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6683 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6684 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6685 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6686 ignore
, BUILT_IN_NONE
);
6691 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6692 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6693 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6694 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6695 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6696 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6697 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6698 ignore
, BUILT_IN_NONE
);
6703 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6704 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6705 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6706 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6707 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6708 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6709 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6710 ignore
, BUILT_IN_NONE
);
6715 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6716 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6717 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6718 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6719 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6720 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6721 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6722 ignore
, BUILT_IN_NONE
);
6727 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6728 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6729 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6730 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6731 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6732 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6733 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6734 ignore
, BUILT_IN_NONE
);
6739 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6740 return expand_builtin_atomic_test_and_set (exp
, target
);
6742 case BUILT_IN_ATOMIC_CLEAR
:
6743 return expand_builtin_atomic_clear (exp
);
6745 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6746 return expand_builtin_atomic_always_lock_free (exp
);
6748 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6749 target
= expand_builtin_atomic_is_lock_free (exp
);
6754 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6755 expand_builtin_atomic_thread_fence (exp
);
6758 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6759 expand_builtin_atomic_signal_fence (exp
);
6762 case BUILT_IN_OBJECT_SIZE
:
6763 return expand_builtin_object_size (exp
);
6765 case BUILT_IN_MEMCPY_CHK
:
6766 case BUILT_IN_MEMPCPY_CHK
:
6767 case BUILT_IN_MEMMOVE_CHK
:
6768 case BUILT_IN_MEMSET_CHK
:
6769 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6774 case BUILT_IN_STRCPY_CHK
:
6775 case BUILT_IN_STPCPY_CHK
:
6776 case BUILT_IN_STRNCPY_CHK
:
6777 case BUILT_IN_STPNCPY_CHK
:
6778 case BUILT_IN_STRCAT_CHK
:
6779 case BUILT_IN_STRNCAT_CHK
:
6780 case BUILT_IN_SNPRINTF_CHK
:
6781 case BUILT_IN_VSNPRINTF_CHK
:
6782 maybe_emit_chk_warning (exp
, fcode
);
6785 case BUILT_IN_SPRINTF_CHK
:
6786 case BUILT_IN_VSPRINTF_CHK
:
6787 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6791 if (warn_free_nonheap_object
)
6792 maybe_emit_free_warning (exp
);
6795 case BUILT_IN_THREAD_POINTER
:
6796 return expand_builtin_thread_pointer (exp
, target
);
6798 case BUILT_IN_SET_THREAD_POINTER
:
6799 expand_builtin_set_thread_pointer (exp
);
6802 default: /* just do library call, if unknown builtin */
6806 /* The switch statement above can drop through to cause the function
6807 to be called normally. */
6808 return expand_call (exp
, target
, ignore
);
6811 /* Determine whether a tree node represents a call to a built-in
6812 function. If the tree T is a call to a built-in function with
6813 the right number of arguments of the appropriate types, return
6814 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6815 Otherwise the return value is END_BUILTINS. */
6817 enum built_in_function
6818 builtin_mathfn_code (const_tree t
)
6820 const_tree fndecl
, arg
, parmlist
;
6821 const_tree argtype
, parmtype
;
6822 const_call_expr_arg_iterator iter
;
6824 if (TREE_CODE (t
) != CALL_EXPR
6825 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6826 return END_BUILTINS
;
6828 fndecl
= get_callee_fndecl (t
);
6829 if (fndecl
== NULL_TREE
6830 || TREE_CODE (fndecl
) != FUNCTION_DECL
6831 || ! DECL_BUILT_IN (fndecl
)
6832 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6833 return END_BUILTINS
;
6835 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6836 init_const_call_expr_arg_iterator (t
, &iter
);
6837 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6839 /* If a function doesn't take a variable number of arguments,
6840 the last element in the list will have type `void'. */
6841 parmtype
= TREE_VALUE (parmlist
);
6842 if (VOID_TYPE_P (parmtype
))
6844 if (more_const_call_expr_args_p (&iter
))
6845 return END_BUILTINS
;
6846 return DECL_FUNCTION_CODE (fndecl
);
6849 if (! more_const_call_expr_args_p (&iter
))
6850 return END_BUILTINS
;
6852 arg
= next_const_call_expr_arg (&iter
);
6853 argtype
= TREE_TYPE (arg
);
6855 if (SCALAR_FLOAT_TYPE_P (parmtype
))
6857 if (! SCALAR_FLOAT_TYPE_P (argtype
))
6858 return END_BUILTINS
;
6860 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
6862 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
6863 return END_BUILTINS
;
6865 else if (POINTER_TYPE_P (parmtype
))
6867 if (! POINTER_TYPE_P (argtype
))
6868 return END_BUILTINS
;
6870 else if (INTEGRAL_TYPE_P (parmtype
))
6872 if (! INTEGRAL_TYPE_P (argtype
))
6873 return END_BUILTINS
;
6876 return END_BUILTINS
;
6879 /* Variable-length argument list. */
6880 return DECL_FUNCTION_CODE (fndecl
);
6883 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6884 evaluate to a constant. */
6887 fold_builtin_constant_p (tree arg
)
6889 /* We return 1 for a numeric type that's known to be a constant
6890 value at compile-time or for an aggregate type that's a
6891 literal constant. */
6894 /* If we know this is a constant, emit the constant of one. */
6895 if (CONSTANT_CLASS_P (arg
)
6896 || (TREE_CODE (arg
) == CONSTRUCTOR
6897 && TREE_CONSTANT (arg
)))
6898 return integer_one_node
;
6899 if (TREE_CODE (arg
) == ADDR_EXPR
)
6901 tree op
= TREE_OPERAND (arg
, 0);
6902 if (TREE_CODE (op
) == STRING_CST
6903 || (TREE_CODE (op
) == ARRAY_REF
6904 && integer_zerop (TREE_OPERAND (op
, 1))
6905 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
6906 return integer_one_node
;
6909 /* If this expression has side effects, show we don't know it to be a
6910 constant. Likewise if it's a pointer or aggregate type since in
6911 those case we only want literals, since those are only optimized
6912 when generating RTL, not later.
6913 And finally, if we are compiling an initializer, not code, we
6914 need to return a definite result now; there's not going to be any
6915 more optimization done. */
6916 if (TREE_SIDE_EFFECTS (arg
)
6917 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
6918 || POINTER_TYPE_P (TREE_TYPE (arg
))
6920 || folding_initializer
6921 || force_folding_builtin_constant_p
)
6922 return integer_zero_node
;
6927 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6928 return it as a truthvalue. */
6931 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
)
6933 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
6935 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
6936 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
6937 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
6938 pred_type
= TREE_VALUE (arg_types
);
6939 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
6941 pred
= fold_convert_loc (loc
, pred_type
, pred
);
6942 expected
= fold_convert_loc (loc
, expected_type
, expected
);
6943 call_expr
= build_call_expr_loc (loc
, fn
, 2, pred
, expected
);
6945 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
6946 build_int_cst (ret_type
, 0));
6949 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6950 NULL_TREE if no simplification is possible. */
6953 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
)
6955 tree inner
, fndecl
, inner_arg0
;
6956 enum tree_code code
;
6958 /* Distribute the expected value over short-circuiting operators.
6959 See through the cast from truthvalue_type_node to long. */
6961 while (TREE_CODE (inner_arg0
) == NOP_EXPR
6962 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
6963 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
6964 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
6966 /* If this is a builtin_expect within a builtin_expect keep the
6967 inner one. See through a comparison against a constant. It
6968 might have been added to create a thruthvalue. */
6971 if (COMPARISON_CLASS_P (inner
)
6972 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
6973 inner
= TREE_OPERAND (inner
, 0);
6975 if (TREE_CODE (inner
) == CALL_EXPR
6976 && (fndecl
= get_callee_fndecl (inner
))
6977 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
6978 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
6982 code
= TREE_CODE (inner
);
6983 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6985 tree op0
= TREE_OPERAND (inner
, 0);
6986 tree op1
= TREE_OPERAND (inner
, 1);
6988 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
);
6989 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
);
6990 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
6992 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
6995 /* If the argument isn't invariant then there's nothing else we can do. */
6996 if (!TREE_CONSTANT (inner_arg0
))
6999 /* If we expect that a comparison against the argument will fold to
7000 a constant return the constant. In practice, this means a true
7001 constant or the address of a non-weak symbol. */
7004 if (TREE_CODE (inner
) == ADDR_EXPR
)
7008 inner
= TREE_OPERAND (inner
, 0);
7010 while (TREE_CODE (inner
) == COMPONENT_REF
7011 || TREE_CODE (inner
) == ARRAY_REF
);
7012 if ((TREE_CODE (inner
) == VAR_DECL
7013 || TREE_CODE (inner
) == FUNCTION_DECL
)
7014 && DECL_WEAK (inner
))
7018 /* Otherwise, ARG0 already has the proper type for the return value. */
7022 /* Fold a call to __builtin_classify_type with argument ARG. */
7025 fold_builtin_classify_type (tree arg
)
7028 return build_int_cst (integer_type_node
, no_type_class
);
7030 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
7033 /* Fold a call to __builtin_strlen with argument ARG. */
7036 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
7038 if (!validate_arg (arg
, POINTER_TYPE
))
7042 tree len
= c_strlen (arg
, 0);
7045 return fold_convert_loc (loc
, type
, len
);
7051 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7054 fold_builtin_inf (location_t loc
, tree type
, int warn
)
7056 REAL_VALUE_TYPE real
;
7058 /* __builtin_inff is intended to be usable to define INFINITY on all
7059 targets. If an infinity is not available, INFINITY expands "to a
7060 positive constant of type float that overflows at translation
7061 time", footnote "In this case, using INFINITY will violate the
7062 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7063 Thus we pedwarn to ensure this constraint violation is
7065 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7066 pedwarn (loc
, 0, "target format does not support infinity");
7069 return build_real (type
, real
);
7072 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7075 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7077 REAL_VALUE_TYPE real
;
7080 if (!validate_arg (arg
, POINTER_TYPE
))
7082 str
= c_getstr (arg
);
7086 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7089 return build_real (type
, real
);
7092 /* Return true if the floating point expression T has an integer value.
7093 We also allow +Inf, -Inf and NaN to be considered integer values. */
7096 integer_valued_real_p (tree t
)
7098 switch (TREE_CODE (t
))
7105 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7110 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7117 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7118 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7121 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7122 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7125 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7129 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7130 if (TREE_CODE (type
) == INTEGER_TYPE
)
7132 if (TREE_CODE (type
) == REAL_TYPE
)
7133 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7138 switch (builtin_mathfn_code (t
))
7140 CASE_FLT_FN (BUILT_IN_CEIL
):
7141 CASE_FLT_FN (BUILT_IN_FLOOR
):
7142 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7143 CASE_FLT_FN (BUILT_IN_RINT
):
7144 CASE_FLT_FN (BUILT_IN_ROUND
):
7145 CASE_FLT_FN (BUILT_IN_TRUNC
):
7148 CASE_FLT_FN (BUILT_IN_FMIN
):
7149 CASE_FLT_FN (BUILT_IN_FMAX
):
7150 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7151 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7164 /* FNDECL is assumed to be a builtin where truncation can be propagated
7165 across (for instance floor((double)f) == (double)floorf (f).
7166 Do the transformation for a call with argument ARG. */
7169 fold_trunc_transparent_mathfn (location_t loc
, tree fndecl
, tree arg
)
7171 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7173 if (!validate_arg (arg
, REAL_TYPE
))
7176 /* Integer rounding functions are idempotent. */
7177 if (fcode
== builtin_mathfn_code (arg
))
7180 /* If argument is already integer valued, and we don't need to worry
7181 about setting errno, there's no need to perform rounding. */
7182 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7187 tree arg0
= strip_float_extensions (arg
);
7188 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7189 tree newtype
= TREE_TYPE (arg0
);
7192 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7193 && (decl
= mathfn_built_in (newtype
, fcode
)))
7194 return fold_convert_loc (loc
, ftype
,
7195 build_call_expr_loc (loc
, decl
, 1,
7196 fold_convert_loc (loc
,
7203 /* FNDECL is assumed to be builtin which can narrow the FP type of
7204 the argument, for instance lround((double)f) -> lroundf (f).
7205 Do the transformation for a call with argument ARG. */
7208 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
7210 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7212 if (!validate_arg (arg
, REAL_TYPE
))
7215 /* If argument is already integer valued, and we don't need to worry
7216 about setting errno, there's no need to perform rounding. */
7217 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7218 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7219 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7223 tree ftype
= TREE_TYPE (arg
);
7224 tree arg0
= strip_float_extensions (arg
);
7225 tree newtype
= TREE_TYPE (arg0
);
7228 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7229 && (decl
= mathfn_built_in (newtype
, fcode
)))
7230 return build_call_expr_loc (loc
, decl
, 1,
7231 fold_convert_loc (loc
, newtype
, arg0
));
7234 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7235 sizeof (int) == sizeof (long). */
7236 if (TYPE_PRECISION (integer_type_node
)
7237 == TYPE_PRECISION (long_integer_type_node
))
7239 tree newfn
= NULL_TREE
;
7242 CASE_FLT_FN (BUILT_IN_ICEIL
):
7243 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7246 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7247 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7250 CASE_FLT_FN (BUILT_IN_IROUND
):
7251 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7254 CASE_FLT_FN (BUILT_IN_IRINT
):
7255 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7264 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7265 return fold_convert_loc (loc
,
7266 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7270 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7271 sizeof (long long) == sizeof (long). */
7272 if (TYPE_PRECISION (long_long_integer_type_node
)
7273 == TYPE_PRECISION (long_integer_type_node
))
7275 tree newfn
= NULL_TREE
;
7278 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7279 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7282 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7283 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7286 CASE_FLT_FN (BUILT_IN_LLROUND
):
7287 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7290 CASE_FLT_FN (BUILT_IN_LLRINT
):
7291 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7300 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7301 return fold_convert_loc (loc
,
7302 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7309 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7310 return type. Return NULL_TREE if no simplification can be made. */
7313 fold_builtin_cabs (location_t loc
, tree arg
, tree type
, tree fndecl
)
7317 if (!validate_arg (arg
, COMPLEX_TYPE
)
7318 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7321 /* Calculate the result when the argument is a constant. */
7322 if (TREE_CODE (arg
) == COMPLEX_CST
7323 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7327 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7329 tree real
= TREE_OPERAND (arg
, 0);
7330 tree imag
= TREE_OPERAND (arg
, 1);
7332 /* If either part is zero, cabs is fabs of the other. */
7333 if (real_zerop (real
))
7334 return fold_build1_loc (loc
, ABS_EXPR
, type
, imag
);
7335 if (real_zerop (imag
))
7336 return fold_build1_loc (loc
, ABS_EXPR
, type
, real
);
7338 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7339 if (flag_unsafe_math_optimizations
7340 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7342 const REAL_VALUE_TYPE sqrt2_trunc
7343 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
7345 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7346 fold_build1_loc (loc
, ABS_EXPR
, type
, real
),
7347 build_real (type
, sqrt2_trunc
));
7351 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7352 if (TREE_CODE (arg
) == NEGATE_EXPR
7353 || TREE_CODE (arg
) == CONJ_EXPR
)
7354 return build_call_expr_loc (loc
, fndecl
, 1, TREE_OPERAND (arg
, 0));
7356 /* Don't do this when optimizing for size. */
7357 if (flag_unsafe_math_optimizations
7358 && optimize
&& optimize_function_for_speed_p (cfun
))
7360 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7362 if (sqrtfn
!= NULL_TREE
)
7364 tree rpart
, ipart
, result
;
7366 arg
= builtin_save_expr (arg
);
7368 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, type
, arg
);
7369 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg
);
7371 rpart
= builtin_save_expr (rpart
);
7372 ipart
= builtin_save_expr (ipart
);
7374 result
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
7375 fold_build2_loc (loc
, MULT_EXPR
, type
,
7377 fold_build2_loc (loc
, MULT_EXPR
, type
,
7380 return build_call_expr_loc (loc
, sqrtfn
, 1, result
);
7387 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7388 complex tree type of the result. If NEG is true, the imaginary
7389 zero is negative. */
7392 build_complex_cproj (tree type
, bool neg
)
7394 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
7398 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
7399 build_real (TREE_TYPE (type
), rzero
));
7402 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7403 return type. Return NULL_TREE if no simplification can be made. */
7406 fold_builtin_cproj (location_t loc
, tree arg
, tree type
)
7408 if (!validate_arg (arg
, COMPLEX_TYPE
)
7409 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7412 /* If there are no infinities, return arg. */
7413 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type
))))
7414 return non_lvalue_loc (loc
, arg
);
7416 /* Calculate the result when the argument is a constant. */
7417 if (TREE_CODE (arg
) == COMPLEX_CST
)
7419 const REAL_VALUE_TYPE
*real
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
7420 const REAL_VALUE_TYPE
*imag
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
7422 if (real_isinf (real
) || real_isinf (imag
))
7423 return build_complex_cproj (type
, imag
->sign
);
7427 else if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7429 tree real
= TREE_OPERAND (arg
, 0);
7430 tree imag
= TREE_OPERAND (arg
, 1);
7435 /* If the real part is inf and the imag part is known to be
7436 nonnegative, return (inf + 0i). Remember side-effects are
7437 possible in the imag part. */
7438 if (TREE_CODE (real
) == REAL_CST
7439 && real_isinf (TREE_REAL_CST_PTR (real
))
7440 && tree_expr_nonnegative_p (imag
))
7441 return omit_one_operand_loc (loc
, type
,
7442 build_complex_cproj (type
, false),
7445 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7446 Remember side-effects are possible in the real part. */
7447 if (TREE_CODE (imag
) == REAL_CST
7448 && real_isinf (TREE_REAL_CST_PTR (imag
)))
7450 omit_one_operand_loc (loc
, type
,
7451 build_complex_cproj (type
, TREE_REAL_CST_PTR
7452 (imag
)->sign
), arg
);
7458 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7459 Return NULL_TREE if no simplification can be made. */
7462 fold_builtin_sqrt (location_t loc
, tree arg
, tree type
)
7465 enum built_in_function fcode
;
7468 if (!validate_arg (arg
, REAL_TYPE
))
7471 /* Calculate the result when the argument is a constant. */
7472 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7475 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7476 fcode
= builtin_mathfn_code (arg
);
7477 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7479 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7480 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7481 CALL_EXPR_ARG (arg
, 0),
7482 build_real (type
, dconsthalf
));
7483 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7486 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7487 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7489 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7493 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7495 /* The inner root was either sqrt or cbrt. */
7496 /* This was a conditional expression but it triggered a bug
7498 REAL_VALUE_TYPE dconstroot
;
7499 if (BUILTIN_SQRT_P (fcode
))
7500 dconstroot
= dconsthalf
;
7502 dconstroot
= dconst_third ();
7504 /* Adjust for the outer root. */
7505 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7506 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7507 tree_root
= build_real (type
, dconstroot
);
7508 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7512 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7513 if (flag_unsafe_math_optimizations
7514 && (fcode
== BUILT_IN_POW
7515 || fcode
== BUILT_IN_POWF
7516 || fcode
== BUILT_IN_POWL
))
7518 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7519 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7520 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7522 if (!tree_expr_nonnegative_p (arg0
))
7523 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7524 narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
7525 build_real (type
, dconsthalf
));
7526 return build_call_expr_loc (loc
, powfn
, 2, arg0
, narg1
);
7532 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7533 Return NULL_TREE if no simplification can be made. */
7536 fold_builtin_cbrt (location_t loc
, tree arg
, tree type
)
7538 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7541 if (!validate_arg (arg
, REAL_TYPE
))
7544 /* Calculate the result when the argument is a constant. */
7545 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7548 if (flag_unsafe_math_optimizations
)
7550 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7551 if (BUILTIN_EXPONENT_P (fcode
))
7553 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7554 const REAL_VALUE_TYPE third_trunc
=
7555 real_value_truncate (TYPE_MODE (type
), dconst_third ());
7556 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7557 CALL_EXPR_ARG (arg
, 0),
7558 build_real (type
, third_trunc
));
7559 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7562 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7563 if (BUILTIN_SQRT_P (fcode
))
7565 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7569 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7571 REAL_VALUE_TYPE dconstroot
= dconst_third ();
7573 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7574 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7575 tree_root
= build_real (type
, dconstroot
);
7576 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7580 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7581 if (BUILTIN_CBRT_P (fcode
))
7583 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7584 if (tree_expr_nonnegative_p (arg0
))
7586 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7591 REAL_VALUE_TYPE dconstroot
;
7593 real_arithmetic (&dconstroot
, MULT_EXPR
,
7594 dconst_third_ptr (), dconst_third_ptr ());
7595 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7596 tree_root
= build_real (type
, dconstroot
);
7597 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7602 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7603 if (fcode
== BUILT_IN_POW
7604 || fcode
== BUILT_IN_POWF
7605 || fcode
== BUILT_IN_POWL
)
7607 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7608 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7609 if (tree_expr_nonnegative_p (arg00
))
7611 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7612 const REAL_VALUE_TYPE dconstroot
7613 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
7614 tree narg01
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
,
7615 build_real (type
, dconstroot
));
7616 return build_call_expr_loc (loc
, powfn
, 2, arg00
, narg01
);
7623 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7624 TYPE is the type of the return value. Return NULL_TREE if no
7625 simplification can be made. */
7628 fold_builtin_cos (location_t loc
,
7629 tree arg
, tree type
, tree fndecl
)
7633 if (!validate_arg (arg
, REAL_TYPE
))
7636 /* Calculate the result when the argument is a constant. */
7637 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7640 /* Optimize cos(-x) into cos (x). */
7641 if ((narg
= fold_strip_sign_ops (arg
)))
7642 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7647 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7648 Return NULL_TREE if no simplification can be made. */
7651 fold_builtin_cosh (location_t loc
, tree arg
, tree type
, tree fndecl
)
7653 if (validate_arg (arg
, REAL_TYPE
))
7657 /* Calculate the result when the argument is a constant. */
7658 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7661 /* Optimize cosh(-x) into cosh (x). */
7662 if ((narg
= fold_strip_sign_ops (arg
)))
7663 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7669 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7670 argument ARG. TYPE is the type of the return value. Return
7671 NULL_TREE if no simplification can be made. */
7674 fold_builtin_ccos (location_t loc
, tree arg
, tree type
, tree fndecl
,
7677 if (validate_arg (arg
, COMPLEX_TYPE
)
7678 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
7682 /* Calculate the result when the argument is a constant. */
7683 if ((tmp
= do_mpc_arg1 (arg
, type
, (hyper
? mpc_cosh
: mpc_cos
))))
7686 /* Optimize fn(-x) into fn(x). */
7687 if ((tmp
= fold_strip_sign_ops (arg
)))
7688 return build_call_expr_loc (loc
, fndecl
, 1, tmp
);
7694 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7695 Return NULL_TREE if no simplification can be made. */
7698 fold_builtin_tan (tree arg
, tree type
)
7700 enum built_in_function fcode
;
7703 if (!validate_arg (arg
, REAL_TYPE
))
7706 /* Calculate the result when the argument is a constant. */
7707 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7710 /* Optimize tan(atan(x)) = x. */
7711 fcode
= builtin_mathfn_code (arg
);
7712 if (flag_unsafe_math_optimizations
7713 && (fcode
== BUILT_IN_ATAN
7714 || fcode
== BUILT_IN_ATANF
7715 || fcode
== BUILT_IN_ATANL
))
7716 return CALL_EXPR_ARG (arg
, 0);
7721 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7722 NULL_TREE if no simplification can be made. */
7725 fold_builtin_sincos (location_t loc
,
7726 tree arg0
, tree arg1
, tree arg2
)
7731 if (!validate_arg (arg0
, REAL_TYPE
)
7732 || !validate_arg (arg1
, POINTER_TYPE
)
7733 || !validate_arg (arg2
, POINTER_TYPE
))
7736 type
= TREE_TYPE (arg0
);
7738 /* Calculate the result when the argument is a constant. */
7739 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7742 /* Canonicalize sincos to cexpi. */
7743 if (!targetm
.libc_has_function (function_c99_math_complex
))
7745 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7749 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7750 call
= builtin_save_expr (call
);
7752 return build2 (COMPOUND_EXPR
, void_type_node
,
7753 build2 (MODIFY_EXPR
, void_type_node
,
7754 build_fold_indirect_ref_loc (loc
, arg1
),
7755 build1 (IMAGPART_EXPR
, type
, call
)),
7756 build2 (MODIFY_EXPR
, void_type_node
,
7757 build_fold_indirect_ref_loc (loc
, arg2
),
7758 build1 (REALPART_EXPR
, type
, call
)));
7761 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7762 NULL_TREE if no simplification can be made. */
7765 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
7768 tree realp
, imagp
, ifn
;
7771 if (!validate_arg (arg0
, COMPLEX_TYPE
)
7772 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
7775 /* Calculate the result when the argument is a constant. */
7776 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
7779 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7781 /* In case we can figure out the real part of arg0 and it is constant zero
7783 if (!targetm
.libc_has_function (function_c99_math_complex
))
7785 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7789 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
7790 && real_zerop (realp
))
7792 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7793 return build_call_expr_loc (loc
, ifn
, 1, narg
);
7796 /* In case we can easily decompose real and imaginary parts split cexp
7797 to exp (r) * cexpi (i). */
7798 if (flag_unsafe_math_optimizations
7801 tree rfn
, rcall
, icall
;
7803 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7807 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7811 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
7812 icall
= builtin_save_expr (icall
);
7813 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
7814 rcall
= builtin_save_expr (rcall
);
7815 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
7816 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7818 fold_build1_loc (loc
, REALPART_EXPR
,
7820 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7822 fold_build1_loc (loc
, IMAGPART_EXPR
,
7829 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7830 Return NULL_TREE if no simplification can be made. */
7833 fold_builtin_trunc (location_t loc
, tree fndecl
, tree arg
)
7835 if (!validate_arg (arg
, REAL_TYPE
))
7838 /* Optimize trunc of constant value. */
7839 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7841 REAL_VALUE_TYPE r
, x
;
7842 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7844 x
= TREE_REAL_CST (arg
);
7845 real_trunc (&r
, TYPE_MODE (type
), &x
);
7846 return build_real (type
, r
);
7849 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7852 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7853 Return NULL_TREE if no simplification can be made. */
7856 fold_builtin_floor (location_t loc
, tree fndecl
, tree arg
)
7858 if (!validate_arg (arg
, REAL_TYPE
))
7861 /* Optimize floor of constant value. */
7862 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7866 x
= TREE_REAL_CST (arg
);
7867 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7869 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7872 real_floor (&r
, TYPE_MODE (type
), &x
);
7873 return build_real (type
, r
);
7877 /* Fold floor (x) where x is nonnegative to trunc (x). */
7878 if (tree_expr_nonnegative_p (arg
))
7880 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7882 return build_call_expr_loc (loc
, truncfn
, 1, arg
);
7885 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7888 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7889 Return NULL_TREE if no simplification can be made. */
7892 fold_builtin_ceil (location_t loc
, tree fndecl
, tree arg
)
7894 if (!validate_arg (arg
, REAL_TYPE
))
7897 /* Optimize ceil of constant value. */
7898 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7902 x
= TREE_REAL_CST (arg
);
7903 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7905 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7908 real_ceil (&r
, TYPE_MODE (type
), &x
);
7909 return build_real (type
, r
);
7913 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7916 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7917 Return NULL_TREE if no simplification can be made. */
7920 fold_builtin_round (location_t loc
, tree fndecl
, tree arg
)
7922 if (!validate_arg (arg
, REAL_TYPE
))
7925 /* Optimize round of constant value. */
7926 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7930 x
= TREE_REAL_CST (arg
);
7931 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7933 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7936 real_round (&r
, TYPE_MODE (type
), &x
);
7937 return build_real (type
, r
);
7941 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7944 /* Fold function call to builtin lround, lroundf or lroundl (or the
7945 corresponding long long versions) and other rounding functions. ARG
7946 is the argument to the call. Return NULL_TREE if no simplification
7950 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
7952 if (!validate_arg (arg
, REAL_TYPE
))
7955 /* Optimize lround of constant value. */
7956 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7958 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7960 if (real_isfinite (&x
))
7962 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7963 tree ftype
= TREE_TYPE (arg
);
7968 switch (DECL_FUNCTION_CODE (fndecl
))
7970 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7971 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7972 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7973 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7976 CASE_FLT_FN (BUILT_IN_ICEIL
):
7977 CASE_FLT_FN (BUILT_IN_LCEIL
):
7978 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7979 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7982 CASE_FLT_FN (BUILT_IN_IROUND
):
7983 CASE_FLT_FN (BUILT_IN_LROUND
):
7984 CASE_FLT_FN (BUILT_IN_LLROUND
):
7985 real_round (&r
, TYPE_MODE (ftype
), &x
);
7992 val
= real_to_integer (&r
, &fail
,
7993 TYPE_PRECISION (itype
));
7995 return wide_int_to_tree (itype
, val
);
7999 switch (DECL_FUNCTION_CODE (fndecl
))
8001 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8002 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8003 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8004 if (tree_expr_nonnegative_p (arg
))
8005 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
8006 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
8011 return fold_fixed_mathfn (loc
, fndecl
, arg
);
8014 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8015 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8016 the argument to the call. Return NULL_TREE if no simplification can
8020 fold_builtin_bitop (tree fndecl
, tree arg
)
8022 if (!validate_arg (arg
, INTEGER_TYPE
))
8025 /* Optimize for constant argument. */
8026 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8028 wide_int warg
= arg
;
8031 switch (DECL_FUNCTION_CODE (fndecl
))
8033 CASE_INT_FN (BUILT_IN_FFS
):
8034 result
= wi::ffs (warg
);
8037 CASE_INT_FN (BUILT_IN_CLZ
):
8038 result
= wi::clz (warg
);
8041 CASE_INT_FN (BUILT_IN_CTZ
):
8042 result
= wi::ctz (warg
);
8045 CASE_INT_FN (BUILT_IN_CLRSB
):
8046 result
= wi::clrsb (warg
);
8049 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8050 result
= wi::popcount (warg
);
8053 CASE_INT_FN (BUILT_IN_PARITY
):
8054 result
= wi::parity (warg
);
8061 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8067 /* Fold function call to builtin_bswap and the short, long and long long
8068 variants. Return NULL_TREE if no simplification can be made. */
8070 fold_builtin_bswap (tree fndecl
, tree arg
)
8072 if (! validate_arg (arg
, INTEGER_TYPE
))
8075 /* Optimize constant value. */
8076 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8078 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8080 switch (DECL_FUNCTION_CODE (fndecl
))
8082 case BUILT_IN_BSWAP16
:
8083 case BUILT_IN_BSWAP32
:
8084 case BUILT_IN_BSWAP64
:
8086 signop sgn
= TYPE_SIGN (type
);
8088 wide_int_to_tree (type
,
8089 wide_int::from (arg
, TYPE_PRECISION (type
),
8101 /* A subroutine of fold_builtin to fold the various logarithmic
8102 functions. Return NULL_TREE if no simplification can me made.
8103 FUNC is the corresponding MPFR logarithm function. */
8106 fold_builtin_logarithm (location_t loc
, tree fndecl
, tree arg
,
8107 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8109 if (validate_arg (arg
, REAL_TYPE
))
8111 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8113 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8115 /* Calculate the result when the argument is a constant. */
8116 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8119 /* Special case, optimize logN(expN(x)) = x. */
8120 if (flag_unsafe_math_optimizations
8121 && ((func
== mpfr_log
8122 && (fcode
== BUILT_IN_EXP
8123 || fcode
== BUILT_IN_EXPF
8124 || fcode
== BUILT_IN_EXPL
))
8125 || (func
== mpfr_log2
8126 && (fcode
== BUILT_IN_EXP2
8127 || fcode
== BUILT_IN_EXP2F
8128 || fcode
== BUILT_IN_EXP2L
))
8129 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8130 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8132 /* Optimize logN(func()) for various exponential functions. We
8133 want to determine the value "x" and the power "exponent" in
8134 order to transform logN(x**exponent) into exponent*logN(x). */
8135 if (flag_unsafe_math_optimizations
)
8137 tree exponent
= 0, x
= 0;
8141 CASE_FLT_FN (BUILT_IN_EXP
):
8142 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8143 x
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8145 exponent
= CALL_EXPR_ARG (arg
, 0);
8147 CASE_FLT_FN (BUILT_IN_EXP2
):
8148 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8149 x
= build_real (type
, dconst2
);
8150 exponent
= CALL_EXPR_ARG (arg
, 0);
8152 CASE_FLT_FN (BUILT_IN_EXP10
):
8153 CASE_FLT_FN (BUILT_IN_POW10
):
8154 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8156 REAL_VALUE_TYPE dconst10
;
8157 real_from_integer (&dconst10
, VOIDmode
, 10, SIGNED
);
8158 x
= build_real (type
, dconst10
);
8160 exponent
= CALL_EXPR_ARG (arg
, 0);
8162 CASE_FLT_FN (BUILT_IN_SQRT
):
8163 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8164 x
= CALL_EXPR_ARG (arg
, 0);
8165 exponent
= build_real (type
, dconsthalf
);
8167 CASE_FLT_FN (BUILT_IN_CBRT
):
8168 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8169 x
= CALL_EXPR_ARG (arg
, 0);
8170 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8173 CASE_FLT_FN (BUILT_IN_POW
):
8174 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8175 x
= CALL_EXPR_ARG (arg
, 0);
8176 exponent
= CALL_EXPR_ARG (arg
, 1);
8182 /* Now perform the optimization. */
8185 tree logfn
= build_call_expr_loc (loc
, fndecl
, 1, x
);
8186 return fold_build2_loc (loc
, MULT_EXPR
, type
, exponent
, logfn
);
8194 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8195 NULL_TREE if no simplification can be made. */
8198 fold_builtin_hypot (location_t loc
, tree fndecl
,
8199 tree arg0
, tree arg1
, tree type
)
8201 tree res
, narg0
, narg1
;
8203 if (!validate_arg (arg0
, REAL_TYPE
)
8204 || !validate_arg (arg1
, REAL_TYPE
))
8207 /* Calculate the result when the argument is a constant. */
8208 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8211 /* If either argument to hypot has a negate or abs, strip that off.
8212 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8213 narg0
= fold_strip_sign_ops (arg0
);
8214 narg1
= fold_strip_sign_ops (arg1
);
8217 return build_call_expr_loc (loc
, fndecl
, 2, narg0
? narg0
: arg0
,
8218 narg1
? narg1
: arg1
);
8221 /* If either argument is zero, hypot is fabs of the other. */
8222 if (real_zerop (arg0
))
8223 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
);
8224 else if (real_zerop (arg1
))
8225 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
);
8227 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8228 if (flag_unsafe_math_optimizations
8229 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8231 const REAL_VALUE_TYPE sqrt2_trunc
8232 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
8233 return fold_build2_loc (loc
, MULT_EXPR
, type
,
8234 fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
),
8235 build_real (type
, sqrt2_trunc
));
8242 /* Fold a builtin function call to pow, powf, or powl. Return
8243 NULL_TREE if no simplification can be made. */
8245 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8249 if (!validate_arg (arg0
, REAL_TYPE
)
8250 || !validate_arg (arg1
, REAL_TYPE
))
8253 /* Calculate the result when the argument is a constant. */
8254 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8257 /* Optimize pow(1.0,y) = 1.0. */
8258 if (real_onep (arg0
))
8259 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8261 if (TREE_CODE (arg1
) == REAL_CST
8262 && !TREE_OVERFLOW (arg1
))
8264 REAL_VALUE_TYPE cint
;
8268 c
= TREE_REAL_CST (arg1
);
8270 /* Optimize pow(x,0.0) = 1.0. */
8271 if (REAL_VALUES_EQUAL (c
, dconst0
))
8272 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8275 /* Optimize pow(x,1.0) = x. */
8276 if (REAL_VALUES_EQUAL (c
, dconst1
))
8279 /* Optimize pow(x,-1.0) = 1.0/x. */
8280 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8281 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8282 build_real (type
, dconst1
), arg0
);
8284 /* Optimize pow(x,0.5) = sqrt(x). */
8285 if (flag_unsafe_math_optimizations
8286 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8288 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8290 if (sqrtfn
!= NULL_TREE
)
8291 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8294 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8295 if (flag_unsafe_math_optimizations
)
8297 const REAL_VALUE_TYPE dconstroot
8298 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8300 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8302 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8303 if (cbrtfn
!= NULL_TREE
)
8304 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8308 /* Check for an integer exponent. */
8309 n
= real_to_integer (&c
);
8310 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
8311 if (real_identical (&c
, &cint
))
8313 /* Attempt to evaluate pow at compile-time, unless this should
8314 raise an exception. */
8315 if (TREE_CODE (arg0
) == REAL_CST
8316 && !TREE_OVERFLOW (arg0
)
8318 || (!flag_trapping_math
&& !flag_errno_math
)
8319 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8324 x
= TREE_REAL_CST (arg0
);
8325 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8326 if (flag_unsafe_math_optimizations
|| !inexact
)
8327 return build_real (type
, x
);
8330 /* Strip sign ops from even integer powers. */
8331 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8333 tree narg0
= fold_strip_sign_ops (arg0
);
8335 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8340 if (flag_unsafe_math_optimizations
)
8342 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8344 /* Optimize pow(expN(x),y) = expN(x*y). */
8345 if (BUILTIN_EXPONENT_P (fcode
))
8347 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8348 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8349 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8350 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8353 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8354 if (BUILTIN_SQRT_P (fcode
))
8356 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8357 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8358 build_real (type
, dconsthalf
));
8359 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8362 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8363 if (BUILTIN_CBRT_P (fcode
))
8365 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8366 if (tree_expr_nonnegative_p (arg
))
8368 const REAL_VALUE_TYPE dconstroot
8369 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8370 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8371 build_real (type
, dconstroot
));
8372 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8376 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8377 if (fcode
== BUILT_IN_POW
8378 || fcode
== BUILT_IN_POWF
8379 || fcode
== BUILT_IN_POWL
)
8381 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8382 if (tree_expr_nonnegative_p (arg00
))
8384 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8385 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8386 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8394 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8395 Return NULL_TREE if no simplification can be made. */
8397 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8398 tree arg0
, tree arg1
, tree type
)
8400 if (!validate_arg (arg0
, REAL_TYPE
)
8401 || !validate_arg (arg1
, INTEGER_TYPE
))
8404 /* Optimize pow(1.0,y) = 1.0. */
8405 if (real_onep (arg0
))
8406 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8408 if (tree_fits_shwi_p (arg1
))
8410 HOST_WIDE_INT c
= tree_to_shwi (arg1
);
8412 /* Evaluate powi at compile-time. */
8413 if (TREE_CODE (arg0
) == REAL_CST
8414 && !TREE_OVERFLOW (arg0
))
8417 x
= TREE_REAL_CST (arg0
);
8418 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8419 return build_real (type
, x
);
8422 /* Optimize pow(x,0) = 1.0. */
8424 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8427 /* Optimize pow(x,1) = x. */
8431 /* Optimize pow(x,-1) = 1.0/x. */
8433 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8434 build_real (type
, dconst1
), arg0
);
8440 /* A subroutine of fold_builtin to fold the various exponent
8441 functions. Return NULL_TREE if no simplification can be made.
8442 FUNC is the corresponding MPFR exponent function. */
8445 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8446 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8448 if (validate_arg (arg
, REAL_TYPE
))
8450 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8453 /* Calculate the result when the argument is a constant. */
8454 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8457 /* Optimize expN(logN(x)) = x. */
8458 if (flag_unsafe_math_optimizations
)
8460 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8462 if ((func
== mpfr_exp
8463 && (fcode
== BUILT_IN_LOG
8464 || fcode
== BUILT_IN_LOGF
8465 || fcode
== BUILT_IN_LOGL
))
8466 || (func
== mpfr_exp2
8467 && (fcode
== BUILT_IN_LOG2
8468 || fcode
== BUILT_IN_LOG2F
8469 || fcode
== BUILT_IN_LOG2L
))
8470 || (func
== mpfr_exp10
8471 && (fcode
== BUILT_IN_LOG10
8472 || fcode
== BUILT_IN_LOG10F
8473 || fcode
== BUILT_IN_LOG10L
)))
8474 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8481 /* Return true if VAR is a VAR_DECL or a component thereof. */
8484 var_decl_component_p (tree var
)
8487 while (handled_component_p (inner
))
8488 inner
= TREE_OPERAND (inner
, 0);
8489 return SSA_VAR_P (inner
);
8492 /* Fold function call to builtin memset. Return
8493 NULL_TREE if no simplification can be made. */
8496 fold_builtin_memset (location_t loc
, tree dest
, tree c
, tree len
,
8497 tree type
, bool ignore
)
8499 tree var
, ret
, etype
;
8500 unsigned HOST_WIDE_INT length
, cval
;
8502 if (! validate_arg (dest
, POINTER_TYPE
)
8503 || ! validate_arg (c
, INTEGER_TYPE
)
8504 || ! validate_arg (len
, INTEGER_TYPE
))
8507 if (! tree_fits_uhwi_p (len
))
8510 /* If the LEN parameter is zero, return DEST. */
8511 if (integer_zerop (len
))
8512 return omit_one_operand_loc (loc
, type
, dest
, c
);
8514 if (TREE_CODE (c
) != INTEGER_CST
|| TREE_SIDE_EFFECTS (dest
))
8519 if (TREE_CODE (var
) != ADDR_EXPR
)
8522 var
= TREE_OPERAND (var
, 0);
8523 if (TREE_THIS_VOLATILE (var
))
8526 etype
= TREE_TYPE (var
);
8527 if (TREE_CODE (etype
) == ARRAY_TYPE
)
8528 etype
= TREE_TYPE (etype
);
8530 if (!INTEGRAL_TYPE_P (etype
)
8531 && !POINTER_TYPE_P (etype
))
8534 if (! var_decl_component_p (var
))
8537 length
= tree_to_uhwi (len
);
8538 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
8539 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
8542 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8545 if (integer_zerop (c
))
8549 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8552 cval
= tree_to_hwi (c
);
8556 cval
|= (cval
<< 31) << 1;
8559 ret
= build_int_cst_type (etype
, cval
);
8560 var
= build_fold_indirect_ref_loc (loc
,
8561 fold_convert_loc (loc
,
8562 build_pointer_type (etype
),
8564 ret
= build2 (MODIFY_EXPR
, etype
, var
, ret
);
8568 return omit_one_operand_loc (loc
, type
, dest
, ret
);
8571 /* Fold function call to builtin memset. Return
8572 NULL_TREE if no simplification can be made. */
8575 fold_builtin_bzero (location_t loc
, tree dest
, tree size
, bool ignore
)
8577 if (! validate_arg (dest
, POINTER_TYPE
)
8578 || ! validate_arg (size
, INTEGER_TYPE
))
8584 /* New argument list transforming bzero(ptr x, int y) to
8585 memset(ptr x, int 0, size_t y). This is done this way
8586 so that if it isn't expanded inline, we fallback to
8587 calling bzero instead of memset. */
8589 return fold_builtin_memset (loc
, dest
, integer_zero_node
,
8590 fold_convert_loc (loc
, size_type_node
, size
),
8591 void_type_node
, ignore
);
8594 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8595 NULL_TREE if no simplification can be made.
8596 If ENDP is 0, return DEST (like memcpy).
8597 If ENDP is 1, return DEST+LEN (like mempcpy).
8598 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8599 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8603 fold_builtin_memory_op (location_t loc
, tree dest
, tree src
,
8604 tree len
, tree type
, bool ignore
, int endp
)
8606 tree destvar
, srcvar
, expr
;
8608 if (! validate_arg (dest
, POINTER_TYPE
)
8609 || ! validate_arg (src
, POINTER_TYPE
)
8610 || ! validate_arg (len
, INTEGER_TYPE
))
8613 /* If the LEN parameter is zero, return DEST. */
8614 if (integer_zerop (len
))
8615 return omit_one_operand_loc (loc
, type
, dest
, src
);
8617 /* If SRC and DEST are the same (and not volatile), return
8618 DEST{,+LEN,+LEN-1}. */
8619 if (operand_equal_p (src
, dest
, 0))
8623 tree srctype
, desttype
;
8624 unsigned int src_align
, dest_align
;
8629 src_align
= get_pointer_alignment (src
);
8630 dest_align
= get_pointer_alignment (dest
);
8632 /* Both DEST and SRC must be pointer types.
8633 ??? This is what old code did. Is the testing for pointer types
8636 If either SRC is readonly or length is 1, we can use memcpy. */
8637 if (!dest_align
|| !src_align
)
8639 if (readonly_data_expr (src
)
8640 || (tree_fits_uhwi_p (len
)
8641 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
8642 >= (unsigned HOST_WIDE_INT
) tree_to_uhwi (len
))))
8644 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8647 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8650 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8651 if (TREE_CODE (src
) == ADDR_EXPR
8652 && TREE_CODE (dest
) == ADDR_EXPR
)
8654 tree src_base
, dest_base
, fn
;
8655 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
8656 HOST_WIDE_INT size
= -1;
8657 HOST_WIDE_INT maxsize
= -1;
8659 srcvar
= TREE_OPERAND (src
, 0);
8660 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
8662 destvar
= TREE_OPERAND (dest
, 0);
8663 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
8665 if (tree_fits_uhwi_p (len
))
8666 maxsize
= tree_to_uhwi (len
);
8669 src_offset
/= BITS_PER_UNIT
;
8670 dest_offset
/= BITS_PER_UNIT
;
8671 if (SSA_VAR_P (src_base
)
8672 && SSA_VAR_P (dest_base
))
8674 if (operand_equal_p (src_base
, dest_base
, 0)
8675 && ranges_overlap_p (src_offset
, maxsize
,
8676 dest_offset
, maxsize
))
8679 else if (TREE_CODE (src_base
) == MEM_REF
8680 && TREE_CODE (dest_base
) == MEM_REF
)
8683 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
8684 TREE_OPERAND (dest_base
, 0), 0))
8686 off
= mem_ref_offset (src_base
) + src_offset
;
8687 if (!wi::fits_shwi_p (off
))
8689 src_offset
= off
.to_shwi ();
8691 off
= mem_ref_offset (dest_base
) + dest_offset
;
8692 if (!wi::fits_shwi_p (off
))
8694 dest_offset
= off
.to_shwi ();
8695 if (ranges_overlap_p (src_offset
, maxsize
,
8696 dest_offset
, maxsize
))
8702 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8705 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8708 /* If the destination and source do not alias optimize into
8710 if ((is_gimple_min_invariant (dest
)
8711 || TREE_CODE (dest
) == SSA_NAME
)
8712 && (is_gimple_min_invariant (src
)
8713 || TREE_CODE (src
) == SSA_NAME
))
8716 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
8717 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
8718 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
8721 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8724 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8731 if (!tree_fits_shwi_p (len
))
8734 This logic lose for arguments like (type *)malloc (sizeof (type)),
8735 since we strip the casts of up to VOID return value from malloc.
8736 Perhaps we ought to inherit type from non-VOID argument here? */
8739 if (!POINTER_TYPE_P (TREE_TYPE (src
))
8740 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
8742 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8743 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
8745 tree tem
= TREE_OPERAND (src
, 0);
8747 if (tem
!= TREE_OPERAND (src
, 0))
8748 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
8750 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
8752 tree tem
= TREE_OPERAND (dest
, 0);
8754 if (tem
!= TREE_OPERAND (dest
, 0))
8755 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
8757 srctype
= TREE_TYPE (TREE_TYPE (src
));
8758 if (TREE_CODE (srctype
) == ARRAY_TYPE
8759 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8761 srctype
= TREE_TYPE (srctype
);
8763 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
8765 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8766 if (TREE_CODE (desttype
) == ARRAY_TYPE
8767 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8769 desttype
= TREE_TYPE (desttype
);
8771 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
8773 if (TREE_ADDRESSABLE (srctype
)
8774 || TREE_ADDRESSABLE (desttype
))
8777 src_align
= get_pointer_alignment (src
);
8778 dest_align
= get_pointer_alignment (dest
);
8779 if (dest_align
< TYPE_ALIGN (desttype
)
8780 || src_align
< TYPE_ALIGN (srctype
))
8784 dest
= builtin_save_expr (dest
);
8786 /* Build accesses at offset zero with a ref-all character type. */
8787 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
8788 ptr_mode
, true), 0);
8791 STRIP_NOPS (destvar
);
8792 if (TREE_CODE (destvar
) == ADDR_EXPR
8793 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
8794 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8795 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
8797 destvar
= NULL_TREE
;
8800 STRIP_NOPS (srcvar
);
8801 if (TREE_CODE (srcvar
) == ADDR_EXPR
8802 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
8803 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8806 || src_align
>= TYPE_ALIGN (desttype
))
8807 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
8809 else if (!STRICT_ALIGNMENT
)
8811 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8813 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
8821 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
8824 if (srcvar
== NULL_TREE
)
8827 if (src_align
>= TYPE_ALIGN (desttype
))
8828 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
8831 if (STRICT_ALIGNMENT
)
8833 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8835 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
8838 else if (destvar
== NULL_TREE
)
8841 if (dest_align
>= TYPE_ALIGN (srctype
))
8842 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
8845 if (STRICT_ALIGNMENT
)
8847 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
8849 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
8853 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, srcvar
);
8859 if (endp
== 0 || endp
== 3)
8860 return omit_one_operand_loc (loc
, type
, dest
, expr
);
8866 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
8869 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
8870 dest
= fold_convert_loc (loc
, type
, dest
);
8872 dest
= omit_one_operand_loc (loc
, type
, dest
, expr
);
8876 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8877 If LEN is not NULL, it represents the length of the string to be
8878 copied. Return NULL_TREE if no simplification can be made. */
8881 fold_builtin_strcpy (location_t loc
, tree fndecl
, tree dest
, tree src
, tree len
)
8885 if (!validate_arg (dest
, POINTER_TYPE
)
8886 || !validate_arg (src
, POINTER_TYPE
))
8889 /* If SRC and DEST are the same (and not volatile), return DEST. */
8890 if (operand_equal_p (src
, dest
, 0))
8891 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8893 if (optimize_function_for_size_p (cfun
))
8896 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8902 len
= c_strlen (src
, 1);
8903 if (! len
|| TREE_SIDE_EFFECTS (len
))
8907 len
= fold_convert_loc (loc
, size_type_node
, len
);
8908 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
8909 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
8910 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
8913 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8914 Return NULL_TREE if no simplification can be made. */
8917 fold_builtin_stpcpy (location_t loc
, tree fndecl
, tree dest
, tree src
)
8919 tree fn
, len
, lenp1
, call
, type
;
8921 if (!validate_arg (dest
, POINTER_TYPE
)
8922 || !validate_arg (src
, POINTER_TYPE
))
8925 len
= c_strlen (src
, 1);
8927 || TREE_CODE (len
) != INTEGER_CST
)
8930 if (optimize_function_for_size_p (cfun
)
8931 /* If length is zero it's small enough. */
8932 && !integer_zerop (len
))
8935 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8939 lenp1
= size_binop_loc (loc
, PLUS_EXPR
,
8940 fold_convert_loc (loc
, size_type_node
, len
),
8941 build_int_cst (size_type_node
, 1));
8942 /* We use dest twice in building our expression. Save it from
8943 multiple expansions. */
8944 dest
= builtin_save_expr (dest
);
8945 call
= build_call_expr_loc (loc
, fn
, 3, dest
, src
, lenp1
);
8947 type
= TREE_TYPE (TREE_TYPE (fndecl
));
8948 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
8949 dest
= fold_convert_loc (loc
, type
, dest
);
8950 dest
= omit_one_operand_loc (loc
, type
, dest
, call
);
8954 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8955 If SLEN is not NULL, it represents the length of the source string.
8956 Return NULL_TREE if no simplification can be made. */
8959 fold_builtin_strncpy (location_t loc
, tree fndecl
, tree dest
,
8960 tree src
, tree len
, tree slen
)
8964 if (!validate_arg (dest
, POINTER_TYPE
)
8965 || !validate_arg (src
, POINTER_TYPE
)
8966 || !validate_arg (len
, INTEGER_TYPE
))
8969 /* If the LEN parameter is zero, return DEST. */
8970 if (integer_zerop (len
))
8971 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
8973 /* We can't compare slen with len as constants below if len is not a
8975 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
8979 slen
= c_strlen (src
, 1);
8981 /* Now, we must be passed a constant src ptr parameter. */
8982 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
8985 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
8987 /* We do not support simplification of this case, though we do
8988 support it when expanding trees into RTL. */
8989 /* FIXME: generate a call to __builtin_memset. */
8990 if (tree_int_cst_lt (slen
, len
))
8993 /* OK transform into builtin memcpy. */
8994 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8998 len
= fold_convert_loc (loc
, size_type_node
, len
);
8999 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
9000 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
9003 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9004 arguments to the call, and TYPE is its return type.
9005 Return NULL_TREE if no simplification can be made. */
9008 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
9010 if (!validate_arg (arg1
, POINTER_TYPE
)
9011 || !validate_arg (arg2
, INTEGER_TYPE
)
9012 || !validate_arg (len
, INTEGER_TYPE
))
9018 if (TREE_CODE (arg2
) != INTEGER_CST
9019 || !tree_fits_uhwi_p (len
))
9022 p1
= c_getstr (arg1
);
9023 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
9029 if (target_char_cast (arg2
, &c
))
9032 r
= (const char *) memchr (p1
, c
, tree_to_uhwi (len
));
9035 return build_int_cst (TREE_TYPE (arg1
), 0);
9037 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
9038 return fold_convert_loc (loc
, type
, tem
);
9044 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9045 Return NULL_TREE if no simplification can be made. */
9048 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9050 const char *p1
, *p2
;
9052 if (!validate_arg (arg1
, POINTER_TYPE
)
9053 || !validate_arg (arg2
, POINTER_TYPE
)
9054 || !validate_arg (len
, INTEGER_TYPE
))
9057 /* If the LEN parameter is zero, return zero. */
9058 if (integer_zerop (len
))
9059 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9062 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9063 if (operand_equal_p (arg1
, arg2
, 0))
9064 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9066 p1
= c_getstr (arg1
);
9067 p2
= c_getstr (arg2
);
9069 /* If all arguments are constant, and the value of len is not greater
9070 than the lengths of arg1 and arg2, evaluate at compile-time. */
9071 if (tree_fits_uhwi_p (len
) && p1
&& p2
9072 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
9073 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
9075 const int r
= memcmp (p1
, p2
, tree_to_uhwi (len
));
9078 return integer_one_node
;
9080 return integer_minus_one_node
;
9082 return integer_zero_node
;
9085 /* If len parameter is one, return an expression corresponding to
9086 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9087 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
9089 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9090 tree cst_uchar_ptr_node
9091 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9094 = fold_convert_loc (loc
, integer_type_node
,
9095 build1 (INDIRECT_REF
, cst_uchar_node
,
9096 fold_convert_loc (loc
,
9100 = fold_convert_loc (loc
, integer_type_node
,
9101 build1 (INDIRECT_REF
, cst_uchar_node
,
9102 fold_convert_loc (loc
,
9105 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9111 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9112 Return NULL_TREE if no simplification can be made. */
9115 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
9117 const char *p1
, *p2
;
9119 if (!validate_arg (arg1
, POINTER_TYPE
)
9120 || !validate_arg (arg2
, POINTER_TYPE
))
9123 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9124 if (operand_equal_p (arg1
, arg2
, 0))
9125 return integer_zero_node
;
9127 p1
= c_getstr (arg1
);
9128 p2
= c_getstr (arg2
);
9132 const int i
= strcmp (p1
, p2
);
9134 return integer_minus_one_node
;
9136 return integer_one_node
;
9138 return integer_zero_node
;
9141 /* If the second arg is "", return *(const unsigned char*)arg1. */
9142 if (p2
&& *p2
== '\0')
9144 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9145 tree cst_uchar_ptr_node
9146 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9148 return fold_convert_loc (loc
, integer_type_node
,
9149 build1 (INDIRECT_REF
, cst_uchar_node
,
9150 fold_convert_loc (loc
,
9155 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9156 if (p1
&& *p1
== '\0')
9158 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9159 tree cst_uchar_ptr_node
9160 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9163 = fold_convert_loc (loc
, integer_type_node
,
9164 build1 (INDIRECT_REF
, cst_uchar_node
,
9165 fold_convert_loc (loc
,
9168 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9174 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9175 Return NULL_TREE if no simplification can be made. */
9178 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9180 const char *p1
, *p2
;
9182 if (!validate_arg (arg1
, POINTER_TYPE
)
9183 || !validate_arg (arg2
, POINTER_TYPE
)
9184 || !validate_arg (len
, INTEGER_TYPE
))
9187 /* If the LEN parameter is zero, return zero. */
9188 if (integer_zerop (len
))
9189 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9192 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9193 if (operand_equal_p (arg1
, arg2
, 0))
9194 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9196 p1
= c_getstr (arg1
);
9197 p2
= c_getstr (arg2
);
9199 if (tree_fits_uhwi_p (len
) && p1
&& p2
)
9201 const int i
= strncmp (p1
, p2
, tree_to_uhwi (len
));
9203 return integer_one_node
;
9205 return integer_minus_one_node
;
9207 return integer_zero_node
;
9210 /* If the second arg is "", and the length is greater than zero,
9211 return *(const unsigned char*)arg1. */
9212 if (p2
&& *p2
== '\0'
9213 && TREE_CODE (len
) == INTEGER_CST
9214 && tree_int_cst_sgn (len
) == 1)
9216 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9217 tree cst_uchar_ptr_node
9218 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9220 return fold_convert_loc (loc
, integer_type_node
,
9221 build1 (INDIRECT_REF
, cst_uchar_node
,
9222 fold_convert_loc (loc
,
9227 /* If the first arg is "", and the length is greater than zero,
9228 return -*(const unsigned char*)arg2. */
9229 if (p1
&& *p1
== '\0'
9230 && TREE_CODE (len
) == INTEGER_CST
9231 && tree_int_cst_sgn (len
) == 1)
9233 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9234 tree cst_uchar_ptr_node
9235 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9237 tree temp
= fold_convert_loc (loc
, integer_type_node
,
9238 build1 (INDIRECT_REF
, cst_uchar_node
,
9239 fold_convert_loc (loc
,
9242 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9245 /* If len parameter is one, return an expression corresponding to
9246 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9247 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
9249 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9250 tree cst_uchar_ptr_node
9251 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9253 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
9254 build1 (INDIRECT_REF
, cst_uchar_node
,
9255 fold_convert_loc (loc
,
9258 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
9259 build1 (INDIRECT_REF
, cst_uchar_node
,
9260 fold_convert_loc (loc
,
9263 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9269 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9270 ARG. Return NULL_TREE if no simplification can be made. */
9273 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
9275 if (!validate_arg (arg
, REAL_TYPE
))
9278 /* If ARG is a compile-time constant, determine the result. */
9279 if (TREE_CODE (arg
) == REAL_CST
9280 && !TREE_OVERFLOW (arg
))
9284 c
= TREE_REAL_CST (arg
);
9285 return (REAL_VALUE_NEGATIVE (c
)
9286 ? build_one_cst (type
)
9287 : build_zero_cst (type
));
9290 /* If ARG is non-negative, the result is always zero. */
9291 if (tree_expr_nonnegative_p (arg
))
9292 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9294 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9295 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9296 return fold_convert (type
,
9297 fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, arg
,
9298 build_real (TREE_TYPE (arg
), dconst0
)));
9303 /* Fold function call to builtin copysign, copysignf or copysignl with
9304 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9308 fold_builtin_copysign (location_t loc
, tree fndecl
,
9309 tree arg1
, tree arg2
, tree type
)
9313 if (!validate_arg (arg1
, REAL_TYPE
)
9314 || !validate_arg (arg2
, REAL_TYPE
))
9317 /* copysign(X,X) is X. */
9318 if (operand_equal_p (arg1
, arg2
, 0))
9319 return fold_convert_loc (loc
, type
, arg1
);
9321 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9322 if (TREE_CODE (arg1
) == REAL_CST
9323 && TREE_CODE (arg2
) == REAL_CST
9324 && !TREE_OVERFLOW (arg1
)
9325 && !TREE_OVERFLOW (arg2
))
9327 REAL_VALUE_TYPE c1
, c2
;
9329 c1
= TREE_REAL_CST (arg1
);
9330 c2
= TREE_REAL_CST (arg2
);
9331 /* c1.sign := c2.sign. */
9332 real_copysign (&c1
, &c2
);
9333 return build_real (type
, c1
);
9336 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9337 Remember to evaluate Y for side-effects. */
9338 if (tree_expr_nonnegative_p (arg2
))
9339 return omit_one_operand_loc (loc
, type
,
9340 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
9343 /* Strip sign changing operations for the first argument. */
9344 tem
= fold_strip_sign_ops (arg1
);
9346 return build_call_expr_loc (loc
, fndecl
, 2, tem
, arg2
);
9351 /* Fold a call to builtin isascii with argument ARG. */
9354 fold_builtin_isascii (location_t loc
, tree arg
)
9356 if (!validate_arg (arg
, INTEGER_TYPE
))
9360 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9361 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9362 build_int_cst (integer_type_node
,
9363 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9364 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
9365 arg
, integer_zero_node
);
9369 /* Fold a call to builtin toascii with argument ARG. */
9372 fold_builtin_toascii (location_t loc
, tree arg
)
9374 if (!validate_arg (arg
, INTEGER_TYPE
))
9377 /* Transform toascii(c) -> (c & 0x7f). */
9378 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
9379 build_int_cst (integer_type_node
, 0x7f));
9382 /* Fold a call to builtin isdigit with argument ARG. */
9385 fold_builtin_isdigit (location_t loc
, tree arg
)
9387 if (!validate_arg (arg
, INTEGER_TYPE
))
9391 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9392 /* According to the C standard, isdigit is unaffected by locale.
9393 However, it definitely is affected by the target character set. */
9394 unsigned HOST_WIDE_INT target_digit0
9395 = lang_hooks
.to_target_charset ('0');
9397 if (target_digit0
== 0)
9400 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
9401 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9402 build_int_cst (unsigned_type_node
, target_digit0
));
9403 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
9404 build_int_cst (unsigned_type_node
, 9));
9408 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9411 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
9413 if (!validate_arg (arg
, REAL_TYPE
))
9416 arg
= fold_convert_loc (loc
, type
, arg
);
9417 if (TREE_CODE (arg
) == REAL_CST
)
9418 return fold_abs_const (arg
, type
);
9419 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9422 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9425 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
9427 if (!validate_arg (arg
, INTEGER_TYPE
))
9430 arg
= fold_convert_loc (loc
, type
, arg
);
9431 if (TREE_CODE (arg
) == INTEGER_CST
)
9432 return fold_abs_const (arg
, type
);
9433 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9436 /* Fold a fma operation with arguments ARG[012]. */
9439 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
9440 tree type
, tree arg0
, tree arg1
, tree arg2
)
9442 if (TREE_CODE (arg0
) == REAL_CST
9443 && TREE_CODE (arg1
) == REAL_CST
9444 && TREE_CODE (arg2
) == REAL_CST
)
9445 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
9450 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9453 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
9455 if (validate_arg (arg0
, REAL_TYPE
)
9456 && validate_arg (arg1
, REAL_TYPE
)
9457 && validate_arg (arg2
, REAL_TYPE
))
9459 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
9463 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9464 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
9465 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
9470 /* Fold a call to builtin fmin or fmax. */
9473 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
9474 tree type
, bool max
)
9476 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9478 /* Calculate the result when the argument is a constant. */
9479 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9484 /* If either argument is NaN, return the other one. Avoid the
9485 transformation if we get (and honor) a signalling NaN. Using
9486 omit_one_operand() ensures we create a non-lvalue. */
9487 if (TREE_CODE (arg0
) == REAL_CST
9488 && real_isnan (&TREE_REAL_CST (arg0
))
9489 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9490 || ! TREE_REAL_CST (arg0
).signalling
))
9491 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
9492 if (TREE_CODE (arg1
) == REAL_CST
9493 && real_isnan (&TREE_REAL_CST (arg1
))
9494 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9495 || ! TREE_REAL_CST (arg1
).signalling
))
9496 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9498 /* Transform fmin/fmax(x,x) -> x. */
9499 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9500 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9502 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9503 functions to return the numeric arg if the other one is NaN.
9504 These tree codes don't honor that, so only transform if
9505 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9506 handled, so we don't have to worry about it either. */
9507 if (flag_finite_math_only
)
9508 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
9509 fold_convert_loc (loc
, type
, arg0
),
9510 fold_convert_loc (loc
, type
, arg1
));
9515 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9518 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
9520 if (validate_arg (arg
, COMPLEX_TYPE
)
9521 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
9523 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9527 tree new_arg
= builtin_save_expr (arg
);
9528 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
9529 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
9530 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
9537 /* Fold a call to builtin logb/ilogb. */
9540 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9542 if (! validate_arg (arg
, REAL_TYPE
))
9547 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9549 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9555 /* If arg is Inf or NaN and we're logb, return it. */
9556 if (TREE_CODE (rettype
) == REAL_TYPE
)
9558 /* For logb(-Inf) we have to return +Inf. */
9559 if (real_isinf (value
) && real_isneg (value
))
9561 REAL_VALUE_TYPE tem
;
9563 return build_real (rettype
, tem
);
9565 return fold_convert_loc (loc
, rettype
, arg
);
9567 /* Fall through... */
9569 /* Zero may set errno and/or raise an exception for logb, also
9570 for ilogb we don't know FP_ILOGB0. */
9573 /* For normal numbers, proceed iff radix == 2. In GCC,
9574 normalized significands are in the range [0.5, 1.0). We
9575 want the exponent as if they were [1.0, 2.0) so get the
9576 exponent and subtract 1. */
9577 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9578 return fold_convert_loc (loc
, rettype
,
9579 build_int_cst (integer_type_node
,
9580 REAL_EXP (value
)-1));
9588 /* Fold a call to builtin significand, if radix == 2. */
9591 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9593 if (! validate_arg (arg
, REAL_TYPE
))
9598 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9600 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9607 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9608 return fold_convert_loc (loc
, rettype
, arg
);
9610 /* For normal numbers, proceed iff radix == 2. */
9611 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9613 REAL_VALUE_TYPE result
= *value
;
9614 /* In GCC, normalized significands are in the range [0.5,
9615 1.0). We want them to be [1.0, 2.0) so set the
9617 SET_REAL_EXP (&result
, 1);
9618 return build_real (rettype
, result
);
9627 /* Fold a call to builtin frexp, we can assume the base is 2. */
9630 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9632 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9637 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9640 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9642 /* Proceed if a valid pointer type was passed in. */
9643 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9645 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9651 /* For +-0, return (*exp = 0, +-0). */
9652 exp
= integer_zero_node
;
9657 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9658 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9661 /* Since the frexp function always expects base 2, and in
9662 GCC normalized significands are already in the range
9663 [0.5, 1.0), we have exactly what frexp wants. */
9664 REAL_VALUE_TYPE frac_rvt
= *value
;
9665 SET_REAL_EXP (&frac_rvt
, 0);
9666 frac
= build_real (rettype
, frac_rvt
);
9667 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
9674 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9675 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9676 TREE_SIDE_EFFECTS (arg1
) = 1;
9677 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9683 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9684 then we can assume the base is two. If it's false, then we have to
9685 check the mode of the TYPE parameter in certain cases. */
9688 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9689 tree type
, bool ldexp
)
9691 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9696 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9697 if (real_zerop (arg0
) || integer_zerop (arg1
)
9698 || (TREE_CODE (arg0
) == REAL_CST
9699 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9700 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9702 /* If both arguments are constant, then try to evaluate it. */
9703 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9704 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9705 && tree_fits_shwi_p (arg1
))
9707 /* Bound the maximum adjustment to twice the range of the
9708 mode's valid exponents. Use abs to ensure the range is
9709 positive as a sanity check. */
9710 const long max_exp_adj
= 2 *
9711 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9712 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9714 /* Get the user-requested adjustment. */
9715 const HOST_WIDE_INT req_exp_adj
= tree_to_shwi (arg1
);
9717 /* The requested adjustment must be inside this range. This
9718 is a preliminary cap to avoid things like overflow, we
9719 may still fail to compute the result for other reasons. */
9720 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9722 REAL_VALUE_TYPE initial_result
;
9724 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9726 /* Ensure we didn't overflow. */
9727 if (! real_isinf (&initial_result
))
9729 const REAL_VALUE_TYPE trunc_result
9730 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9732 /* Only proceed if the target mode can hold the
9734 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9735 return build_real (type
, trunc_result
);
9744 /* Fold a call to builtin modf. */
9747 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9749 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9754 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9757 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9759 /* Proceed if a valid pointer type was passed in. */
9760 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9762 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9763 REAL_VALUE_TYPE trunc
, frac
;
9769 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9770 trunc
= frac
= *value
;
9773 /* For +-Inf, return (*arg1 = arg0, +-0). */
9775 frac
.sign
= value
->sign
;
9779 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9780 real_trunc (&trunc
, VOIDmode
, value
);
9781 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9782 /* If the original number was negative and already
9783 integral, then the fractional part is -0.0. */
9784 if (value
->sign
&& frac
.cl
== rvc_zero
)
9785 frac
.sign
= value
->sign
;
9789 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9790 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9791 build_real (rettype
, trunc
));
9792 TREE_SIDE_EFFECTS (arg1
) = 1;
9793 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9794 build_real (rettype
, frac
));
9800 /* Given a location LOC, an interclass builtin function decl FNDECL
9801 and its single argument ARG, return an folded expression computing
9802 the same, or NULL_TREE if we either couldn't or didn't want to fold
9803 (the latter happen if there's an RTL instruction available). */
9806 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9808 enum machine_mode mode
;
9810 if (!validate_arg (arg
, REAL_TYPE
))
9813 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9816 mode
= TYPE_MODE (TREE_TYPE (arg
));
9818 /* If there is no optab, try generic code. */
9819 switch (DECL_FUNCTION_CODE (fndecl
))
9823 CASE_FLT_FN (BUILT_IN_ISINF
):
9825 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9826 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9827 tree
const type
= TREE_TYPE (arg
);
9831 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9832 real_from_string (&r
, buf
);
9833 result
= build_call_expr (isgr_fn
, 2,
9834 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9835 build_real (type
, r
));
9838 CASE_FLT_FN (BUILT_IN_FINITE
):
9839 case BUILT_IN_ISFINITE
:
9841 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9842 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9843 tree
const type
= TREE_TYPE (arg
);
9847 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9848 real_from_string (&r
, buf
);
9849 result
= build_call_expr (isle_fn
, 2,
9850 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9851 build_real (type
, r
));
9852 /*result = fold_build2_loc (loc, UNGT_EXPR,
9853 TREE_TYPE (TREE_TYPE (fndecl)),
9854 fold_build1_loc (loc, ABS_EXPR, type, arg),
9855 build_real (type, r));
9856 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9857 TREE_TYPE (TREE_TYPE (fndecl)),
9861 case BUILT_IN_ISNORMAL
:
9863 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9864 islessequal(fabs(x),DBL_MAX). */
9865 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9866 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
9867 tree
const type
= TREE_TYPE (arg
);
9868 REAL_VALUE_TYPE rmax
, rmin
;
9871 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9872 real_from_string (&rmax
, buf
);
9873 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9874 real_from_string (&rmin
, buf
);
9875 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9876 result
= build_call_expr (isle_fn
, 2, arg
,
9877 build_real (type
, rmax
));
9878 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
9879 build_call_expr (isge_fn
, 2, arg
,
9880 build_real (type
, rmin
)));
9890 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9891 ARG is the argument for the call. */
9894 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9896 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9899 if (!validate_arg (arg
, REAL_TYPE
))
9902 switch (builtin_index
)
9904 case BUILT_IN_ISINF
:
9905 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9906 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9908 if (TREE_CODE (arg
) == REAL_CST
)
9910 r
= TREE_REAL_CST (arg
);
9911 if (real_isinf (&r
))
9912 return real_compare (GT_EXPR
, &r
, &dconst0
)
9913 ? integer_one_node
: integer_minus_one_node
;
9915 return integer_zero_node
;
9920 case BUILT_IN_ISINF_SIGN
:
9922 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9923 /* In a boolean context, GCC will fold the inner COND_EXPR to
9924 1. So e.g. "if (isinf_sign(x))" would be folded to just
9925 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9926 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9927 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
9928 tree tmp
= NULL_TREE
;
9930 arg
= builtin_save_expr (arg
);
9932 if (signbit_fn
&& isinf_fn
)
9934 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9935 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9937 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9938 signbit_call
, integer_zero_node
);
9939 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9940 isinf_call
, integer_zero_node
);
9942 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9943 integer_minus_one_node
, integer_one_node
);
9944 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9952 case BUILT_IN_ISFINITE
:
9953 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9954 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9955 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9957 if (TREE_CODE (arg
) == REAL_CST
)
9959 r
= TREE_REAL_CST (arg
);
9960 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9965 case BUILT_IN_ISNAN
:
9966 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9967 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9969 if (TREE_CODE (arg
) == REAL_CST
)
9971 r
= TREE_REAL_CST (arg
);
9972 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9975 arg
= builtin_save_expr (arg
);
9976 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
9983 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9984 This builtin will generate code to return the appropriate floating
9985 point classification depending on the value of the floating point
9986 number passed in. The possible return values must be supplied as
9987 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9988 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9989 one floating point argument which is "type generic". */
9992 fold_builtin_fpclassify (location_t loc
, tree exp
)
9994 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9995 arg
, type
, res
, tmp
;
9996 enum machine_mode mode
;
10000 /* Verify the required arguments in the original call. */
10001 if (!validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
,
10002 INTEGER_TYPE
, INTEGER_TYPE
,
10003 INTEGER_TYPE
, REAL_TYPE
, VOID_TYPE
))
10006 fp_nan
= CALL_EXPR_ARG (exp
, 0);
10007 fp_infinite
= CALL_EXPR_ARG (exp
, 1);
10008 fp_normal
= CALL_EXPR_ARG (exp
, 2);
10009 fp_subnormal
= CALL_EXPR_ARG (exp
, 3);
10010 fp_zero
= CALL_EXPR_ARG (exp
, 4);
10011 arg
= CALL_EXPR_ARG (exp
, 5);
10012 type
= TREE_TYPE (arg
);
10013 mode
= TYPE_MODE (type
);
10014 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
10016 /* fpclassify(x) ->
10017 isnan(x) ? FP_NAN :
10018 (fabs(x) == Inf ? FP_INFINITE :
10019 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10020 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10022 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10023 build_real (type
, dconst0
));
10024 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
10025 tmp
, fp_zero
, fp_subnormal
);
10027 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
10028 real_from_string (&r
, buf
);
10029 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
10030 arg
, build_real (type
, r
));
10031 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
10033 if (HONOR_INFINITIES (mode
))
10036 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10037 build_real (type
, r
));
10038 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
10042 if (HONOR_NANS (mode
))
10044 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
10045 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
10051 /* Fold a call to an unordered comparison function such as
10052 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10053 being called and ARG0 and ARG1 are the arguments for the call.
10054 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10055 the opposite of the desired result. UNORDERED_CODE is used
10056 for modes that can hold NaNs and ORDERED_CODE is used for
10060 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
10061 enum tree_code unordered_code
,
10062 enum tree_code ordered_code
)
10064 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10065 enum tree_code code
;
10067 enum tree_code code0
, code1
;
10068 tree cmp_type
= NULL_TREE
;
10070 type0
= TREE_TYPE (arg0
);
10071 type1
= TREE_TYPE (arg1
);
10073 code0
= TREE_CODE (type0
);
10074 code1
= TREE_CODE (type1
);
10076 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
10077 /* Choose the wider of two real types. */
10078 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
10080 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
10082 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
10085 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
10086 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
10088 if (unordered_code
== UNORDERED_EXPR
)
10090 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
10091 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
10092 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
10095 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
10097 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
10098 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
10101 /* Fold a call to built-in function FNDECL with 0 arguments.
10102 IGNORE is true if the result of the function call is ignored. This
10103 function returns NULL_TREE if no simplification was possible. */
10106 fold_builtin_0 (location_t loc
, tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
10108 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10109 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10112 CASE_FLT_FN (BUILT_IN_INF
):
10113 case BUILT_IN_INFD32
:
10114 case BUILT_IN_INFD64
:
10115 case BUILT_IN_INFD128
:
10116 return fold_builtin_inf (loc
, type
, true);
10118 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
10119 return fold_builtin_inf (loc
, type
, false);
10121 case BUILT_IN_CLASSIFY_TYPE
:
10122 return fold_builtin_classify_type (NULL_TREE
);
10124 case BUILT_IN_UNREACHABLE
:
10125 if (flag_sanitize
& SANITIZE_UNREACHABLE
10126 && (current_function_decl
== NULL
10127 || !lookup_attribute ("no_sanitize_undefined",
10128 DECL_ATTRIBUTES (current_function_decl
))))
10129 return ubsan_instrument_unreachable (loc
);
10138 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10139 IGNORE is true if the result of the function call is ignored. This
10140 function returns NULL_TREE if no simplification was possible. */
10143 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
, bool ignore
)
10145 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10146 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10149 case BUILT_IN_CONSTANT_P
:
10151 tree val
= fold_builtin_constant_p (arg0
);
10153 /* Gimplification will pull the CALL_EXPR for the builtin out of
10154 an if condition. When not optimizing, we'll not CSE it back.
10155 To avoid link error types of regressions, return false now. */
10156 if (!val
&& !optimize
)
10157 val
= integer_zero_node
;
10162 case BUILT_IN_CLASSIFY_TYPE
:
10163 return fold_builtin_classify_type (arg0
);
10165 case BUILT_IN_STRLEN
:
10166 return fold_builtin_strlen (loc
, type
, arg0
);
10168 CASE_FLT_FN (BUILT_IN_FABS
):
10169 case BUILT_IN_FABSD32
:
10170 case BUILT_IN_FABSD64
:
10171 case BUILT_IN_FABSD128
:
10172 return fold_builtin_fabs (loc
, arg0
, type
);
10175 case BUILT_IN_LABS
:
10176 case BUILT_IN_LLABS
:
10177 case BUILT_IN_IMAXABS
:
10178 return fold_builtin_abs (loc
, arg0
, type
);
10180 CASE_FLT_FN (BUILT_IN_CONJ
):
10181 if (validate_arg (arg0
, COMPLEX_TYPE
)
10182 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10183 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
10186 CASE_FLT_FN (BUILT_IN_CREAL
):
10187 if (validate_arg (arg0
, COMPLEX_TYPE
)
10188 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10189 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));;
10192 CASE_FLT_FN (BUILT_IN_CIMAG
):
10193 if (validate_arg (arg0
, COMPLEX_TYPE
)
10194 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10195 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
10198 CASE_FLT_FN (BUILT_IN_CCOS
):
10199 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ false);
10201 CASE_FLT_FN (BUILT_IN_CCOSH
):
10202 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ true);
10204 CASE_FLT_FN (BUILT_IN_CPROJ
):
10205 return fold_builtin_cproj (loc
, arg0
, type
);
10207 CASE_FLT_FN (BUILT_IN_CSIN
):
10208 if (validate_arg (arg0
, COMPLEX_TYPE
)
10209 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10210 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
10213 CASE_FLT_FN (BUILT_IN_CSINH
):
10214 if (validate_arg (arg0
, COMPLEX_TYPE
)
10215 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10216 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
10219 CASE_FLT_FN (BUILT_IN_CTAN
):
10220 if (validate_arg (arg0
, COMPLEX_TYPE
)
10221 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10222 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
10225 CASE_FLT_FN (BUILT_IN_CTANH
):
10226 if (validate_arg (arg0
, COMPLEX_TYPE
)
10227 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10228 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
10231 CASE_FLT_FN (BUILT_IN_CLOG
):
10232 if (validate_arg (arg0
, COMPLEX_TYPE
)
10233 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10234 return do_mpc_arg1 (arg0
, type
, mpc_log
);
10237 CASE_FLT_FN (BUILT_IN_CSQRT
):
10238 if (validate_arg (arg0
, COMPLEX_TYPE
)
10239 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10240 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
10243 CASE_FLT_FN (BUILT_IN_CASIN
):
10244 if (validate_arg (arg0
, COMPLEX_TYPE
)
10245 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10246 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
10249 CASE_FLT_FN (BUILT_IN_CACOS
):
10250 if (validate_arg (arg0
, COMPLEX_TYPE
)
10251 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10252 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
10255 CASE_FLT_FN (BUILT_IN_CATAN
):
10256 if (validate_arg (arg0
, COMPLEX_TYPE
)
10257 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10258 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
10261 CASE_FLT_FN (BUILT_IN_CASINH
):
10262 if (validate_arg (arg0
, COMPLEX_TYPE
)
10263 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10264 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
10267 CASE_FLT_FN (BUILT_IN_CACOSH
):
10268 if (validate_arg (arg0
, COMPLEX_TYPE
)
10269 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10270 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
10273 CASE_FLT_FN (BUILT_IN_CATANH
):
10274 if (validate_arg (arg0
, COMPLEX_TYPE
)
10275 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10276 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
10279 CASE_FLT_FN (BUILT_IN_CABS
):
10280 return fold_builtin_cabs (loc
, arg0
, type
, fndecl
);
10282 CASE_FLT_FN (BUILT_IN_CARG
):
10283 return fold_builtin_carg (loc
, arg0
, type
);
10285 CASE_FLT_FN (BUILT_IN_SQRT
):
10286 return fold_builtin_sqrt (loc
, arg0
, type
);
10288 CASE_FLT_FN (BUILT_IN_CBRT
):
10289 return fold_builtin_cbrt (loc
, arg0
, type
);
10291 CASE_FLT_FN (BUILT_IN_ASIN
):
10292 if (validate_arg (arg0
, REAL_TYPE
))
10293 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
10294 &dconstm1
, &dconst1
, true);
10297 CASE_FLT_FN (BUILT_IN_ACOS
):
10298 if (validate_arg (arg0
, REAL_TYPE
))
10299 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
10300 &dconstm1
, &dconst1
, true);
10303 CASE_FLT_FN (BUILT_IN_ATAN
):
10304 if (validate_arg (arg0
, REAL_TYPE
))
10305 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
10308 CASE_FLT_FN (BUILT_IN_ASINH
):
10309 if (validate_arg (arg0
, REAL_TYPE
))
10310 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
10313 CASE_FLT_FN (BUILT_IN_ACOSH
):
10314 if (validate_arg (arg0
, REAL_TYPE
))
10315 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
10316 &dconst1
, NULL
, true);
10319 CASE_FLT_FN (BUILT_IN_ATANH
):
10320 if (validate_arg (arg0
, REAL_TYPE
))
10321 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
10322 &dconstm1
, &dconst1
, false);
10325 CASE_FLT_FN (BUILT_IN_SIN
):
10326 if (validate_arg (arg0
, REAL_TYPE
))
10327 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
10330 CASE_FLT_FN (BUILT_IN_COS
):
10331 return fold_builtin_cos (loc
, arg0
, type
, fndecl
);
10333 CASE_FLT_FN (BUILT_IN_TAN
):
10334 return fold_builtin_tan (arg0
, type
);
10336 CASE_FLT_FN (BUILT_IN_CEXP
):
10337 return fold_builtin_cexp (loc
, arg0
, type
);
10339 CASE_FLT_FN (BUILT_IN_CEXPI
):
10340 if (validate_arg (arg0
, REAL_TYPE
))
10341 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10344 CASE_FLT_FN (BUILT_IN_SINH
):
10345 if (validate_arg (arg0
, REAL_TYPE
))
10346 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10349 CASE_FLT_FN (BUILT_IN_COSH
):
10350 return fold_builtin_cosh (loc
, arg0
, type
, fndecl
);
10352 CASE_FLT_FN (BUILT_IN_TANH
):
10353 if (validate_arg (arg0
, REAL_TYPE
))
10354 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10357 CASE_FLT_FN (BUILT_IN_ERF
):
10358 if (validate_arg (arg0
, REAL_TYPE
))
10359 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10362 CASE_FLT_FN (BUILT_IN_ERFC
):
10363 if (validate_arg (arg0
, REAL_TYPE
))
10364 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10367 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10368 if (validate_arg (arg0
, REAL_TYPE
))
10369 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10372 CASE_FLT_FN (BUILT_IN_EXP
):
10373 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
10375 CASE_FLT_FN (BUILT_IN_EXP2
):
10376 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
10378 CASE_FLT_FN (BUILT_IN_EXP10
):
10379 CASE_FLT_FN (BUILT_IN_POW10
):
10380 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
10382 CASE_FLT_FN (BUILT_IN_EXPM1
):
10383 if (validate_arg (arg0
, REAL_TYPE
))
10384 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10387 CASE_FLT_FN (BUILT_IN_LOG
):
10388 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log
);
10390 CASE_FLT_FN (BUILT_IN_LOG2
):
10391 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log2
);
10393 CASE_FLT_FN (BUILT_IN_LOG10
):
10394 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log10
);
10396 CASE_FLT_FN (BUILT_IN_LOG1P
):
10397 if (validate_arg (arg0
, REAL_TYPE
))
10398 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10399 &dconstm1
, NULL
, false);
10402 CASE_FLT_FN (BUILT_IN_J0
):
10403 if (validate_arg (arg0
, REAL_TYPE
))
10404 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10408 CASE_FLT_FN (BUILT_IN_J1
):
10409 if (validate_arg (arg0
, REAL_TYPE
))
10410 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10414 CASE_FLT_FN (BUILT_IN_Y0
):
10415 if (validate_arg (arg0
, REAL_TYPE
))
10416 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10417 &dconst0
, NULL
, false);
10420 CASE_FLT_FN (BUILT_IN_Y1
):
10421 if (validate_arg (arg0
, REAL_TYPE
))
10422 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10423 &dconst0
, NULL
, false);
10426 CASE_FLT_FN (BUILT_IN_NAN
):
10427 case BUILT_IN_NAND32
:
10428 case BUILT_IN_NAND64
:
10429 case BUILT_IN_NAND128
:
10430 return fold_builtin_nan (arg0
, type
, true);
10432 CASE_FLT_FN (BUILT_IN_NANS
):
10433 return fold_builtin_nan (arg0
, type
, false);
10435 CASE_FLT_FN (BUILT_IN_FLOOR
):
10436 return fold_builtin_floor (loc
, fndecl
, arg0
);
10438 CASE_FLT_FN (BUILT_IN_CEIL
):
10439 return fold_builtin_ceil (loc
, fndecl
, arg0
);
10441 CASE_FLT_FN (BUILT_IN_TRUNC
):
10442 return fold_builtin_trunc (loc
, fndecl
, arg0
);
10444 CASE_FLT_FN (BUILT_IN_ROUND
):
10445 return fold_builtin_round (loc
, fndecl
, arg0
);
10447 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10448 CASE_FLT_FN (BUILT_IN_RINT
):
10449 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
10451 CASE_FLT_FN (BUILT_IN_ICEIL
):
10452 CASE_FLT_FN (BUILT_IN_LCEIL
):
10453 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10454 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10455 CASE_FLT_FN (BUILT_IN_IFLOOR
):
10456 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10457 CASE_FLT_FN (BUILT_IN_IROUND
):
10458 CASE_FLT_FN (BUILT_IN_LROUND
):
10459 CASE_FLT_FN (BUILT_IN_LLROUND
):
10460 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
10462 CASE_FLT_FN (BUILT_IN_IRINT
):
10463 CASE_FLT_FN (BUILT_IN_LRINT
):
10464 CASE_FLT_FN (BUILT_IN_LLRINT
):
10465 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
10467 case BUILT_IN_BSWAP16
:
10468 case BUILT_IN_BSWAP32
:
10469 case BUILT_IN_BSWAP64
:
10470 return fold_builtin_bswap (fndecl
, arg0
);
10472 CASE_INT_FN (BUILT_IN_FFS
):
10473 CASE_INT_FN (BUILT_IN_CLZ
):
10474 CASE_INT_FN (BUILT_IN_CTZ
):
10475 CASE_INT_FN (BUILT_IN_CLRSB
):
10476 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10477 CASE_INT_FN (BUILT_IN_PARITY
):
10478 return fold_builtin_bitop (fndecl
, arg0
);
10480 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10481 return fold_builtin_signbit (loc
, arg0
, type
);
10483 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10484 return fold_builtin_significand (loc
, arg0
, type
);
10486 CASE_FLT_FN (BUILT_IN_ILOGB
):
10487 CASE_FLT_FN (BUILT_IN_LOGB
):
10488 return fold_builtin_logb (loc
, arg0
, type
);
10490 case BUILT_IN_ISASCII
:
10491 return fold_builtin_isascii (loc
, arg0
);
10493 case BUILT_IN_TOASCII
:
10494 return fold_builtin_toascii (loc
, arg0
);
10496 case BUILT_IN_ISDIGIT
:
10497 return fold_builtin_isdigit (loc
, arg0
);
10499 CASE_FLT_FN (BUILT_IN_FINITE
):
10500 case BUILT_IN_FINITED32
:
10501 case BUILT_IN_FINITED64
:
10502 case BUILT_IN_FINITED128
:
10503 case BUILT_IN_ISFINITE
:
10505 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10508 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10511 CASE_FLT_FN (BUILT_IN_ISINF
):
10512 case BUILT_IN_ISINFD32
:
10513 case BUILT_IN_ISINFD64
:
10514 case BUILT_IN_ISINFD128
:
10516 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10519 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10522 case BUILT_IN_ISNORMAL
:
10523 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10525 case BUILT_IN_ISINF_SIGN
:
10526 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10528 CASE_FLT_FN (BUILT_IN_ISNAN
):
10529 case BUILT_IN_ISNAND32
:
10530 case BUILT_IN_ISNAND64
:
10531 case BUILT_IN_ISNAND128
:
10532 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10534 case BUILT_IN_PRINTF
:
10535 case BUILT_IN_PRINTF_UNLOCKED
:
10536 case BUILT_IN_VPRINTF
:
10537 return fold_builtin_printf (loc
, fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10539 case BUILT_IN_FREE
:
10540 if (integer_zerop (arg0
))
10541 return build_empty_stmt (loc
);
10552 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10553 IGNORE is true if the result of the function call is ignored. This
10554 function returns NULL_TREE if no simplification was possible. */
10557 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10559 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10560 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10564 CASE_FLT_FN (BUILT_IN_JN
):
10565 if (validate_arg (arg0
, INTEGER_TYPE
)
10566 && validate_arg (arg1
, REAL_TYPE
))
10567 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10570 CASE_FLT_FN (BUILT_IN_YN
):
10571 if (validate_arg (arg0
, INTEGER_TYPE
)
10572 && validate_arg (arg1
, REAL_TYPE
))
10573 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10577 CASE_FLT_FN (BUILT_IN_DREM
):
10578 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10579 if (validate_arg (arg0
, REAL_TYPE
)
10580 && validate_arg (arg1
, REAL_TYPE
))
10581 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10584 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10585 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10586 if (validate_arg (arg0
, REAL_TYPE
)
10587 && validate_arg (arg1
, POINTER_TYPE
))
10588 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10591 CASE_FLT_FN (BUILT_IN_ATAN2
):
10592 if (validate_arg (arg0
, REAL_TYPE
)
10593 && validate_arg (arg1
, REAL_TYPE
))
10594 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10597 CASE_FLT_FN (BUILT_IN_FDIM
):
10598 if (validate_arg (arg0
, REAL_TYPE
)
10599 && validate_arg (arg1
, REAL_TYPE
))
10600 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10603 CASE_FLT_FN (BUILT_IN_HYPOT
):
10604 return fold_builtin_hypot (loc
, fndecl
, arg0
, arg1
, type
);
10606 CASE_FLT_FN (BUILT_IN_CPOW
):
10607 if (validate_arg (arg0
, COMPLEX_TYPE
)
10608 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
10609 && validate_arg (arg1
, COMPLEX_TYPE
)
10610 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
10611 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
10614 CASE_FLT_FN (BUILT_IN_LDEXP
):
10615 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
10616 CASE_FLT_FN (BUILT_IN_SCALBN
):
10617 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10618 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
10619 type
, /*ldexp=*/false);
10621 CASE_FLT_FN (BUILT_IN_FREXP
):
10622 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
10624 CASE_FLT_FN (BUILT_IN_MODF
):
10625 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
10627 case BUILT_IN_BZERO
:
10628 return fold_builtin_bzero (loc
, arg0
, arg1
, ignore
);
10630 case BUILT_IN_FPUTS
:
10631 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, false, NULL_TREE
);
10633 case BUILT_IN_FPUTS_UNLOCKED
:
10634 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, true, NULL_TREE
);
10636 case BUILT_IN_STRSTR
:
10637 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
10639 case BUILT_IN_STRCAT
:
10640 return fold_builtin_strcat (loc
, arg0
, arg1
);
10642 case BUILT_IN_STRSPN
:
10643 return fold_builtin_strspn (loc
, arg0
, arg1
);
10645 case BUILT_IN_STRCSPN
:
10646 return fold_builtin_strcspn (loc
, arg0
, arg1
);
10648 case BUILT_IN_STRCHR
:
10649 case BUILT_IN_INDEX
:
10650 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
10652 case BUILT_IN_STRRCHR
:
10653 case BUILT_IN_RINDEX
:
10654 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
10656 case BUILT_IN_STRCPY
:
10657 return fold_builtin_strcpy (loc
, fndecl
, arg0
, arg1
, NULL_TREE
);
10659 case BUILT_IN_STPCPY
:
10662 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
10666 return build_call_expr_loc (loc
, fn
, 2, arg0
, arg1
);
10669 return fold_builtin_stpcpy (loc
, fndecl
, arg0
, arg1
);
10672 case BUILT_IN_STRCMP
:
10673 return fold_builtin_strcmp (loc
, arg0
, arg1
);
10675 case BUILT_IN_STRPBRK
:
10676 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
10678 case BUILT_IN_EXPECT
:
10679 return fold_builtin_expect (loc
, arg0
, arg1
);
10681 CASE_FLT_FN (BUILT_IN_POW
):
10682 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
10684 CASE_FLT_FN (BUILT_IN_POWI
):
10685 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
10687 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10688 return fold_builtin_copysign (loc
, fndecl
, arg0
, arg1
, type
);
10690 CASE_FLT_FN (BUILT_IN_FMIN
):
10691 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
10693 CASE_FLT_FN (BUILT_IN_FMAX
):
10694 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
10696 case BUILT_IN_ISGREATER
:
10697 return fold_builtin_unordered_cmp (loc
, fndecl
,
10698 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10699 case BUILT_IN_ISGREATEREQUAL
:
10700 return fold_builtin_unordered_cmp (loc
, fndecl
,
10701 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10702 case BUILT_IN_ISLESS
:
10703 return fold_builtin_unordered_cmp (loc
, fndecl
,
10704 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10705 case BUILT_IN_ISLESSEQUAL
:
10706 return fold_builtin_unordered_cmp (loc
, fndecl
,
10707 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10708 case BUILT_IN_ISLESSGREATER
:
10709 return fold_builtin_unordered_cmp (loc
, fndecl
,
10710 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10711 case BUILT_IN_ISUNORDERED
:
10712 return fold_builtin_unordered_cmp (loc
, fndecl
,
10713 arg0
, arg1
, UNORDERED_EXPR
,
10716 /* We do the folding for va_start in the expander. */
10717 case BUILT_IN_VA_START
:
10720 case BUILT_IN_SPRINTF
:
10721 return fold_builtin_sprintf (loc
, arg0
, arg1
, NULL_TREE
, ignore
);
10723 case BUILT_IN_OBJECT_SIZE
:
10724 return fold_builtin_object_size (arg0
, arg1
);
10726 case BUILT_IN_PRINTF
:
10727 case BUILT_IN_PRINTF_UNLOCKED
:
10728 case BUILT_IN_VPRINTF
:
10729 return fold_builtin_printf (loc
, fndecl
, arg0
, arg1
, ignore
, fcode
);
10731 case BUILT_IN_PRINTF_CHK
:
10732 case BUILT_IN_VPRINTF_CHK
:
10733 if (!validate_arg (arg0
, INTEGER_TYPE
)
10734 || TREE_SIDE_EFFECTS (arg0
))
10737 return fold_builtin_printf (loc
, fndecl
,
10738 arg1
, NULL_TREE
, ignore
, fcode
);
10741 case BUILT_IN_FPRINTF
:
10742 case BUILT_IN_FPRINTF_UNLOCKED
:
10743 case BUILT_IN_VFPRINTF
:
10744 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, NULL_TREE
,
10747 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
10748 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
10750 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
10751 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
10759 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10760 and ARG2. IGNORE is true if the result of the function call is ignored.
10761 This function returns NULL_TREE if no simplification was possible. */
10764 fold_builtin_3 (location_t loc
, tree fndecl
,
10765 tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10767 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10768 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10772 CASE_FLT_FN (BUILT_IN_SINCOS
):
10773 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
10775 CASE_FLT_FN (BUILT_IN_FMA
):
10776 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
10779 CASE_FLT_FN (BUILT_IN_REMQUO
):
10780 if (validate_arg (arg0
, REAL_TYPE
)
10781 && validate_arg (arg1
, REAL_TYPE
)
10782 && validate_arg (arg2
, POINTER_TYPE
))
10783 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10786 case BUILT_IN_MEMSET
:
10787 return fold_builtin_memset (loc
, arg0
, arg1
, arg2
, type
, ignore
);
10789 case BUILT_IN_BCOPY
:
10790 return fold_builtin_memory_op (loc
, arg1
, arg0
, arg2
,
10791 void_type_node
, true, /*endp=*/3);
10793 case BUILT_IN_MEMCPY
:
10794 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10795 type
, ignore
, /*endp=*/0);
10797 case BUILT_IN_MEMPCPY
:
10798 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10799 type
, ignore
, /*endp=*/1);
10801 case BUILT_IN_MEMMOVE
:
10802 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10803 type
, ignore
, /*endp=*/3);
10805 case BUILT_IN_STRNCAT
:
10806 return fold_builtin_strncat (loc
, arg0
, arg1
, arg2
);
10808 case BUILT_IN_STRNCPY
:
10809 return fold_builtin_strncpy (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10811 case BUILT_IN_STRNCMP
:
10812 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
10814 case BUILT_IN_MEMCHR
:
10815 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
10817 case BUILT_IN_BCMP
:
10818 case BUILT_IN_MEMCMP
:
10819 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
10821 case BUILT_IN_SPRINTF
:
10822 return fold_builtin_sprintf (loc
, arg0
, arg1
, arg2
, ignore
);
10824 case BUILT_IN_SNPRINTF
:
10825 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, NULL_TREE
, ignore
);
10827 case BUILT_IN_STRCPY_CHK
:
10828 case BUILT_IN_STPCPY_CHK
:
10829 return fold_builtin_stxcpy_chk (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10832 case BUILT_IN_STRCAT_CHK
:
10833 return fold_builtin_strcat_chk (loc
, fndecl
, arg0
, arg1
, arg2
);
10835 case BUILT_IN_PRINTF_CHK
:
10836 case BUILT_IN_VPRINTF_CHK
:
10837 if (!validate_arg (arg0
, INTEGER_TYPE
)
10838 || TREE_SIDE_EFFECTS (arg0
))
10841 return fold_builtin_printf (loc
, fndecl
, arg1
, arg2
, ignore
, fcode
);
10844 case BUILT_IN_FPRINTF
:
10845 case BUILT_IN_FPRINTF_UNLOCKED
:
10846 case BUILT_IN_VFPRINTF
:
10847 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, arg2
,
10850 case BUILT_IN_FPRINTF_CHK
:
10851 case BUILT_IN_VFPRINTF_CHK
:
10852 if (!validate_arg (arg1
, INTEGER_TYPE
)
10853 || TREE_SIDE_EFFECTS (arg1
))
10856 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, NULL_TREE
,
10865 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10866 ARG2, and ARG3. IGNORE is true if the result of the function call is
10867 ignored. This function returns NULL_TREE if no simplification was
10871 fold_builtin_4 (location_t loc
, tree fndecl
,
10872 tree arg0
, tree arg1
, tree arg2
, tree arg3
, bool ignore
)
10874 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10878 case BUILT_IN_MEMCPY_CHK
:
10879 case BUILT_IN_MEMPCPY_CHK
:
10880 case BUILT_IN_MEMMOVE_CHK
:
10881 case BUILT_IN_MEMSET_CHK
:
10882 return fold_builtin_memory_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
,
10884 DECL_FUNCTION_CODE (fndecl
));
10886 case BUILT_IN_STRNCPY_CHK
:
10887 case BUILT_IN_STPNCPY_CHK
:
10888 return fold_builtin_stxncpy_chk (loc
, arg0
, arg1
, arg2
, arg3
, NULL_TREE
,
10891 case BUILT_IN_STRNCAT_CHK
:
10892 return fold_builtin_strncat_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
);
10894 case BUILT_IN_SNPRINTF
:
10895 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, arg3
, ignore
);
10897 case BUILT_IN_FPRINTF_CHK
:
10898 case BUILT_IN_VFPRINTF_CHK
:
10899 if (!validate_arg (arg1
, INTEGER_TYPE
)
10900 || TREE_SIDE_EFFECTS (arg1
))
10903 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, arg3
,
10913 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10914 arguments, where NARGS <= 4. IGNORE is true if the result of the
10915 function call is ignored. This function returns NULL_TREE if no
10916 simplification was possible. Note that this only folds builtins with
10917 fixed argument patterns. Foldings that do varargs-to-varargs
10918 transformations, or that match calls with more than 4 arguments,
10919 need to be handled with fold_builtin_varargs instead. */
10921 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10924 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10926 tree ret
= NULL_TREE
;
10931 ret
= fold_builtin_0 (loc
, fndecl
, ignore
);
10934 ret
= fold_builtin_1 (loc
, fndecl
, args
[0], ignore
);
10937 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1], ignore
);
10940 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2], ignore
);
10943 ret
= fold_builtin_4 (loc
, fndecl
, args
[0], args
[1], args
[2], args
[3],
10951 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10952 SET_EXPR_LOCATION (ret
, loc
);
10953 TREE_NO_WARNING (ret
) = 1;
10959 /* Builtins with folding operations that operate on "..." arguments
10960 need special handling; we need to store the arguments in a convenient
10961 data structure before attempting any folding. Fortunately there are
10962 only a few builtins that fall into this category. FNDECL is the
10963 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10964 result of the function call is ignored. */
10967 fold_builtin_varargs (location_t loc
, tree fndecl
, tree exp
,
10968 bool ignore ATTRIBUTE_UNUSED
)
10970 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10971 tree ret
= NULL_TREE
;
10975 case BUILT_IN_SPRINTF_CHK
:
10976 case BUILT_IN_VSPRINTF_CHK
:
10977 ret
= fold_builtin_sprintf_chk (loc
, exp
, fcode
);
10980 case BUILT_IN_SNPRINTF_CHK
:
10981 case BUILT_IN_VSNPRINTF_CHK
:
10982 ret
= fold_builtin_snprintf_chk (loc
, exp
, NULL_TREE
, fcode
);
10985 case BUILT_IN_FPCLASSIFY
:
10986 ret
= fold_builtin_fpclassify (loc
, exp
);
10994 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10995 SET_EXPR_LOCATION (ret
, loc
);
10996 TREE_NO_WARNING (ret
) = 1;
11002 /* Return true if FNDECL shouldn't be folded right now.
11003 If a built-in function has an inline attribute always_inline
11004 wrapper, defer folding it after always_inline functions have
11005 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11006 might not be performed. */
11009 avoid_folding_inline_builtin (tree fndecl
)
11011 return (DECL_DECLARED_INLINE_P (fndecl
)
11012 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
11014 && !cfun
->always_inline_functions_inlined
11015 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
11018 /* A wrapper function for builtin folding that prevents warnings for
11019 "statement without effect" and the like, caused by removing the
11020 call node earlier than the warning is generated. */
11023 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
11025 tree ret
= NULL_TREE
;
11026 tree fndecl
= get_callee_fndecl (exp
);
11028 && TREE_CODE (fndecl
) == FUNCTION_DECL
11029 && DECL_BUILT_IN (fndecl
)
11030 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11031 yet. Defer folding until we see all the arguments
11032 (after inlining). */
11033 && !CALL_EXPR_VA_ARG_PACK (exp
))
11035 int nargs
= call_expr_nargs (exp
);
11037 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11038 instead last argument is __builtin_va_arg_pack (). Defer folding
11039 even in that case, until arguments are finalized. */
11040 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
11042 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
11044 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11045 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11046 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11050 if (avoid_folding_inline_builtin (fndecl
))
11053 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11054 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
11055 CALL_EXPR_ARGP (exp
), ignore
);
11058 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11060 tree
*args
= CALL_EXPR_ARGP (exp
);
11061 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
11064 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, ignore
);
11072 /* Conveniently construct a function call expression. FNDECL names the
11073 function to be called and N arguments are passed in the array
11077 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
11079 tree fntype
= TREE_TYPE (fndecl
);
11080 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
11082 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
11085 /* Conveniently construct a function call expression. FNDECL names the
11086 function to be called and the arguments are passed in the vector
11090 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
11092 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
11093 vec_safe_address (vec
));
11097 /* Conveniently construct a function call expression. FNDECL names the
11098 function to be called, N is the number of arguments, and the "..."
11099 parameters are the argument expressions. */
11102 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
11105 tree
*argarray
= XALLOCAVEC (tree
, n
);
11109 for (i
= 0; i
< n
; i
++)
11110 argarray
[i
] = va_arg (ap
, tree
);
11112 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
11115 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11116 varargs macros aren't supported by all bootstrap compilers. */
11119 build_call_expr (tree fndecl
, int n
, ...)
11122 tree
*argarray
= XALLOCAVEC (tree
, n
);
11126 for (i
= 0; i
< n
; i
++)
11127 argarray
[i
] = va_arg (ap
, tree
);
11129 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
11132 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11133 N arguments are passed in the array ARGARRAY. */
11136 fold_builtin_call_array (location_t loc
, tree type
,
11141 tree ret
= NULL_TREE
;
11144 if (TREE_CODE (fn
) == ADDR_EXPR
)
11146 tree fndecl
= TREE_OPERAND (fn
, 0);
11147 if (TREE_CODE (fndecl
) == FUNCTION_DECL
11148 && DECL_BUILT_IN (fndecl
))
11150 /* If last argument is __builtin_va_arg_pack (), arguments to this
11151 function are not finalized yet. Defer folding until they are. */
11152 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
11154 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
11156 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11157 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11158 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11159 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11161 if (avoid_folding_inline_builtin (fndecl
))
11162 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11163 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11165 ret
= targetm
.fold_builtin (fndecl
, n
, argarray
, false);
11169 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11171 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11173 /* First try the transformations that don't require consing up
11175 ret
= fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
11180 /* If we got this far, we need to build an exp. */
11181 exp
= build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11182 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, false);
11183 return ret
? ret
: exp
;
11187 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11190 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11191 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11192 of arguments in ARGS to be omitted. OLDNARGS is the number of
11193 elements in ARGS. */
11196 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
11197 int skip
, tree fndecl
, int n
, va_list newargs
)
11199 int nargs
= oldnargs
- skip
+ n
;
11206 buffer
= XALLOCAVEC (tree
, nargs
);
11207 for (i
= 0; i
< n
; i
++)
11208 buffer
[i
] = va_arg (newargs
, tree
);
11209 for (j
= skip
; j
< oldnargs
; j
++, i
++)
11210 buffer
[i
] = args
[j
];
11213 buffer
= args
+ skip
;
11215 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
11218 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11219 list ARGS along with N new arguments specified as the "..."
11220 parameters. SKIP is the number of arguments in ARGS to be omitted.
11221 OLDNARGS is the number of elements in ARGS. */
11224 rewrite_call_expr_array (location_t loc
, int oldnargs
, tree
*args
,
11225 int skip
, tree fndecl
, int n
, ...)
11231 t
= rewrite_call_expr_valist (loc
, oldnargs
, args
, skip
, fndecl
, n
, ap
);
11237 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11238 along with N new arguments specified as the "..." parameters. SKIP
11239 is the number of arguments in EXP to be omitted. This function is used
11240 to do varargs-to-varargs transformations. */
11243 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
11249 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
11250 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
11256 /* Validate a single argument ARG against a tree code CODE representing
11260 validate_arg (const_tree arg
, enum tree_code code
)
11264 else if (code
== POINTER_TYPE
)
11265 return POINTER_TYPE_P (TREE_TYPE (arg
));
11266 else if (code
== INTEGER_TYPE
)
11267 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
11268 return code
== TREE_CODE (TREE_TYPE (arg
));
11271 /* This function validates the types of a function call argument list
11272 against a specified list of tree_codes. If the last specifier is a 0,
11273 that represents an ellipses, otherwise the last specifier must be a
11276 This is the GIMPLE version of validate_arglist. Eventually we want to
11277 completely convert builtins.c to work from GIMPLEs and the tree based
11278 validate_arglist will then be removed. */
11281 validate_gimple_arglist (const_gimple call
, ...)
11283 enum tree_code code
;
11289 va_start (ap
, call
);
11294 code
= (enum tree_code
) va_arg (ap
, int);
11298 /* This signifies an ellipses, any further arguments are all ok. */
11302 /* This signifies an endlink, if no arguments remain, return
11303 true, otherwise return false. */
11304 res
= (i
== gimple_call_num_args (call
));
11307 /* If no parameters remain or the parameter's code does not
11308 match the specified code, return false. Otherwise continue
11309 checking any remaining arguments. */
11310 arg
= gimple_call_arg (call
, i
++);
11311 if (!validate_arg (arg
, code
))
11318 /* We need gotos here since we can only have one VA_CLOSE in a
11326 /* This function validates the types of a function call argument list
11327 against a specified list of tree_codes. If the last specifier is a 0,
11328 that represents an ellipses, otherwise the last specifier must be a
11332 validate_arglist (const_tree callexpr
, ...)
11334 enum tree_code code
;
11337 const_call_expr_arg_iterator iter
;
11340 va_start (ap
, callexpr
);
11341 init_const_call_expr_arg_iterator (callexpr
, &iter
);
11345 code
= (enum tree_code
) va_arg (ap
, int);
11349 /* This signifies an ellipses, any further arguments are all ok. */
11353 /* This signifies an endlink, if no arguments remain, return
11354 true, otherwise return false. */
11355 res
= !more_const_call_expr_args_p (&iter
);
11358 /* If no parameters remain or the parameter's code does not
11359 match the specified code, return false. Otherwise continue
11360 checking any remaining arguments. */
11361 arg
= next_const_call_expr_arg (&iter
);
11362 if (!validate_arg (arg
, code
))
11369 /* We need gotos here since we can only have one VA_CLOSE in a
11377 /* Default target-specific builtin expander that does nothing. */
11380 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
11381 rtx target ATTRIBUTE_UNUSED
,
11382 rtx subtarget ATTRIBUTE_UNUSED
,
11383 enum machine_mode mode ATTRIBUTE_UNUSED
,
11384 int ignore ATTRIBUTE_UNUSED
)
11389 /* Returns true is EXP represents data that would potentially reside
11390 in a readonly section. */
11393 readonly_data_expr (tree exp
)
11397 if (TREE_CODE (exp
) != ADDR_EXPR
)
11400 exp
= get_base_address (TREE_OPERAND (exp
, 0));
11404 /* Make sure we call decl_readonly_section only for trees it
11405 can handle (since it returns true for everything it doesn't
11407 if (TREE_CODE (exp
) == STRING_CST
11408 || TREE_CODE (exp
) == CONSTRUCTOR
11409 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
11410 return decl_readonly_section (exp
, 0);
11415 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11416 to the call, and TYPE is its return type.
11418 Return NULL_TREE if no simplification was possible, otherwise return the
11419 simplified form of the call as a tree.
11421 The simplified form may be a constant or other expression which
11422 computes the same value, but in a more efficient manner (including
11423 calls to other builtin functions).
11425 The call may contain arguments which need to be evaluated, but
11426 which are not useful to determine the result of the call. In
11427 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11428 COMPOUND_EXPR will be an argument which must be evaluated.
11429 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11430 COMPOUND_EXPR in the chain will contain the tree for the simplified
11431 form of the builtin function call. */
11434 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
11436 if (!validate_arg (s1
, POINTER_TYPE
)
11437 || !validate_arg (s2
, POINTER_TYPE
))
11442 const char *p1
, *p2
;
11444 p2
= c_getstr (s2
);
11448 p1
= c_getstr (s1
);
11451 const char *r
= strstr (p1
, p2
);
11455 return build_int_cst (TREE_TYPE (s1
), 0);
11457 /* Return an offset into the constant string argument. */
11458 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11459 return fold_convert_loc (loc
, type
, tem
);
11462 /* The argument is const char *, and the result is char *, so we need
11463 a type conversion here to avoid a warning. */
11465 return fold_convert_loc (loc
, type
, s1
);
11470 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11474 /* New argument list transforming strstr(s1, s2) to
11475 strchr(s1, s2[0]). */
11476 return build_call_expr_loc (loc
, fn
, 2, s1
,
11477 build_int_cst (integer_type_node
, p2
[0]));
11481 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11482 the call, and TYPE is its return type.
11484 Return NULL_TREE if no simplification was possible, otherwise return the
11485 simplified form of the call as a tree.
11487 The simplified form may be a constant or other expression which
11488 computes the same value, but in a more efficient manner (including
11489 calls to other builtin functions).
11491 The call may contain arguments which need to be evaluated, but
11492 which are not useful to determine the result of the call. In
11493 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11494 COMPOUND_EXPR will be an argument which must be evaluated.
11495 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11496 COMPOUND_EXPR in the chain will contain the tree for the simplified
11497 form of the builtin function call. */
11500 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
11502 if (!validate_arg (s1
, POINTER_TYPE
)
11503 || !validate_arg (s2
, INTEGER_TYPE
))
11509 if (TREE_CODE (s2
) != INTEGER_CST
)
11512 p1
= c_getstr (s1
);
11519 if (target_char_cast (s2
, &c
))
11522 r
= strchr (p1
, c
);
11525 return build_int_cst (TREE_TYPE (s1
), 0);
11527 /* Return an offset into the constant string argument. */
11528 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11529 return fold_convert_loc (loc
, type
, tem
);
11535 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11536 the call, and TYPE is its return type.
11538 Return NULL_TREE if no simplification was possible, otherwise return the
11539 simplified form of the call as a tree.
11541 The simplified form may be a constant or other expression which
11542 computes the same value, but in a more efficient manner (including
11543 calls to other builtin functions).
11545 The call may contain arguments which need to be evaluated, but
11546 which are not useful to determine the result of the call. In
11547 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11548 COMPOUND_EXPR will be an argument which must be evaluated.
11549 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11550 COMPOUND_EXPR in the chain will contain the tree for the simplified
11551 form of the builtin function call. */
11554 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
11556 if (!validate_arg (s1
, POINTER_TYPE
)
11557 || !validate_arg (s2
, INTEGER_TYPE
))
11564 if (TREE_CODE (s2
) != INTEGER_CST
)
11567 p1
= c_getstr (s1
);
11574 if (target_char_cast (s2
, &c
))
11577 r
= strrchr (p1
, c
);
11580 return build_int_cst (TREE_TYPE (s1
), 0);
11582 /* Return an offset into the constant string argument. */
11583 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11584 return fold_convert_loc (loc
, type
, tem
);
11587 if (! integer_zerop (s2
))
11590 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11594 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11595 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
11599 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11600 to the call, and TYPE is its return type.
11602 Return NULL_TREE if no simplification was possible, otherwise return the
11603 simplified form of the call as a tree.
11605 The simplified form may be a constant or other expression which
11606 computes the same value, but in a more efficient manner (including
11607 calls to other builtin functions).
11609 The call may contain arguments which need to be evaluated, but
11610 which are not useful to determine the result of the call. In
11611 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11612 COMPOUND_EXPR will be an argument which must be evaluated.
11613 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11614 COMPOUND_EXPR in the chain will contain the tree for the simplified
11615 form of the builtin function call. */
11618 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
11620 if (!validate_arg (s1
, POINTER_TYPE
)
11621 || !validate_arg (s2
, POINTER_TYPE
))
11626 const char *p1
, *p2
;
11628 p2
= c_getstr (s2
);
11632 p1
= c_getstr (s1
);
11635 const char *r
= strpbrk (p1
, p2
);
11639 return build_int_cst (TREE_TYPE (s1
), 0);
11641 /* Return an offset into the constant string argument. */
11642 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11643 return fold_convert_loc (loc
, type
, tem
);
11647 /* strpbrk(x, "") == NULL.
11648 Evaluate and ignore s1 in case it had side-effects. */
11649 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
11652 return NULL_TREE
; /* Really call strpbrk. */
11654 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11658 /* New argument list transforming strpbrk(s1, s2) to
11659 strchr(s1, s2[0]). */
11660 return build_call_expr_loc (loc
, fn
, 2, s1
,
11661 build_int_cst (integer_type_node
, p2
[0]));
11665 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11668 Return NULL_TREE if no simplification was possible, otherwise return the
11669 simplified form of the call as a tree.
11671 The simplified form may be a constant or other expression which
11672 computes the same value, but in a more efficient manner (including
11673 calls to other builtin functions).
11675 The call may contain arguments which need to be evaluated, but
11676 which are not useful to determine the result of the call. In
11677 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11678 COMPOUND_EXPR will be an argument which must be evaluated.
11679 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11680 COMPOUND_EXPR in the chain will contain the tree for the simplified
11681 form of the builtin function call. */
11684 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED
, tree dst
, tree src
)
11686 if (!validate_arg (dst
, POINTER_TYPE
)
11687 || !validate_arg (src
, POINTER_TYPE
))
11691 const char *p
= c_getstr (src
);
11693 /* If the string length is zero, return the dst parameter. */
11694 if (p
&& *p
== '\0')
11697 if (optimize_insn_for_speed_p ())
11699 /* See if we can store by pieces into (dst + strlen(dst)). */
11701 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
11702 tree strcpy_fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
11704 if (!strlen_fn
|| !strcpy_fn
)
11707 /* If we don't have a movstr we don't want to emit an strcpy
11708 call. We have to do that if the length of the source string
11709 isn't computable (in that case we can use memcpy probably
11710 later expanding to a sequence of mov instructions). If we
11711 have movstr instructions we can emit strcpy calls. */
11714 tree len
= c_strlen (src
, 1);
11715 if (! len
|| TREE_SIDE_EFFECTS (len
))
11719 /* Stabilize the argument list. */
11720 dst
= builtin_save_expr (dst
);
11722 /* Create strlen (dst). */
11723 newdst
= build_call_expr_loc (loc
, strlen_fn
, 1, dst
);
11724 /* Create (dst p+ strlen (dst)). */
11726 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
11727 newdst
= builtin_save_expr (newdst
);
11729 call
= build_call_expr_loc (loc
, strcpy_fn
, 2, newdst
, src
);
11730 return build2 (COMPOUND_EXPR
, TREE_TYPE (dst
), call
, dst
);
11736 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11737 arguments to the call.
11739 Return NULL_TREE if no simplification was possible, otherwise return the
11740 simplified form of the call as a tree.
11742 The simplified form may be a constant or other expression which
11743 computes the same value, but in a more efficient manner (including
11744 calls to other builtin functions).
11746 The call may contain arguments which need to be evaluated, but
11747 which are not useful to determine the result of the call. In
11748 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11749 COMPOUND_EXPR will be an argument which must be evaluated.
11750 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11751 COMPOUND_EXPR in the chain will contain the tree for the simplified
11752 form of the builtin function call. */
11755 fold_builtin_strncat (location_t loc
, tree dst
, tree src
, tree len
)
11757 if (!validate_arg (dst
, POINTER_TYPE
)
11758 || !validate_arg (src
, POINTER_TYPE
)
11759 || !validate_arg (len
, INTEGER_TYPE
))
11763 const char *p
= c_getstr (src
);
11765 /* If the requested length is zero, or the src parameter string
11766 length is zero, return the dst parameter. */
11767 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11768 return omit_two_operands_loc (loc
, TREE_TYPE (dst
), dst
, src
, len
);
11770 /* If the requested len is greater than or equal to the string
11771 length, call strcat. */
11772 if (TREE_CODE (len
) == INTEGER_CST
&& p
11773 && compare_tree_int (len
, strlen (p
)) >= 0)
11775 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
11777 /* If the replacement _DECL isn't initialized, don't do the
11782 return build_call_expr_loc (loc
, fn
, 2, dst
, src
);
11788 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11791 Return NULL_TREE if no simplification was possible, otherwise return the
11792 simplified form of the call as a tree.
11794 The simplified form may be a constant or other expression which
11795 computes the same value, but in a more efficient manner (including
11796 calls to other builtin functions).
11798 The call may contain arguments which need to be evaluated, but
11799 which are not useful to determine the result of the call. In
11800 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11801 COMPOUND_EXPR will be an argument which must be evaluated.
11802 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11803 COMPOUND_EXPR in the chain will contain the tree for the simplified
11804 form of the builtin function call. */
11807 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
11809 if (!validate_arg (s1
, POINTER_TYPE
)
11810 || !validate_arg (s2
, POINTER_TYPE
))
11814 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11816 /* If both arguments are constants, evaluate at compile-time. */
11819 const size_t r
= strspn (p1
, p2
);
11820 return build_int_cst (size_type_node
, r
);
11823 /* If either argument is "", return NULL_TREE. */
11824 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11825 /* Evaluate and ignore both arguments in case either one has
11827 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
11833 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11836 Return NULL_TREE if no simplification was possible, otherwise return the
11837 simplified form of the call as a tree.
11839 The simplified form may be a constant or other expression which
11840 computes the same value, but in a more efficient manner (including
11841 calls to other builtin functions).
11843 The call may contain arguments which need to be evaluated, but
11844 which are not useful to determine the result of the call. In
11845 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11846 COMPOUND_EXPR will be an argument which must be evaluated.
11847 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11848 COMPOUND_EXPR in the chain will contain the tree for the simplified
11849 form of the builtin function call. */
11852 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
11854 if (!validate_arg (s1
, POINTER_TYPE
)
11855 || !validate_arg (s2
, POINTER_TYPE
))
11859 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11861 /* If both arguments are constants, evaluate at compile-time. */
11864 const size_t r
= strcspn (p1
, p2
);
11865 return build_int_cst (size_type_node
, r
);
11868 /* If the first argument is "", return NULL_TREE. */
11869 if (p1
&& *p1
== '\0')
11871 /* Evaluate and ignore argument s2 in case it has
11873 return omit_one_operand_loc (loc
, size_type_node
,
11874 size_zero_node
, s2
);
11877 /* If the second argument is "", return __builtin_strlen(s1). */
11878 if (p2
&& *p2
== '\0')
11880 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
11882 /* If the replacement _DECL isn't initialized, don't do the
11887 return build_call_expr_loc (loc
, fn
, 1, s1
);
11893 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11894 to the call. IGNORE is true if the value returned
11895 by the builtin will be ignored. UNLOCKED is true is true if this
11896 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11897 the known length of the string. Return NULL_TREE if no simplification
11901 fold_builtin_fputs (location_t loc
, tree arg0
, tree arg1
,
11902 bool ignore
, bool unlocked
, tree len
)
11904 /* If we're using an unlocked function, assume the other unlocked
11905 functions exist explicitly. */
11906 tree
const fn_fputc
= (unlocked
11907 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
11908 : builtin_decl_implicit (BUILT_IN_FPUTC
));
11909 tree
const fn_fwrite
= (unlocked
11910 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
11911 : builtin_decl_implicit (BUILT_IN_FWRITE
));
11913 /* If the return value is used, don't do the transformation. */
11917 /* Verify the arguments in the original call. */
11918 if (!validate_arg (arg0
, POINTER_TYPE
)
11919 || !validate_arg (arg1
, POINTER_TYPE
))
11923 len
= c_strlen (arg0
, 0);
11925 /* Get the length of the string passed to fputs. If the length
11926 can't be determined, punt. */
11928 || TREE_CODE (len
) != INTEGER_CST
)
11931 switch (compare_tree_int (len
, 1))
11933 case -1: /* length is 0, delete the call entirely . */
11934 return omit_one_operand_loc (loc
, integer_type_node
,
11935 integer_zero_node
, arg1
);;
11937 case 0: /* length is 1, call fputc. */
11939 const char *p
= c_getstr (arg0
);
11944 return build_call_expr_loc (loc
, fn_fputc
, 2,
11946 (integer_type_node
, p
[0]), arg1
);
11952 case 1: /* length is greater than 1, call fwrite. */
11954 /* If optimizing for size keep fputs. */
11955 if (optimize_function_for_size_p (cfun
))
11957 /* New argument list transforming fputs(string, stream) to
11958 fwrite(string, 1, len, stream). */
11960 return build_call_expr_loc (loc
, fn_fwrite
, 4, arg0
,
11961 size_one_node
, len
, arg1
);
11966 gcc_unreachable ();
11971 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11972 produced. False otherwise. This is done so that we don't output the error
11973 or warning twice or three times. */
11976 fold_builtin_next_arg (tree exp
, bool va_start_p
)
11978 tree fntype
= TREE_TYPE (current_function_decl
);
11979 int nargs
= call_expr_nargs (exp
);
11981 /* There is good chance the current input_location points inside the
11982 definition of the va_start macro (perhaps on the token for
11983 builtin) in a system header, so warnings will not be emitted.
11984 Use the location in real source code. */
11985 source_location current_location
=
11986 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
11989 if (!stdarg_p (fntype
))
11991 error ("%<va_start%> used in function with fixed args");
11997 if (va_start_p
&& (nargs
!= 2))
11999 error ("wrong number of arguments to function %<va_start%>");
12002 arg
= CALL_EXPR_ARG (exp
, 1);
12004 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12005 when we checked the arguments and if needed issued a warning. */
12010 /* Evidently an out of date version of <stdarg.h>; can't validate
12011 va_start's second argument, but can still work as intended. */
12012 warning_at (current_location
,
12014 "%<__builtin_next_arg%> called without an argument");
12017 else if (nargs
> 1)
12019 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12022 arg
= CALL_EXPR_ARG (exp
, 0);
12025 if (TREE_CODE (arg
) == SSA_NAME
)
12026 arg
= SSA_NAME_VAR (arg
);
12028 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12029 or __builtin_next_arg (0) the first time we see it, after checking
12030 the arguments and if needed issuing a warning. */
12031 if (!integer_zerop (arg
))
12033 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
12035 /* Strip off all nops for the sake of the comparison. This
12036 is not quite the same as STRIP_NOPS. It does more.
12037 We must also strip off INDIRECT_EXPR for C++ reference
12039 while (CONVERT_EXPR_P (arg
)
12040 || TREE_CODE (arg
) == INDIRECT_REF
)
12041 arg
= TREE_OPERAND (arg
, 0);
12042 if (arg
!= last_parm
)
12044 /* FIXME: Sometimes with the tree optimizers we can get the
12045 not the last argument even though the user used the last
12046 argument. We just warn and set the arg to be the last
12047 argument so that we will get wrong-code because of
12049 warning_at (current_location
,
12051 "second parameter of %<va_start%> not last named argument");
12054 /* Undefined by C99 7.15.1.4p4 (va_start):
12055 "If the parameter parmN is declared with the register storage
12056 class, with a function or array type, or with a type that is
12057 not compatible with the type that results after application of
12058 the default argument promotions, the behavior is undefined."
12060 else if (DECL_REGISTER (arg
))
12062 warning_at (current_location
,
12064 "undefined behaviour when second parameter of "
12065 "%<va_start%> is declared with %<register%> storage");
12068 /* We want to verify the second parameter just once before the tree
12069 optimizers are run and then avoid keeping it in the tree,
12070 as otherwise we could warn even for correct code like:
12071 void foo (int i, ...)
12072 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12074 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
12076 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
12082 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12083 ORIG may be null if this is a 2-argument call. We don't attempt to
12084 simplify calls with more than 3 arguments.
12086 Return NULL_TREE if no simplification was possible, otherwise return the
12087 simplified form of the call as a tree. If IGNORED is true, it means that
12088 the caller does not use the returned value of the function. */
12091 fold_builtin_sprintf (location_t loc
, tree dest
, tree fmt
,
12092 tree orig
, int ignored
)
12095 const char *fmt_str
= NULL
;
12097 /* Verify the required arguments in the original call. We deal with two
12098 types of sprintf() calls: 'sprintf (str, fmt)' and
12099 'sprintf (dest, "%s", orig)'. */
12100 if (!validate_arg (dest
, POINTER_TYPE
)
12101 || !validate_arg (fmt
, POINTER_TYPE
))
12103 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12106 /* Check whether the format is a literal string constant. */
12107 fmt_str
= c_getstr (fmt
);
12108 if (fmt_str
== NULL
)
12112 retval
= NULL_TREE
;
12114 if (!init_target_chars ())
12117 /* If the format doesn't contain % args or %%, use strcpy. */
12118 if (strchr (fmt_str
, target_percent
) == NULL
)
12120 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12125 /* Don't optimize sprintf (buf, "abc", ptr++). */
12129 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12130 'format' is known to contain no % formats. */
12131 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12133 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12136 /* If the format is "%s", use strcpy if the result isn't used. */
12137 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12140 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12145 /* Don't crash on sprintf (str1, "%s"). */
12149 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12152 retval
= c_strlen (orig
, 1);
12153 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
12156 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12159 if (call
&& retval
)
12161 retval
= fold_convert_loc
12162 (loc
, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF
))),
12164 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12170 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12171 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12172 attempt to simplify calls with more than 4 arguments.
12174 Return NULL_TREE if no simplification was possible, otherwise return the
12175 simplified form of the call as a tree. If IGNORED is true, it means that
12176 the caller does not use the returned value of the function. */
12179 fold_builtin_snprintf (location_t loc
, tree dest
, tree destsize
, tree fmt
,
12180 tree orig
, int ignored
)
12183 const char *fmt_str
= NULL
;
12184 unsigned HOST_WIDE_INT destlen
;
12186 /* Verify the required arguments in the original call. We deal with two
12187 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12188 'snprintf (dest, cst, "%s", orig)'. */
12189 if (!validate_arg (dest
, POINTER_TYPE
)
12190 || !validate_arg (destsize
, INTEGER_TYPE
)
12191 || !validate_arg (fmt
, POINTER_TYPE
))
12193 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12196 if (!tree_fits_uhwi_p (destsize
))
12199 /* Check whether the format is a literal string constant. */
12200 fmt_str
= c_getstr (fmt
);
12201 if (fmt_str
== NULL
)
12205 retval
= NULL_TREE
;
12207 if (!init_target_chars ())
12210 destlen
= tree_to_uhwi (destsize
);
12212 /* If the format doesn't contain % args or %%, use strcpy. */
12213 if (strchr (fmt_str
, target_percent
) == NULL
)
12215 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12216 size_t len
= strlen (fmt_str
);
12218 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12222 /* We could expand this as
12223 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12225 memcpy (str, fmt_with_nul_at_cstm1, cst);
12226 but in the former case that might increase code size
12227 and in the latter case grow .rodata section too much.
12228 So punt for now. */
12229 if (len
>= destlen
)
12235 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12236 'format' is known to contain no % formats and
12237 strlen (fmt) < cst. */
12238 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12241 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12244 /* If the format is "%s", use strcpy if the result isn't used. */
12245 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12247 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12248 unsigned HOST_WIDE_INT origlen
;
12250 /* Don't crash on snprintf (str1, cst, "%s"). */
12254 retval
= c_strlen (orig
, 1);
12255 if (!retval
|| !tree_fits_uhwi_p (retval
))
12258 origlen
= tree_to_uhwi (retval
);
12259 /* We could expand this as
12260 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12262 memcpy (str1, str2_with_nul_at_cstm1, cst);
12263 but in the former case that might increase code size
12264 and in the latter case grow .rodata section too much.
12265 So punt for now. */
12266 if (origlen
>= destlen
)
12269 /* Convert snprintf (str1, cst, "%s", str2) into
12270 strcpy (str1, str2) if strlen (str2) < cst. */
12274 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12277 retval
= NULL_TREE
;
12280 if (call
&& retval
)
12282 tree fn
= builtin_decl_explicit (BUILT_IN_SNPRINTF
);
12283 retval
= fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fn
)), retval
);
12284 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12290 /* Expand a call EXP to __builtin_object_size. */
12293 expand_builtin_object_size (tree exp
)
12296 int object_size_type
;
12297 tree fndecl
= get_callee_fndecl (exp
);
12299 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12301 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12303 expand_builtin_trap ();
12307 ost
= CALL_EXPR_ARG (exp
, 1);
12310 if (TREE_CODE (ost
) != INTEGER_CST
12311 || tree_int_cst_sgn (ost
) < 0
12312 || compare_tree_int (ost
, 3) > 0)
12314 error ("%Klast argument of %D is not integer constant between 0 and 3",
12316 expand_builtin_trap ();
12320 object_size_type
= tree_to_shwi (ost
);
12322 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
12325 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12326 FCODE is the BUILT_IN_* to use.
12327 Return NULL_RTX if we failed; the caller should emit a normal call,
12328 otherwise try to get the result in TARGET, if convenient (and in
12329 mode MODE if that's convenient). */
12332 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
12333 enum built_in_function fcode
)
12335 tree dest
, src
, len
, size
;
12337 if (!validate_arglist (exp
,
12339 fcode
== BUILT_IN_MEMSET_CHK
12340 ? INTEGER_TYPE
: POINTER_TYPE
,
12341 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12344 dest
= CALL_EXPR_ARG (exp
, 0);
12345 src
= CALL_EXPR_ARG (exp
, 1);
12346 len
= CALL_EXPR_ARG (exp
, 2);
12347 size
= CALL_EXPR_ARG (exp
, 3);
12349 if (! tree_fits_uhwi_p (size
))
12352 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
12356 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
12358 warning_at (tree_nonartificial_location (exp
),
12359 0, "%Kcall to %D will always overflow destination buffer",
12360 exp
, get_callee_fndecl (exp
));
12365 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12366 mem{cpy,pcpy,move,set} is available. */
12369 case BUILT_IN_MEMCPY_CHK
:
12370 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12372 case BUILT_IN_MEMPCPY_CHK
:
12373 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12375 case BUILT_IN_MEMMOVE_CHK
:
12376 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12378 case BUILT_IN_MEMSET_CHK
:
12379 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12388 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
12389 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12390 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12391 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12393 else if (fcode
== BUILT_IN_MEMSET_CHK
)
12397 unsigned int dest_align
= get_pointer_alignment (dest
);
12399 /* If DEST is not a pointer type, call the normal function. */
12400 if (dest_align
== 0)
12403 /* If SRC and DEST are the same (and not volatile), do nothing. */
12404 if (operand_equal_p (src
, dest
, 0))
12408 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12410 /* Evaluate and ignore LEN in case it has side-effects. */
12411 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
12412 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
12415 expr
= fold_build_pointer_plus (dest
, len
);
12416 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
12419 /* __memmove_chk special case. */
12420 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
12422 unsigned int src_align
= get_pointer_alignment (src
);
12424 if (src_align
== 0)
12427 /* If src is categorized for a readonly section we can use
12428 normal __memcpy_chk. */
12429 if (readonly_data_expr (src
))
12431 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12434 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
12435 dest
, src
, len
, size
);
12436 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12437 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12438 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12445 /* Emit warning if a buffer overflow is detected at compile time. */
12448 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
12452 location_t loc
= tree_nonartificial_location (exp
);
12456 case BUILT_IN_STRCPY_CHK
:
12457 case BUILT_IN_STPCPY_CHK
:
12458 /* For __strcat_chk the warning will be emitted only if overflowing
12459 by at least strlen (dest) + 1 bytes. */
12460 case BUILT_IN_STRCAT_CHK
:
12461 len
= CALL_EXPR_ARG (exp
, 1);
12462 size
= CALL_EXPR_ARG (exp
, 2);
12465 case BUILT_IN_STRNCAT_CHK
:
12466 case BUILT_IN_STRNCPY_CHK
:
12467 case BUILT_IN_STPNCPY_CHK
:
12468 len
= CALL_EXPR_ARG (exp
, 2);
12469 size
= CALL_EXPR_ARG (exp
, 3);
12471 case BUILT_IN_SNPRINTF_CHK
:
12472 case BUILT_IN_VSNPRINTF_CHK
:
12473 len
= CALL_EXPR_ARG (exp
, 1);
12474 size
= CALL_EXPR_ARG (exp
, 3);
12477 gcc_unreachable ();
12483 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
12488 len
= c_strlen (len
, 1);
12489 if (! len
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
12492 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
12494 tree src
= CALL_EXPR_ARG (exp
, 1);
12495 if (! src
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
12497 src
= c_strlen (src
, 1);
12498 if (! src
|| ! tree_fits_uhwi_p (src
))
12500 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
12501 exp
, get_callee_fndecl (exp
));
12504 else if (tree_int_cst_lt (src
, size
))
12507 else if (! tree_fits_uhwi_p (len
) || ! tree_int_cst_lt (size
, len
))
12510 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
12511 exp
, get_callee_fndecl (exp
));
12514 /* Emit warning if a buffer overflow is detected at compile time
12515 in __sprintf_chk/__vsprintf_chk calls. */
12518 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
12520 tree size
, len
, fmt
;
12521 const char *fmt_str
;
12522 int nargs
= call_expr_nargs (exp
);
12524 /* Verify the required arguments in the original call. */
12528 size
= CALL_EXPR_ARG (exp
, 2);
12529 fmt
= CALL_EXPR_ARG (exp
, 3);
12531 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
12534 /* Check whether the format is a literal string constant. */
12535 fmt_str
= c_getstr (fmt
);
12536 if (fmt_str
== NULL
)
12539 if (!init_target_chars ())
12542 /* If the format doesn't contain % args or %%, we know its size. */
12543 if (strchr (fmt_str
, target_percent
) == 0)
12544 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12545 /* If the format is "%s" and first ... argument is a string literal,
12547 else if (fcode
== BUILT_IN_SPRINTF_CHK
12548 && strcmp (fmt_str
, target_percent_s
) == 0)
12554 arg
= CALL_EXPR_ARG (exp
, 4);
12555 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
12558 len
= c_strlen (arg
, 1);
12559 if (!len
|| ! tree_fits_uhwi_p (len
))
12565 if (! tree_int_cst_lt (len
, size
))
12566 warning_at (tree_nonartificial_location (exp
),
12567 0, "%Kcall to %D will always overflow destination buffer",
12568 exp
, get_callee_fndecl (exp
));
12571 /* Emit warning if a free is called with address of a variable. */
12574 maybe_emit_free_warning (tree exp
)
12576 tree arg
= CALL_EXPR_ARG (exp
, 0);
12579 if (TREE_CODE (arg
) != ADDR_EXPR
)
12582 arg
= get_base_address (TREE_OPERAND (arg
, 0));
12583 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
12586 if (SSA_VAR_P (arg
))
12587 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12588 "%Kattempt to free a non-heap object %qD", exp
, arg
);
12590 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12591 "%Kattempt to free a non-heap object", exp
);
12594 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12598 fold_builtin_object_size (tree ptr
, tree ost
)
12600 unsigned HOST_WIDE_INT bytes
;
12601 int object_size_type
;
12602 int precision
= TYPE_PRECISION (TREE_TYPE (ptr
));
12604 if (!validate_arg (ptr
, POINTER_TYPE
)
12605 || !validate_arg (ost
, INTEGER_TYPE
))
12610 if (TREE_CODE (ost
) != INTEGER_CST
12611 || tree_int_cst_sgn (ost
) < 0
12612 || compare_tree_int (ost
, 3) > 0)
12615 object_size_type
= tree_to_shwi (ost
);
12617 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12618 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12619 and (size_t) 0 for types 2 and 3. */
12620 if (TREE_SIDE_EFFECTS (ptr
))
12621 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
12623 if (TREE_CODE (ptr
) == ADDR_EXPR
)
12627 = wi::uhwi (compute_builtin_object_size (ptr
, object_size_type
),
12629 if (wi::fits_to_tree_p (wbytes
, size_type_node
))
12630 return wide_int_to_tree (size_type_node
, wbytes
);
12632 else if (TREE_CODE (ptr
) == SSA_NAME
)
12634 /* If object size is not known yet, delay folding until
12635 later. Maybe subsequent passes will help determining
12638 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12639 wbytes
= wi::uhwi (bytes
, precision
);
12640 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
12641 && wi::fits_to_tree_p (wbytes
, size_type_node
))
12642 return wide_int_to_tree (size_type_node
, wbytes
);
12648 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12649 DEST, SRC, LEN, and SIZE are the arguments to the call.
12650 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12651 code of the builtin. If MAXLEN is not NULL, it is maximum length
12652 passed as third argument. */
12655 fold_builtin_memory_chk (location_t loc
, tree fndecl
,
12656 tree dest
, tree src
, tree len
, tree size
,
12657 tree maxlen
, bool ignore
,
12658 enum built_in_function fcode
)
12662 if (!validate_arg (dest
, POINTER_TYPE
)
12663 || !validate_arg (src
,
12664 (fcode
== BUILT_IN_MEMSET_CHK
12665 ? INTEGER_TYPE
: POINTER_TYPE
))
12666 || !validate_arg (len
, INTEGER_TYPE
)
12667 || !validate_arg (size
, INTEGER_TYPE
))
12670 /* If SRC and DEST are the same (and not volatile), return DEST
12671 (resp. DEST+LEN for __mempcpy_chk). */
12672 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
12674 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12675 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12679 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
12680 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
12684 if (! tree_fits_uhwi_p (size
))
12687 if (! integer_all_onesp (size
))
12689 if (! tree_fits_uhwi_p (len
))
12691 /* If LEN is not constant, try MAXLEN too.
12692 For MAXLEN only allow optimizing into non-_ocs function
12693 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12694 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
12696 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
12698 /* (void) __mempcpy_chk () can be optimized into
12699 (void) __memcpy_chk (). */
12700 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12704 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12712 if (tree_int_cst_lt (size
, maxlen
))
12717 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12718 mem{cpy,pcpy,move,set} is available. */
12721 case BUILT_IN_MEMCPY_CHK
:
12722 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12724 case BUILT_IN_MEMPCPY_CHK
:
12725 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12727 case BUILT_IN_MEMMOVE_CHK
:
12728 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12730 case BUILT_IN_MEMSET_CHK
:
12731 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12740 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12743 /* Fold a call to the __st[rp]cpy_chk builtin.
12744 DEST, SRC, and SIZE are the arguments to the call.
12745 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12746 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12747 strings passed as second argument. */
12750 fold_builtin_stxcpy_chk (location_t loc
, tree fndecl
, tree dest
,
12751 tree src
, tree size
,
12752 tree maxlen
, bool ignore
,
12753 enum built_in_function fcode
)
12757 if (!validate_arg (dest
, POINTER_TYPE
)
12758 || !validate_arg (src
, POINTER_TYPE
)
12759 || !validate_arg (size
, INTEGER_TYPE
))
12762 /* If SRC and DEST are the same (and not volatile), return DEST. */
12763 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12764 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12766 if (! tree_fits_uhwi_p (size
))
12769 if (! integer_all_onesp (size
))
12771 len
= c_strlen (src
, 1);
12772 if (! len
|| ! tree_fits_uhwi_p (len
))
12774 /* If LEN is not constant, try MAXLEN too.
12775 For MAXLEN only allow optimizing into non-_ocs function
12776 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12777 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
12779 if (fcode
== BUILT_IN_STPCPY_CHK
)
12784 /* If return value of __stpcpy_chk is ignored,
12785 optimize into __strcpy_chk. */
12786 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
12790 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12793 if (! len
|| TREE_SIDE_EFFECTS (len
))
12796 /* If c_strlen returned something, but not a constant,
12797 transform __strcpy_chk into __memcpy_chk. */
12798 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12802 len
= fold_convert_loc (loc
, size_type_node
, len
);
12803 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
12804 build_int_cst (size_type_node
, 1));
12805 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12806 build_call_expr_loc (loc
, fn
, 4,
12807 dest
, src
, len
, size
));
12813 if (! tree_int_cst_lt (maxlen
, size
))
12817 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12818 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
12819 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
12823 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12826 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12827 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12828 length passed as third argument. IGNORE is true if return value can be
12829 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12832 fold_builtin_stxncpy_chk (location_t loc
, tree dest
, tree src
,
12833 tree len
, tree size
, tree maxlen
, bool ignore
,
12834 enum built_in_function fcode
)
12838 if (!validate_arg (dest
, POINTER_TYPE
)
12839 || !validate_arg (src
, POINTER_TYPE
)
12840 || !validate_arg (len
, INTEGER_TYPE
)
12841 || !validate_arg (size
, INTEGER_TYPE
))
12844 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
12846 /* If return value of __stpncpy_chk is ignored,
12847 optimize into __strncpy_chk. */
12848 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
12850 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12853 if (! tree_fits_uhwi_p (size
))
12856 if (! integer_all_onesp (size
))
12858 if (! tree_fits_uhwi_p (len
))
12860 /* If LEN is not constant, try MAXLEN too.
12861 For MAXLEN only allow optimizing into non-_ocs function
12862 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12863 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
12869 if (tree_int_cst_lt (size
, maxlen
))
12873 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12874 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
12875 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
12879 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12882 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12883 are the arguments to the call. */
12886 fold_builtin_strcat_chk (location_t loc
, tree fndecl
, tree dest
,
12887 tree src
, tree size
)
12892 if (!validate_arg (dest
, POINTER_TYPE
)
12893 || !validate_arg (src
, POINTER_TYPE
)
12894 || !validate_arg (size
, INTEGER_TYPE
))
12897 p
= c_getstr (src
);
12898 /* If the SRC parameter is "", return DEST. */
12899 if (p
&& *p
== '\0')
12900 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12902 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
12905 /* If __builtin_strcat_chk is used, assume strcat is available. */
12906 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
12910 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12913 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12917 fold_builtin_strncat_chk (location_t loc
, tree fndecl
,
12918 tree dest
, tree src
, tree len
, tree size
)
12923 if (!validate_arg (dest
, POINTER_TYPE
)
12924 || !validate_arg (src
, POINTER_TYPE
)
12925 || !validate_arg (size
, INTEGER_TYPE
)
12926 || !validate_arg (size
, INTEGER_TYPE
))
12929 p
= c_getstr (src
);
12930 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12931 if (p
&& *p
== '\0')
12932 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
12933 else if (integer_zerop (len
))
12934 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12936 if (! tree_fits_uhwi_p (size
))
12939 if (! integer_all_onesp (size
))
12941 tree src_len
= c_strlen (src
, 1);
12943 && tree_fits_uhwi_p (src_len
)
12944 && tree_fits_uhwi_p (len
)
12945 && ! tree_int_cst_lt (len
, src_len
))
12947 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12948 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
12952 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12957 /* If __builtin_strncat_chk is used, assume strncat is available. */
12958 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
12962 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12965 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12966 Return NULL_TREE if a normal call should be emitted rather than
12967 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12968 or BUILT_IN_VSPRINTF_CHK. */
12971 fold_builtin_sprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
12972 enum built_in_function fcode
)
12974 tree dest
, size
, len
, fn
, fmt
, flag
;
12975 const char *fmt_str
;
12977 /* Verify the required arguments in the original call. */
12981 if (!validate_arg (dest
, POINTER_TYPE
))
12984 if (!validate_arg (flag
, INTEGER_TYPE
))
12987 if (!validate_arg (size
, INTEGER_TYPE
))
12990 if (!validate_arg (fmt
, POINTER_TYPE
))
12993 if (! tree_fits_uhwi_p (size
))
12998 if (!init_target_chars ())
13001 /* Check whether the format is a literal string constant. */
13002 fmt_str
= c_getstr (fmt
);
13003 if (fmt_str
!= NULL
)
13005 /* If the format doesn't contain % args or %%, we know the size. */
13006 if (strchr (fmt_str
, target_percent
) == 0)
13008 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
13009 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
13011 /* If the format is "%s" and first ... argument is a string literal,
13012 we know the size too. */
13013 else if (fcode
== BUILT_IN_SPRINTF_CHK
13014 && strcmp (fmt_str
, target_percent_s
) == 0)
13021 if (validate_arg (arg
, POINTER_TYPE
))
13023 len
= c_strlen (arg
, 1);
13024 if (! len
|| ! tree_fits_uhwi_p (len
))
13031 if (! integer_all_onesp (size
))
13033 if (! len
|| ! tree_int_cst_lt (len
, size
))
13037 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13038 or if format doesn't contain % chars or is "%s". */
13039 if (! integer_zerop (flag
))
13041 if (fmt_str
== NULL
)
13043 if (strchr (fmt_str
, target_percent
) != NULL
13044 && strcmp (fmt_str
, target_percent_s
))
13048 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13049 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
13050 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
13054 return rewrite_call_expr_array (loc
, nargs
, args
, 4, fn
, 2, dest
, fmt
);
13057 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13058 a normal call should be emitted rather than expanding the function
13059 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13062 fold_builtin_sprintf_chk (location_t loc
, tree exp
,
13063 enum built_in_function fcode
)
13065 return fold_builtin_sprintf_chk_1 (loc
, call_expr_nargs (exp
),
13066 CALL_EXPR_ARGP (exp
), fcode
);
13069 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13070 NULL_TREE if a normal call should be emitted rather than expanding
13071 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13072 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13073 passed as second argument. */
13076 fold_builtin_snprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
13077 tree maxlen
, enum built_in_function fcode
)
13079 tree dest
, size
, len
, fn
, fmt
, flag
;
13080 const char *fmt_str
;
13082 /* Verify the required arguments in the original call. */
13086 if (!validate_arg (dest
, POINTER_TYPE
))
13089 if (!validate_arg (len
, INTEGER_TYPE
))
13092 if (!validate_arg (flag
, INTEGER_TYPE
))
13095 if (!validate_arg (size
, INTEGER_TYPE
))
13098 if (!validate_arg (fmt
, POINTER_TYPE
))
13101 if (! tree_fits_uhwi_p (size
))
13104 if (! integer_all_onesp (size
))
13106 if (! tree_fits_uhwi_p (len
))
13108 /* If LEN is not constant, try MAXLEN too.
13109 For MAXLEN only allow optimizing into non-_ocs function
13110 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13111 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
13117 if (tree_int_cst_lt (size
, maxlen
))
13121 if (!init_target_chars ())
13124 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13125 or if format doesn't contain % chars or is "%s". */
13126 if (! integer_zerop (flag
))
13128 fmt_str
= c_getstr (fmt
);
13129 if (fmt_str
== NULL
)
13131 if (strchr (fmt_str
, target_percent
) != NULL
13132 && strcmp (fmt_str
, target_percent_s
))
13136 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13138 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
13139 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
13143 return rewrite_call_expr_array (loc
, nargs
, args
, 5, fn
, 3, dest
, len
, fmt
);
13146 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13147 a normal call should be emitted rather than expanding the function
13148 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13149 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13150 passed as second argument. */
13153 fold_builtin_snprintf_chk (location_t loc
, tree exp
, tree maxlen
,
13154 enum built_in_function fcode
)
13156 return fold_builtin_snprintf_chk_1 (loc
, call_expr_nargs (exp
),
13157 CALL_EXPR_ARGP (exp
), maxlen
, fcode
);
13160 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13161 FMT and ARG are the arguments to the call; we don't fold cases with
13162 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13164 Return NULL_TREE if no simplification was possible, otherwise return the
13165 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13166 code of the function to be simplified. */
13169 fold_builtin_printf (location_t loc
, tree fndecl
, tree fmt
,
13170 tree arg
, bool ignore
,
13171 enum built_in_function fcode
)
13173 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
13174 const char *fmt_str
= NULL
;
13176 /* If the return value is used, don't do the transformation. */
13180 /* Verify the required arguments in the original call. */
13181 if (!validate_arg (fmt
, POINTER_TYPE
))
13184 /* Check whether the format is a literal string constant. */
13185 fmt_str
= c_getstr (fmt
);
13186 if (fmt_str
== NULL
)
13189 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
13191 /* If we're using an unlocked function, assume the other
13192 unlocked functions exist explicitly. */
13193 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
13194 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
13198 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
13199 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
13202 if (!init_target_chars ())
13205 if (strcmp (fmt_str
, target_percent_s
) == 0
13206 || strchr (fmt_str
, target_percent
) == NULL
)
13210 if (strcmp (fmt_str
, target_percent_s
) == 0)
13212 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13215 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13218 str
= c_getstr (arg
);
13224 /* The format specifier doesn't contain any '%' characters. */
13225 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
13231 /* If the string was "", printf does nothing. */
13232 if (str
[0] == '\0')
13233 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13235 /* If the string has length of 1, call putchar. */
13236 if (str
[1] == '\0')
13238 /* Given printf("c"), (where c is any one character,)
13239 convert "c"[0] to an int and pass that to the replacement
13241 newarg
= build_int_cst (integer_type_node
, str
[0]);
13243 call
= build_call_expr_loc (loc
, fn_putchar
, 1, newarg
);
13247 /* If the string was "string\n", call puts("string"). */
13248 size_t len
= strlen (str
);
13249 if ((unsigned char)str
[len
- 1] == target_newline
13250 && (size_t) (int) len
== len
13254 tree offset_node
, string_cst
;
13256 /* Create a NUL-terminated string that's one char shorter
13257 than the original, stripping off the trailing '\n'. */
13258 newarg
= build_string_literal (len
, str
);
13259 string_cst
= string_constant (newarg
, &offset_node
);
13260 gcc_checking_assert (string_cst
13261 && (TREE_STRING_LENGTH (string_cst
)
13263 && integer_zerop (offset_node
)
13265 TREE_STRING_POINTER (string_cst
)[len
- 1]
13266 == target_newline
);
13267 /* build_string_literal creates a new STRING_CST,
13268 modify it in place to avoid double copying. */
13269 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
13270 newstr
[len
- 1] = '\0';
13272 call
= build_call_expr_loc (loc
, fn_puts
, 1, newarg
);
13275 /* We'd like to arrange to call fputs(string,stdout) here,
13276 but we need stdout and don't have a way to get it yet. */
13281 /* The other optimizations can be done only on the non-va_list variants. */
13282 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13285 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13286 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
13288 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13291 call
= build_call_expr_loc (loc
, fn_puts
, 1, arg
);
13294 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13295 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13297 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13300 call
= build_call_expr_loc (loc
, fn_putchar
, 1, arg
);
13306 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13309 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13310 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13311 more than 3 arguments, and ARG may be null in the 2-argument case.
13313 Return NULL_TREE if no simplification was possible, otherwise return the
13314 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13315 code of the function to be simplified. */
13318 fold_builtin_fprintf (location_t loc
, tree fndecl
, tree fp
,
13319 tree fmt
, tree arg
, bool ignore
,
13320 enum built_in_function fcode
)
13322 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
13323 const char *fmt_str
= NULL
;
13325 /* If the return value is used, don't do the transformation. */
13329 /* Verify the required arguments in the original call. */
13330 if (!validate_arg (fp
, POINTER_TYPE
))
13332 if (!validate_arg (fmt
, POINTER_TYPE
))
13335 /* Check whether the format is a literal string constant. */
13336 fmt_str
= c_getstr (fmt
);
13337 if (fmt_str
== NULL
)
13340 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
13342 /* If we're using an unlocked function, assume the other
13343 unlocked functions exist explicitly. */
13344 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
13345 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
13349 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
13350 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
13353 if (!init_target_chars ())
13356 /* If the format doesn't contain % args or %%, use strcpy. */
13357 if (strchr (fmt_str
, target_percent
) == NULL
)
13359 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
13363 /* If the format specifier was "", fprintf does nothing. */
13364 if (fmt_str
[0] == '\0')
13366 /* If FP has side-effects, just wait until gimplification is
13368 if (TREE_SIDE_EFFECTS (fp
))
13371 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13374 /* When "string" doesn't contain %, replace all cases of
13375 fprintf (fp, string) with fputs (string, fp). The fputs
13376 builtin will take care of special cases like length == 1. */
13378 call
= build_call_expr_loc (loc
, fn_fputs
, 2, fmt
, fp
);
13381 /* The other optimizations can be done only on the non-va_list variants. */
13382 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
13385 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13386 else if (strcmp (fmt_str
, target_percent_s
) == 0)
13388 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13391 call
= build_call_expr_loc (loc
, fn_fputs
, 2, arg
, fp
);
13394 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13395 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13397 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13400 call
= build_call_expr_loc (loc
, fn_fputc
, 2, arg
, fp
);
13405 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13408 /* Initialize format string characters in the target charset. */
13411 init_target_chars (void)
13416 target_newline
= lang_hooks
.to_target_charset ('\n');
13417 target_percent
= lang_hooks
.to_target_charset ('%');
13418 target_c
= lang_hooks
.to_target_charset ('c');
13419 target_s
= lang_hooks
.to_target_charset ('s');
13420 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
13424 target_percent_c
[0] = target_percent
;
13425 target_percent_c
[1] = target_c
;
13426 target_percent_c
[2] = '\0';
13428 target_percent_s
[0] = target_percent
;
13429 target_percent_s
[1] = target_s
;
13430 target_percent_s
[2] = '\0';
13432 target_percent_s_newline
[0] = target_percent
;
13433 target_percent_s_newline
[1] = target_s
;
13434 target_percent_s_newline
[2] = target_newline
;
13435 target_percent_s_newline
[3] = '\0';
13442 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13443 and no overflow/underflow occurred. INEXACT is true if M was not
13444 exactly calculated. TYPE is the tree type for the result. This
13445 function assumes that you cleared the MPFR flags and then
13446 calculated M to see if anything subsequently set a flag prior to
13447 entering this function. Return NULL_TREE if any checks fail. */
13450 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
13452 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13453 overflow/underflow occurred. If -frounding-math, proceed iff the
13454 result of calling FUNC was exact. */
13455 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13456 && (!flag_rounding_math
|| !inexact
))
13458 REAL_VALUE_TYPE rr
;
13460 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
13461 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13462 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13463 but the mpft_t is not, then we underflowed in the
13465 if (real_isfinite (&rr
)
13466 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
13468 REAL_VALUE_TYPE rmode
;
13470 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
13471 /* Proceed iff the specified mode can hold the value. */
13472 if (real_identical (&rmode
, &rr
))
13473 return build_real (type
, rmode
);
13479 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13480 number and no overflow/underflow occurred. INEXACT is true if M
13481 was not exactly calculated. TYPE is the tree type for the result.
13482 This function assumes that you cleared the MPFR flags and then
13483 calculated M to see if anything subsequently set a flag prior to
13484 entering this function. Return NULL_TREE if any checks fail, if
13485 FORCE_CONVERT is true, then bypass the checks. */
13488 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
13490 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13491 overflow/underflow occurred. If -frounding-math, proceed iff the
13492 result of calling FUNC was exact. */
13494 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
13495 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13496 && (!flag_rounding_math
|| !inexact
)))
13498 REAL_VALUE_TYPE re
, im
;
13500 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
13501 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
13502 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13503 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13504 but the mpft_t is not, then we underflowed in the
13507 || (real_isfinite (&re
) && real_isfinite (&im
)
13508 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
13509 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
13511 REAL_VALUE_TYPE re_mode
, im_mode
;
13513 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
13514 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
13515 /* Proceed iff the specified mode can hold the value. */
13517 || (real_identical (&re_mode
, &re
)
13518 && real_identical (&im_mode
, &im
)))
13519 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
13520 build_real (TREE_TYPE (type
), im_mode
));
13526 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13527 FUNC on it and return the resulting value as a tree with type TYPE.
13528 If MIN and/or MAX are not NULL, then the supplied ARG must be
13529 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13530 acceptable values, otherwise they are not. The mpfr precision is
13531 set to the precision of TYPE. We assume that function FUNC returns
13532 zero if the result could be calculated exactly within the requested
13536 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
13537 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
13540 tree result
= NULL_TREE
;
13544 /* To proceed, MPFR must exactly represent the target floating point
13545 format, which only happens when the target base equals two. */
13546 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13547 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
13549 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13551 if (real_isfinite (ra
)
13552 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
13553 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
13555 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13556 const int prec
= fmt
->p
;
13557 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13561 mpfr_init2 (m
, prec
);
13562 mpfr_from_real (m
, ra
, GMP_RNDN
);
13563 mpfr_clear_flags ();
13564 inexact
= func (m
, m
, rnd
);
13565 result
= do_mpfr_ckconv (m
, type
, inexact
);
13573 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13574 FUNC on it and return the resulting value as a tree with type TYPE.
13575 The mpfr precision is set to the precision of TYPE. We assume that
13576 function FUNC returns zero if the result could be calculated
13577 exactly within the requested precision. */
13580 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
13581 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13583 tree result
= NULL_TREE
;
13588 /* To proceed, MPFR must exactly represent the target floating point
13589 format, which only happens when the target base equals two. */
13590 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13591 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13592 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13594 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13595 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13597 if (real_isfinite (ra1
) && real_isfinite (ra2
))
13599 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13600 const int prec
= fmt
->p
;
13601 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13605 mpfr_inits2 (prec
, m1
, m2
, NULL
);
13606 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13607 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13608 mpfr_clear_flags ();
13609 inexact
= func (m1
, m1
, m2
, rnd
);
13610 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13611 mpfr_clears (m1
, m2
, NULL
);
13618 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13619 FUNC on it and return the resulting value as a tree with type TYPE.
13620 The mpfr precision is set to the precision of TYPE. We assume that
13621 function FUNC returns zero if the result could be calculated
13622 exactly within the requested precision. */
13625 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
13626 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13628 tree result
= NULL_TREE
;
13634 /* To proceed, MPFR must exactly represent the target floating point
13635 format, which only happens when the target base equals two. */
13636 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13637 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13638 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
13639 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
13641 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13642 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13643 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
13645 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
13647 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13648 const int prec
= fmt
->p
;
13649 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13653 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
13654 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13655 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13656 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
13657 mpfr_clear_flags ();
13658 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
13659 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13660 mpfr_clears (m1
, m2
, m3
, NULL
);
13667 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13668 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13669 If ARG_SINP and ARG_COSP are NULL then the result is returned
13670 as a complex value.
13671 The type is taken from the type of ARG and is used for setting the
13672 precision of the calculation and results. */
13675 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
13677 tree
const type
= TREE_TYPE (arg
);
13678 tree result
= NULL_TREE
;
13682 /* To proceed, MPFR must exactly represent the target floating point
13683 format, which only happens when the target base equals two. */
13684 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13685 && TREE_CODE (arg
) == REAL_CST
13686 && !TREE_OVERFLOW (arg
))
13688 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13690 if (real_isfinite (ra
))
13692 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13693 const int prec
= fmt
->p
;
13694 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13695 tree result_s
, result_c
;
13699 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
13700 mpfr_from_real (m
, ra
, GMP_RNDN
);
13701 mpfr_clear_flags ();
13702 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
13703 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
13704 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
13705 mpfr_clears (m
, ms
, mc
, NULL
);
13706 if (result_s
&& result_c
)
13708 /* If we are to return in a complex value do so. */
13709 if (!arg_sinp
&& !arg_cosp
)
13710 return build_complex (build_complex_type (type
),
13711 result_c
, result_s
);
13713 /* Dereference the sin/cos pointer arguments. */
13714 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
13715 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
13716 /* Proceed if valid pointer type were passed in. */
13717 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
13718 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
13720 /* Set the values. */
13721 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
13723 TREE_SIDE_EFFECTS (result_s
) = 1;
13724 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
13726 TREE_SIDE_EFFECTS (result_c
) = 1;
13727 /* Combine the assignments into a compound expr. */
13728 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13729 result_s
, result_c
));
13737 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13738 two-argument mpfr order N Bessel function FUNC on them and return
13739 the resulting value as a tree with type TYPE. The mpfr precision
13740 is set to the precision of TYPE. We assume that function FUNC
13741 returns zero if the result could be calculated exactly within the
13742 requested precision. */
13744 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
13745 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
13746 const REAL_VALUE_TYPE
*min
, bool inclusive
)
13748 tree result
= NULL_TREE
;
13753 /* To proceed, MPFR must exactly represent the target floating point
13754 format, which only happens when the target base equals two. */
13755 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13756 && tree_fits_shwi_p (arg1
)
13757 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13759 const HOST_WIDE_INT n
= tree_to_shwi (arg1
);
13760 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
13763 && real_isfinite (ra
)
13764 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
13766 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13767 const int prec
= fmt
->p
;
13768 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13772 mpfr_init2 (m
, prec
);
13773 mpfr_from_real (m
, ra
, GMP_RNDN
);
13774 mpfr_clear_flags ();
13775 inexact
= func (m
, n
, m
, rnd
);
13776 result
= do_mpfr_ckconv (m
, type
, inexact
);
13784 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13785 the pointer *(ARG_QUO) and return the result. The type is taken
13786 from the type of ARG0 and is used for setting the precision of the
13787 calculation and results. */
13790 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
13792 tree
const type
= TREE_TYPE (arg0
);
13793 tree result
= NULL_TREE
;
13798 /* To proceed, MPFR must exactly represent the target floating point
13799 format, which only happens when the target base equals two. */
13800 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13801 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
13802 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
13804 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
13805 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
13807 if (real_isfinite (ra0
) && real_isfinite (ra1
))
13809 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13810 const int prec
= fmt
->p
;
13811 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13816 mpfr_inits2 (prec
, m0
, m1
, NULL
);
13817 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
13818 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13819 mpfr_clear_flags ();
13820 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
13821 /* Remquo is independent of the rounding mode, so pass
13822 inexact=0 to do_mpfr_ckconv(). */
13823 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
13824 mpfr_clears (m0
, m1
, NULL
);
13827 /* MPFR calculates quo in the host's long so it may
13828 return more bits in quo than the target int can hold
13829 if sizeof(host long) > sizeof(target int). This can
13830 happen even for native compilers in LP64 mode. In
13831 these cases, modulo the quo value with the largest
13832 number that the target int can hold while leaving one
13833 bit for the sign. */
13834 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
13835 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
13837 /* Dereference the quo pointer argument. */
13838 arg_quo
= build_fold_indirect_ref (arg_quo
);
13839 /* Proceed iff a valid pointer type was passed in. */
13840 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
13842 /* Set the value. */
13844 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
13845 build_int_cst (TREE_TYPE (arg_quo
),
13847 TREE_SIDE_EFFECTS (result_quo
) = 1;
13848 /* Combine the quo assignment with the rem. */
13849 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13850 result_quo
, result_rem
));
13858 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13859 resulting value as a tree with type TYPE. The mpfr precision is
13860 set to the precision of TYPE. We assume that this mpfr function
13861 returns zero if the result could be calculated exactly within the
13862 requested precision. In addition, the integer pointer represented
13863 by ARG_SG will be dereferenced and set to the appropriate signgam
13867 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
13869 tree result
= NULL_TREE
;
13873 /* To proceed, MPFR must exactly represent the target floating point
13874 format, which only happens when the target base equals two. Also
13875 verify ARG is a constant and that ARG_SG is an int pointer. */
13876 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13877 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
13878 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
13879 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
13881 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
13883 /* In addition to NaN and Inf, the argument cannot be zero or a
13884 negative integer. */
13885 if (real_isfinite (ra
)
13886 && ra
->cl
!= rvc_zero
13887 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
13889 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13890 const int prec
= fmt
->p
;
13891 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13896 mpfr_init2 (m
, prec
);
13897 mpfr_from_real (m
, ra
, GMP_RNDN
);
13898 mpfr_clear_flags ();
13899 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
13900 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
13906 /* Dereference the arg_sg pointer argument. */
13907 arg_sg
= build_fold_indirect_ref (arg_sg
);
13908 /* Assign the signgam value into *arg_sg. */
13909 result_sg
= fold_build2 (MODIFY_EXPR
,
13910 TREE_TYPE (arg_sg
), arg_sg
,
13911 build_int_cst (TREE_TYPE (arg_sg
), sg
));
13912 TREE_SIDE_EFFECTS (result_sg
) = 1;
13913 /* Combine the signgam assignment with the lgamma result. */
13914 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13915 result_sg
, result_lg
));
13923 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13924 function FUNC on it and return the resulting value as a tree with
13925 type TYPE. The mpfr precision is set to the precision of TYPE. We
13926 assume that function FUNC returns zero if the result could be
13927 calculated exactly within the requested precision. */
13930 do_mpc_arg1 (tree arg
, tree type
, int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
))
13932 tree result
= NULL_TREE
;
13936 /* To proceed, MPFR must exactly represent the target floating point
13937 format, which only happens when the target base equals two. */
13938 if (TREE_CODE (arg
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg
)
13939 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
13940 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg
))))->b
== 2)
13942 const REAL_VALUE_TYPE
*const re
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
13943 const REAL_VALUE_TYPE
*const im
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
13945 if (real_isfinite (re
) && real_isfinite (im
))
13947 const struct real_format
*const fmt
=
13948 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
13949 const int prec
= fmt
->p
;
13950 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13951 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
13955 mpc_init2 (m
, prec
);
13956 mpfr_from_real (mpc_realref (m
), re
, rnd
);
13957 mpfr_from_real (mpc_imagref (m
), im
, rnd
);
13958 mpfr_clear_flags ();
13959 inexact
= func (m
, m
, crnd
);
13960 result
= do_mpc_ckconv (m
, type
, inexact
, /*force_convert=*/ 0);
13968 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13969 mpc function FUNC on it and return the resulting value as a tree
13970 with type TYPE. The mpfr precision is set to the precision of
13971 TYPE. We assume that function FUNC returns zero if the result
13972 could be calculated exactly within the requested precision. If
13973 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13974 in the arguments and/or results. */
13977 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
13978 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
13980 tree result
= NULL_TREE
;
13985 /* To proceed, MPFR must exactly represent the target floating point
13986 format, which only happens when the target base equals two. */
13987 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
13988 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
13989 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
13990 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
13991 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
13993 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
13994 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
13995 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
13996 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
13999 || (real_isfinite (re0
) && real_isfinite (im0
)
14000 && real_isfinite (re1
) && real_isfinite (im1
)))
14002 const struct real_format
*const fmt
=
14003 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
14004 const int prec
= fmt
->p
;
14005 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14006 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
14010 mpc_init2 (m0
, prec
);
14011 mpc_init2 (m1
, prec
);
14012 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
14013 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
14014 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
14015 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
14016 mpfr_clear_flags ();
14017 inexact
= func (m0
, m0
, m1
, crnd
);
14018 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
14027 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14028 a normal call should be emitted rather than expanding the function
14029 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14032 gimple_fold_builtin_sprintf_chk (gimple stmt
, enum built_in_function fcode
)
14034 int nargs
= gimple_call_num_args (stmt
);
14036 return fold_builtin_sprintf_chk_1 (gimple_location (stmt
), nargs
,
14038 ? gimple_call_arg_ptr (stmt
, 0)
14039 : &error_mark_node
), fcode
);
14042 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14043 a normal call should be emitted rather than expanding the function
14044 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14045 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14046 passed as second argument. */
14049 gimple_fold_builtin_snprintf_chk (gimple stmt
, tree maxlen
,
14050 enum built_in_function fcode
)
14052 int nargs
= gimple_call_num_args (stmt
);
14054 return fold_builtin_snprintf_chk_1 (gimple_location (stmt
), nargs
,
14056 ? gimple_call_arg_ptr (stmt
, 0)
14057 : &error_mark_node
), maxlen
, fcode
);
14060 /* Builtins with folding operations that operate on "..." arguments
14061 need special handling; we need to store the arguments in a convenient
14062 data structure before attempting any folding. Fortunately there are
14063 only a few builtins that fall into this category. FNDECL is the
14064 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14065 result of the function call is ignored. */
14068 gimple_fold_builtin_varargs (tree fndecl
, gimple stmt
,
14069 bool ignore ATTRIBUTE_UNUSED
)
14071 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
14072 tree ret
= NULL_TREE
;
14076 case BUILT_IN_SPRINTF_CHK
:
14077 case BUILT_IN_VSPRINTF_CHK
:
14078 ret
= gimple_fold_builtin_sprintf_chk (stmt
, fcode
);
14081 case BUILT_IN_SNPRINTF_CHK
:
14082 case BUILT_IN_VSNPRINTF_CHK
:
14083 ret
= gimple_fold_builtin_snprintf_chk (stmt
, NULL_TREE
, fcode
);
14090 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
14091 TREE_NO_WARNING (ret
) = 1;
14097 /* A wrapper function for builtin folding that prevents warnings for
14098 "statement without effect" and the like, caused by removing the
14099 call node earlier than the warning is generated. */
14102 fold_call_stmt (gimple stmt
, bool ignore
)
14104 tree ret
= NULL_TREE
;
14105 tree fndecl
= gimple_call_fndecl (stmt
);
14106 location_t loc
= gimple_location (stmt
);
14108 && TREE_CODE (fndecl
) == FUNCTION_DECL
14109 && DECL_BUILT_IN (fndecl
)
14110 && !gimple_call_va_arg_pack_p (stmt
))
14112 int nargs
= gimple_call_num_args (stmt
);
14113 tree
*args
= (nargs
> 0
14114 ? gimple_call_arg_ptr (stmt
, 0)
14115 : &error_mark_node
);
14117 if (avoid_folding_inline_builtin (fndecl
))
14119 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
14121 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
14125 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
14126 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
14128 ret
= gimple_fold_builtin_varargs (fndecl
, stmt
, ignore
);
14131 /* Propagate location information from original call to
14132 expansion of builtin. Otherwise things like
14133 maybe_emit_chk_warning, that operate on the expansion
14134 of a builtin, will use the wrong location information. */
14135 if (gimple_has_location (stmt
))
14137 tree realret
= ret
;
14138 if (TREE_CODE (ret
) == NOP_EXPR
)
14139 realret
= TREE_OPERAND (ret
, 0);
14140 if (CAN_HAVE_LOCATION_P (realret
)
14141 && !EXPR_HAS_LOCATION (realret
))
14142 SET_EXPR_LOCATION (realret
, loc
);
14152 /* Look up the function in builtin_decl that corresponds to DECL
14153 and set ASMSPEC as its user assembler name. DECL must be a
14154 function decl that declares a builtin. */
14157 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
14160 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
14161 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
14164 builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
14165 set_user_assembler_name (builtin
, asmspec
);
14166 switch (DECL_FUNCTION_CODE (decl
))
14168 case BUILT_IN_MEMCPY
:
14169 init_block_move_fn (asmspec
);
14170 memcpy_libfunc
= set_user_assembler_libfunc ("memcpy", asmspec
);
14172 case BUILT_IN_MEMSET
:
14173 init_block_clear_fn (asmspec
);
14174 memset_libfunc
= set_user_assembler_libfunc ("memset", asmspec
);
14176 case BUILT_IN_MEMMOVE
:
14177 memmove_libfunc
= set_user_assembler_libfunc ("memmove", asmspec
);
14179 case BUILT_IN_MEMCMP
:
14180 memcmp_libfunc
= set_user_assembler_libfunc ("memcmp", asmspec
);
14182 case BUILT_IN_ABORT
:
14183 abort_libfunc
= set_user_assembler_libfunc ("abort", asmspec
);
14186 if (INT_TYPE_SIZE
< BITS_PER_WORD
)
14188 set_user_assembler_libfunc ("ffs", asmspec
);
14189 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
,
14190 MODE_INT
, 0), "ffs");
14198 /* Return true if DECL is a builtin that expands to a constant or similarly
14201 is_simple_builtin (tree decl
)
14203 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14204 switch (DECL_FUNCTION_CODE (decl
))
14206 /* Builtins that expand to constants. */
14207 case BUILT_IN_CONSTANT_P
:
14208 case BUILT_IN_EXPECT
:
14209 case BUILT_IN_OBJECT_SIZE
:
14210 case BUILT_IN_UNREACHABLE
:
14211 /* Simple register moves or loads from stack. */
14212 case BUILT_IN_ASSUME_ALIGNED
:
14213 case BUILT_IN_RETURN_ADDRESS
:
14214 case BUILT_IN_EXTRACT_RETURN_ADDR
:
14215 case BUILT_IN_FROB_RETURN_ADDR
:
14216 case BUILT_IN_RETURN
:
14217 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
14218 case BUILT_IN_FRAME_ADDRESS
:
14219 case BUILT_IN_VA_END
:
14220 case BUILT_IN_STACK_SAVE
:
14221 case BUILT_IN_STACK_RESTORE
:
14222 /* Exception state returns or moves registers around. */
14223 case BUILT_IN_EH_FILTER
:
14224 case BUILT_IN_EH_POINTER
:
14225 case BUILT_IN_EH_COPY_VALUES
:
14235 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14236 most probably expanded inline into reasonably simple code. This is a
14237 superset of is_simple_builtin. */
14239 is_inexpensive_builtin (tree decl
)
14243 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
14245 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14246 switch (DECL_FUNCTION_CODE (decl
))
14249 case BUILT_IN_ALLOCA
:
14250 case BUILT_IN_ALLOCA_WITH_ALIGN
:
14251 case BUILT_IN_BSWAP16
:
14252 case BUILT_IN_BSWAP32
:
14253 case BUILT_IN_BSWAP64
:
14255 case BUILT_IN_CLZIMAX
:
14256 case BUILT_IN_CLZL
:
14257 case BUILT_IN_CLZLL
:
14259 case BUILT_IN_CTZIMAX
:
14260 case BUILT_IN_CTZL
:
14261 case BUILT_IN_CTZLL
:
14263 case BUILT_IN_FFSIMAX
:
14264 case BUILT_IN_FFSL
:
14265 case BUILT_IN_FFSLL
:
14266 case BUILT_IN_IMAXABS
:
14267 case BUILT_IN_FINITE
:
14268 case BUILT_IN_FINITEF
:
14269 case BUILT_IN_FINITEL
:
14270 case BUILT_IN_FINITED32
:
14271 case BUILT_IN_FINITED64
:
14272 case BUILT_IN_FINITED128
:
14273 case BUILT_IN_FPCLASSIFY
:
14274 case BUILT_IN_ISFINITE
:
14275 case BUILT_IN_ISINF_SIGN
:
14276 case BUILT_IN_ISINF
:
14277 case BUILT_IN_ISINFF
:
14278 case BUILT_IN_ISINFL
:
14279 case BUILT_IN_ISINFD32
:
14280 case BUILT_IN_ISINFD64
:
14281 case BUILT_IN_ISINFD128
:
14282 case BUILT_IN_ISNAN
:
14283 case BUILT_IN_ISNANF
:
14284 case BUILT_IN_ISNANL
:
14285 case BUILT_IN_ISNAND32
:
14286 case BUILT_IN_ISNAND64
:
14287 case BUILT_IN_ISNAND128
:
14288 case BUILT_IN_ISNORMAL
:
14289 case BUILT_IN_ISGREATER
:
14290 case BUILT_IN_ISGREATEREQUAL
:
14291 case BUILT_IN_ISLESS
:
14292 case BUILT_IN_ISLESSEQUAL
:
14293 case BUILT_IN_ISLESSGREATER
:
14294 case BUILT_IN_ISUNORDERED
:
14295 case BUILT_IN_VA_ARG_PACK
:
14296 case BUILT_IN_VA_ARG_PACK_LEN
:
14297 case BUILT_IN_VA_COPY
:
14298 case BUILT_IN_TRAP
:
14299 case BUILT_IN_SAVEREGS
:
14300 case BUILT_IN_POPCOUNTL
:
14301 case BUILT_IN_POPCOUNTLL
:
14302 case BUILT_IN_POPCOUNTIMAX
:
14303 case BUILT_IN_POPCOUNT
:
14304 case BUILT_IN_PARITYL
:
14305 case BUILT_IN_PARITYLL
:
14306 case BUILT_IN_PARITYIMAX
:
14307 case BUILT_IN_PARITY
:
14308 case BUILT_IN_LABS
:
14309 case BUILT_IN_LLABS
:
14310 case BUILT_IN_PREFETCH
:
14314 return is_simple_builtin (decl
);