/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
25 #include "coretypes.h"
31 #include "tree-gimple.h"
34 #include "hard-reg-set.h"
37 #include "insn-config.h"
43 #include "typeclass.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
/* Default for whether varargs are padded downward: follow the target's
   byte order unless the target overrides it.  */
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
/* Define the names of the builtin function types and codes.  */

/* Printable names for the four values of enum built_in_class,
   indexed by the enum value itself.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names
[(int) END_BUILTINS
] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls
[(int) END_BUILTINS
];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls
[(int) END_BUILTINS
];
77 static const char *c_getstr (tree
);
78 static rtx
c_readstr (const char *, enum machine_mode
);
79 static int target_char_cast (tree
, char *);
80 static rtx
get_memory_rtx (tree
, tree
);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx
result_vector (int, rtx
);
86 static void expand_builtin_update_setjmp_buf (rtx
);
87 static void expand_builtin_prefetch (tree
);
88 static rtx
expand_builtin_apply_args (void);
89 static rtx
expand_builtin_apply_args_1 (void);
90 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
91 static void expand_builtin_return (rtx
);
92 static enum type_class
type_to_class (tree
);
93 static rtx
expand_builtin_classify_type (tree
);
94 static void expand_errno_check (tree
, rtx
);
95 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
96 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
97 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
98 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
, rtx
);
99 static rtx
expand_builtin_sincos (tree
);
100 static rtx
expand_builtin_cexpi (tree
, rtx
, rtx
);
101 static rtx
expand_builtin_int_roundingfn (tree
, rtx
, rtx
);
102 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
, rtx
);
103 static rtx
expand_builtin_args_info (tree
);
104 static rtx
expand_builtin_next_arg (void);
105 static rtx
expand_builtin_va_start (tree
);
106 static rtx
expand_builtin_va_end (tree
);
107 static rtx
expand_builtin_va_copy (tree
);
108 static rtx
expand_builtin_memchr (tree
, rtx
, enum machine_mode
);
109 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
110 static rtx
expand_builtin_strcmp (tree
, rtx
, enum machine_mode
);
111 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
112 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
113 static rtx
expand_builtin_strcat (tree
, tree
, rtx
, enum machine_mode
);
114 static rtx
expand_builtin_strncat (tree
, rtx
, enum machine_mode
);
115 static rtx
expand_builtin_strspn (tree
, rtx
, enum machine_mode
);
116 static rtx
expand_builtin_strcspn (tree
, rtx
, enum machine_mode
);
117 static rtx
expand_builtin_memcpy (tree
, rtx
, enum machine_mode
);
118 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
119 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, tree
, rtx
,
120 enum machine_mode
, int);
121 static rtx
expand_builtin_memmove (tree
, rtx
, enum machine_mode
, int);
122 static rtx
expand_builtin_memmove_args (tree
, tree
, tree
, tree
, rtx
,
123 enum machine_mode
, int);
124 static rtx
expand_builtin_bcopy (tree
, int);
125 static rtx
expand_builtin_strcpy (tree
, tree
, rtx
, enum machine_mode
);
126 static rtx
expand_builtin_strcpy_args (tree
, tree
, tree
, rtx
, enum machine_mode
);
127 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
128 static rtx
builtin_strncpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
129 static rtx
expand_builtin_strncpy (tree
, rtx
, enum machine_mode
);
130 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
131 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
132 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
133 static rtx
expand_builtin_bzero (tree
);
134 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
135 static rtx
expand_builtin_strstr (tree
, rtx
, enum machine_mode
);
136 static rtx
expand_builtin_strpbrk (tree
, rtx
, enum machine_mode
);
137 static rtx
expand_builtin_strchr (tree
, rtx
, enum machine_mode
);
138 static rtx
expand_builtin_strrchr (tree
, rtx
, enum machine_mode
);
139 static rtx
expand_builtin_alloca (tree
, rtx
);
140 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
141 static rtx
expand_builtin_frame_address (tree
, tree
);
142 static rtx
expand_builtin_fputs (tree
, rtx
, bool);
143 static rtx
expand_builtin_printf (tree
, rtx
, enum machine_mode
, bool);
144 static rtx
expand_builtin_fprintf (tree
, rtx
, enum machine_mode
, bool);
145 static rtx
expand_builtin_sprintf (tree
, rtx
, enum machine_mode
);
146 static tree
stabilize_va_list (tree
, int);
147 static rtx
expand_builtin_expect (tree
, rtx
);
148 static tree
fold_builtin_constant_p (tree
);
149 static tree
fold_builtin_expect (tree
);
150 static tree
fold_builtin_classify_type (tree
);
151 static tree
fold_builtin_strlen (tree
);
152 static tree
fold_builtin_inf (tree
, int);
153 static tree
fold_builtin_nan (tree
, tree
, int);
154 static tree
rewrite_call_expr (tree
, int, tree
, int, ...);
155 static bool validate_arg (tree
, enum tree_code code
);
156 static bool integer_valued_real_p (tree
);
157 static tree
fold_trunc_transparent_mathfn (tree
, tree
);
158 static bool readonly_data_expr (tree
);
159 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
160 static rtx
expand_builtin_signbit (tree
, rtx
);
161 static tree
fold_builtin_sqrt (tree
, tree
);
162 static tree
fold_builtin_cbrt (tree
, tree
);
163 static tree
fold_builtin_pow (tree
, tree
, tree
, tree
);
164 static tree
fold_builtin_powi (tree
, tree
, tree
, tree
);
165 static tree
fold_builtin_cos (tree
, tree
, tree
);
166 static tree
fold_builtin_cosh (tree
, tree
, tree
);
167 static tree
fold_builtin_tan (tree
, tree
);
168 static tree
fold_builtin_trunc (tree
, tree
);
169 static tree
fold_builtin_floor (tree
, tree
);
170 static tree
fold_builtin_ceil (tree
, tree
);
171 static tree
fold_builtin_round (tree
, tree
);
172 static tree
fold_builtin_int_roundingfn (tree
, tree
);
173 static tree
fold_builtin_bitop (tree
, tree
);
174 static tree
fold_builtin_memory_op (tree
, tree
, tree
, tree
, bool, int);
175 static tree
fold_builtin_strchr (tree
, tree
, tree
);
176 static tree
fold_builtin_memchr (tree
, tree
, tree
, tree
);
177 static tree
fold_builtin_memcmp (tree
, tree
, tree
);
178 static tree
fold_builtin_strcmp (tree
, tree
);
179 static tree
fold_builtin_strncmp (tree
, tree
, tree
);
180 static tree
fold_builtin_signbit (tree
, tree
);
181 static tree
fold_builtin_copysign (tree
, tree
, tree
, tree
);
182 static tree
fold_builtin_isascii (tree
);
183 static tree
fold_builtin_toascii (tree
);
184 static tree
fold_builtin_isdigit (tree
);
185 static tree
fold_builtin_fabs (tree
, tree
);
186 static tree
fold_builtin_abs (tree
, tree
);
187 static tree
fold_builtin_unordered_cmp (tree
, tree
, tree
, enum tree_code
,
189 static tree
fold_builtin_n (tree
, tree
*, int, bool);
190 static tree
fold_builtin_0 (tree
, bool);
191 static tree
fold_builtin_1 (tree
, tree
, bool);
192 static tree
fold_builtin_2 (tree
, tree
, tree
, bool);
193 static tree
fold_builtin_3 (tree
, tree
, tree
, tree
, bool);
194 static tree
fold_builtin_4 (tree
, tree
, tree
, tree
, tree
, bool);
195 static tree
fold_builtin_varargs (tree
, tree
, bool);
197 static tree
fold_builtin_strpbrk (tree
, tree
, tree
);
198 static tree
fold_builtin_strstr (tree
, tree
, tree
);
199 static tree
fold_builtin_strrchr (tree
, tree
, tree
);
200 static tree
fold_builtin_strcat (tree
, tree
);
201 static tree
fold_builtin_strncat (tree
, tree
, tree
);
202 static tree
fold_builtin_strspn (tree
, tree
);
203 static tree
fold_builtin_strcspn (tree
, tree
);
204 static tree
fold_builtin_sprintf (tree
, tree
, tree
, int);
206 static rtx
expand_builtin_object_size (tree
);
207 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
208 enum built_in_function
);
209 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
210 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
211 static tree
fold_builtin_object_size (tree
, tree
);
212 static tree
fold_builtin_strcat_chk (tree
, tree
, tree
, tree
);
213 static tree
fold_builtin_strncat_chk (tree
, tree
, tree
, tree
, tree
);
214 static tree
fold_builtin_sprintf_chk (tree
, enum built_in_function
);
215 static tree
fold_builtin_printf (tree
, tree
, tree
, bool, enum built_in_function
);
216 static tree
fold_builtin_fprintf (tree
, tree
, tree
, tree
, bool,
217 enum built_in_function
);
218 static bool init_target_chars (void);
220 static unsigned HOST_WIDE_INT target_newline
;
221 static unsigned HOST_WIDE_INT target_percent
;
222 static unsigned HOST_WIDE_INT target_c
;
223 static unsigned HOST_WIDE_INT target_s
;
224 static char target_percent_c
[3];
225 static char target_percent_s
[3];
226 static char target_percent_s_newline
[4];
227 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
228 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
229 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
230 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
231 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
232 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
233 static tree
do_mpfr_sincos (tree
, tree
, tree
);
234 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
235 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
236 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
237 const REAL_VALUE_TYPE
*, bool);
238 static tree
do_mpfr_remquo (tree
, tree
, tree
);
239 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
242 /* Return true if NODE should be considered for inline expansion regardless
243 of the optimization level. This means whenever a function is invoked with
244 its "internal" name, which normally contains the prefix "__builtin". */
246 static bool called_as_built_in (tree node
)
248 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
249 if (strncmp (name
, "__builtin_", 10) == 0)
251 if (strncmp (name
, "__sync_", 7) == 0)
256 /* Return the alignment in bits of EXP, a pointer valued expression.
257 But don't return more than MAX_ALIGN no matter what.
258 The alignment returned is, by default, the alignment of the thing that
259 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
261 Otherwise, look at the expression to see if we can do better, i.e., if the
262 expression is actually pointing at an object whose alignment is tighter. */
265 get_pointer_alignment (tree exp
, unsigned int max_align
)
267 unsigned int align
, inner
;
269 /* We rely on TER to compute accurate alignment information. */
270 if (!(optimize
&& flag_tree_ter
))
273 if (!POINTER_TYPE_P (TREE_TYPE (exp
)))
276 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
277 align
= MIN (align
, max_align
);
281 switch (TREE_CODE (exp
))
285 case NON_LVALUE_EXPR
:
286 exp
= TREE_OPERAND (exp
, 0);
287 if (! POINTER_TYPE_P (TREE_TYPE (exp
)))
290 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
291 align
= MIN (inner
, max_align
);
295 /* If sum of pointer + int, restrict our maximum alignment to that
296 imposed by the integer. If not, we can't do any better than
298 if (! host_integerp (TREE_OPERAND (exp
, 1), 1))
301 while (((tree_low_cst (TREE_OPERAND (exp
, 1), 1))
302 & (max_align
/ BITS_PER_UNIT
- 1))
306 exp
= TREE_OPERAND (exp
, 0);
310 /* See what we are pointing at and look at its alignment. */
311 exp
= TREE_OPERAND (exp
, 0);
313 if (handled_component_p (exp
))
315 HOST_WIDE_INT bitsize
, bitpos
;
317 enum machine_mode mode
;
318 int unsignedp
, volatilep
;
320 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
321 &mode
, &unsignedp
, &volatilep
, true);
323 inner
= MIN (inner
, (unsigned) (bitpos
& -bitpos
));
324 if (offset
&& TREE_CODE (offset
) == PLUS_EXPR
325 && host_integerp (TREE_OPERAND (offset
, 1), 1))
327 /* Any overflow in calculating offset_bits won't change
330 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
334 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
335 offset
= TREE_OPERAND (offset
, 0);
337 if (offset
&& TREE_CODE (offset
) == MULT_EXPR
338 && host_integerp (TREE_OPERAND (offset
, 1), 1))
340 /* Any overflow in calculating offset_factor won't change
342 unsigned offset_factor
343 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
347 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
350 inner
= MIN (inner
, BITS_PER_UNIT
);
352 if (TREE_CODE (exp
) == FUNCTION_DECL
)
353 align
= FUNCTION_BOUNDARY
;
354 else if (DECL_P (exp
))
355 align
= MIN (inner
, DECL_ALIGN (exp
));
356 #ifdef CONSTANT_ALIGNMENT
357 else if (CONSTANT_CLASS_P (exp
))
358 align
= MIN (inner
, (unsigned)CONSTANT_ALIGNMENT (exp
, align
));
360 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
361 || TREE_CODE (exp
) == INDIRECT_REF
)
362 align
= MIN (TYPE_ALIGN (TREE_TYPE (exp
)), inner
);
364 align
= MIN (align
, inner
);
365 return MIN (align
, max_align
);
373 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
374 way, because it could contain a zero byte in the middle.
375 TREE_STRING_LENGTH is the size of the character array, not the string.
377 ONLY_VALUE should be nonzero if the result is not going to be emitted
378 into the instruction stream and zero if it is going to be expanded.
379 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
380 is returned, otherwise NULL, since
381 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
382 evaluate the side-effects.
384 The value returned is of type `ssizetype'.
386 Unfortunately, string_constant can't access the values of const char
387 arrays with initializers, so neither can we do so here. */
390 c_strlen (tree src
, int only_value
)
393 HOST_WIDE_INT offset
;
398 if (TREE_CODE (src
) == COND_EXPR
399 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
403 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
404 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
405 if (tree_int_cst_equal (len1
, len2
))
409 if (TREE_CODE (src
) == COMPOUND_EXPR
410 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
411 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
413 src
= string_constant (src
, &offset_node
);
417 max
= TREE_STRING_LENGTH (src
) - 1;
418 ptr
= TREE_STRING_POINTER (src
);
420 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
422 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
423 compute the offset to the following null if we don't know where to
424 start searching for it. */
427 for (i
= 0; i
< max
; i
++)
431 /* We don't know the starting offset, but we do know that the string
432 has no internal zero bytes. We can assume that the offset falls
433 within the bounds of the string; otherwise, the programmer deserves
434 what he gets. Subtract the offset from the length of the string,
435 and return that. This would perhaps not be valid if we were dealing
436 with named arrays in addition to literal string constants. */
438 return size_diffop (size_int (max
), offset_node
);
441 /* We have a known offset into the string. Start searching there for
442 a null character if we can represent it as a single HOST_WIDE_INT. */
443 if (offset_node
== 0)
445 else if (! host_integerp (offset_node
, 0))
448 offset
= tree_low_cst (offset_node
, 0);
450 /* If the offset is known to be out of bounds, warn, and call strlen at
452 if (offset
< 0 || offset
> max
)
454 warning (0, "offset outside bounds of constant string");
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr
+ offset
));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
475 src
= string_constant (src
, &offset_node
);
479 if (offset_node
== 0)
480 return TREE_STRING_POINTER (src
);
481 else if (!host_integerp (offset_node
, 1)
482 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
485 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
492 c_readstr (const char *str
, enum machine_mode mode
)
498 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
503 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
506 if (WORDS_BIG_ENDIAN
)
507 j
= GET_MODE_SIZE (mode
) - i
- 1;
508 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
510 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
512 gcc_assert (j
<= 2 * HOST_BITS_PER_WIDE_INT
);
515 ch
= (unsigned char) str
[i
];
516 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
518 return immed_double_const (c
[0], c
[1], mode
);
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
526 target_char_cast (tree cst
, char *p
)
528 unsigned HOST_WIDE_INT val
, hostval
;
530 if (!host_integerp (cst
, 1)
531 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
534 val
= tree_low_cst (cst
, 1);
535 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
536 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
539 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
540 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
554 builtin_save_expr (tree exp
)
556 if (TREE_ADDRESSABLE (exp
) == 0
557 && (TREE_CODE (exp
) == PARM_DECL
558 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
))))
561 return save_expr (exp
);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
569 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
588 tem
= frame_pointer_rtx
;
591 tem
= hard_frame_pointer_rtx
;
593 /* Tell reload not to eliminate the frame pointer. */
594 current_function_accesses_prior_frames
= 1;
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
603 SETUP_FRAME_ADDRESSES ();
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
615 /* Scan back COUNT frames to the specified frame. */
616 for (i
= 0; i
< count
; i
++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
623 tem
= memory_address (Pmode
, tem
);
624 tem
= gen_frame_mem (Pmode
, tem
);
625 tem
= copy_to_reg (tem
);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem
);
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem
= RETURN_ADDR_RTX (count
, tem
);
641 tem
= memory_address (Pmode
,
642 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
643 tem
= gen_frame_mem (Pmode
, tem
);
648 /* Alias set used for setjmp buffer. */
649 static HOST_WIDE_INT setjmp_alias_set
= -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
656 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
658 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
662 if (setjmp_alias_set
== -1)
663 setjmp_alias_set
= new_alias_set ();
665 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
667 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
674 set_mem_alias_set (mem
, setjmp_alias_set
);
675 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
677 mem
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
))),
678 set_mem_alias_set (mem
, setjmp_alias_set
);
680 emit_move_insn (validize_mem (mem
),
681 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
683 stack_save
= gen_rtx_MEM (sa_mode
,
684 plus_constant (buf_addr
,
685 2 * GET_MODE_SIZE (Pmode
)));
686 set_mem_alias_set (stack_save
, setjmp_alias_set
);
687 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup
)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 current_function_calls_setjmp
= 1;
699 /* We have a nonlocal label. */
700 current_function_has_nonlocal_label
= 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_insn (gen_rtx_CLOBBER (VOIDmode
, static_chain_rtx
));
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto
)
723 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_insn (gen_rtx_CLOBBER (VOIDmode
, hard_frame_pointer_rtx
));
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs
[ARG_POINTER_REGNUM
])
732 #ifdef ELIMINABLE_REGS
734 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
736 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
737 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
738 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
741 if (i
== ARRAY_SIZE (elim_regs
))
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx
,
747 copy_to_reg (get_arg_pointer_save_area (cfun
)));
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver
)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver
)
759 emit_insn (gen_nonlocal_goto_receiver ());
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
776 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
778 rtx fp
, lab
, stack
, insn
, last
;
779 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
781 if (setjmp_alias_set
== -1)
782 setjmp_alias_set
= new_alias_set ();
784 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
786 buf_addr
= force_reg (Pmode
, buf_addr
);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value
== const1_rtx
);
795 last
= get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp
)
798 emit_insn (gen_builtin_longjmp (buf_addr
));
802 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
803 lab
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
,
804 GET_MODE_SIZE (Pmode
)));
806 stack
= gen_rtx_MEM (sa_mode
, plus_constant (buf_addr
,
807 2 * GET_MODE_SIZE (Pmode
)));
808 set_mem_alias_set (fp
, setjmp_alias_set
);
809 set_mem_alias_set (lab
, setjmp_alias_set
);
810 set_mem_alias_set (stack
, setjmp_alias_set
);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto
)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
823 lab
= copy_to_reg (lab
);
825 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
826 gen_rtx_MEM (BLKmode
,
827 gen_rtx_SCRATCH (VOIDmode
))));
828 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
829 gen_rtx_MEM (BLKmode
,
830 hard_frame_pointer_rtx
)));
832 emit_move_insn (hard_frame_pointer_rtx
, fp
);
833 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
835 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
836 emit_insn (gen_rtx_USE (VOIDmode
, stack_pointer_rtx
));
837 emit_indirect_jump (lab
);
841 /* Search backwards and mark the jump insn as a non-local goto.
842 Note that this precludes the use of __builtin_longjmp to a
843 __builtin_setjmp target in the same function. However, we've
844 already cautioned the user that these functions are for
845 internal exception handling use only. */
846 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
848 gcc_assert (insn
!= last
);
852 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO
, const0_rtx
,
856 else if (CALL_P (insn
))
861 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
862 and the address of the save area. */
865 expand_builtin_nonlocal_goto (tree exp
)
867 tree t_label
, t_save_area
;
868 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
870 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
873 t_label
= CALL_EXPR_ARG (exp
, 0);
874 t_save_area
= CALL_EXPR_ARG (exp
, 1);
876 r_label
= expand_normal (t_label
);
877 r_label
= convert_memory_address (Pmode
, r_label
);
878 r_save_area
= expand_normal (t_save_area
);
879 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
880 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
881 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
882 plus_constant (r_save_area
, GET_MODE_SIZE (Pmode
)));
884 current_function_has_nonlocal_goto
= 1;
886 #ifdef HAVE_nonlocal_goto
887 /* ??? We no longer need to pass the static chain value, afaik. */
888 if (HAVE_nonlocal_goto
)
889 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
893 r_label
= copy_to_reg (r_label
);
895 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
896 gen_rtx_MEM (BLKmode
,
897 gen_rtx_SCRATCH (VOIDmode
))));
899 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
900 gen_rtx_MEM (BLKmode
,
901 hard_frame_pointer_rtx
)));
903 /* Restore frame pointer for containing function.
904 This sets the actual hard register used for the frame pointer
905 to the location of the function's incoming static chain info.
906 The non-local goto handler will then adjust it to contain the
907 proper value and reload the argument pointer, if needed. */
908 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
909 emit_stack_restore (SAVE_NONLOCAL
, r_sp
, NULL_RTX
);
911 /* USE of hard_frame_pointer_rtx added for consistency;
912 not clear if really needed. */
913 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
914 emit_insn (gen_rtx_USE (VOIDmode
, stack_pointer_rtx
));
915 emit_indirect_jump (r_label
);
918 /* Search backwards to the jump insn and mark it as a
920 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
924 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO
,
925 const0_rtx
, REG_NOTES (insn
));
928 else if (CALL_P (insn
))
935 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
936 (not all will be used on all machines) that was passed to __builtin_setjmp.
937 It updates the stack pointer in that block to correspond to the current
941 expand_builtin_update_setjmp_buf (rtx buf_addr
)
943 enum machine_mode sa_mode
= Pmode
;
947 #ifdef HAVE_save_stack_nonlocal
948 if (HAVE_save_stack_nonlocal
)
949 sa_mode
= insn_data
[(int) CODE_FOR_save_stack_nonlocal
].operand
[0].mode
;
951 #ifdef STACK_SAVEAREA_MODE
952 sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
956 = gen_rtx_MEM (sa_mode
,
959 plus_constant (buf_addr
, 2 * GET_MODE_SIZE (Pmode
))));
963 emit_insn (gen_setjmp ());
966 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
969 /* Expand a call to __builtin_prefetch. For a target that does not support
970 data prefetch, evaluate the memory address argument in case it has side
974 expand_builtin_prefetch (tree exp
)
976 tree arg0
, arg1
, arg2
;
980 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
983 arg0
= CALL_EXPR_ARG (exp
, 0);
985 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
986 zero (read) and argument 2 (locality) defaults to 3 (high degree of
988 nargs
= call_expr_nargs (exp
);
990 arg1
= CALL_EXPR_ARG (exp
, 1);
992 arg1
= integer_zero_node
;
994 arg2
= CALL_EXPR_ARG (exp
, 2);
996 arg2
= build_int_cst (NULL_TREE
, 3);
998 /* Argument 0 is an address. */
999 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1001 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1002 if (TREE_CODE (arg1
) != INTEGER_CST
)
1004 error ("second argument to %<__builtin_prefetch%> must be a constant");
1005 arg1
= integer_zero_node
;
1007 op1
= expand_normal (arg1
);
1008 /* Argument 1 must be either zero or one. */
1009 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1011 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1016 /* Argument 2 (locality) must be a compile-time constant int. */
1017 if (TREE_CODE (arg2
) != INTEGER_CST
)
1019 error ("third argument to %<__builtin_prefetch%> must be a constant");
1020 arg2
= integer_zero_node
;
1022 op2
= expand_normal (arg2
);
1023 /* Argument 2 must be 0, 1, 2, or 3. */
1024 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1026 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1030 #ifdef HAVE_prefetch
1033 if ((! (*insn_data
[(int) CODE_FOR_prefetch
].operand
[0].predicate
)
1035 insn_data
[(int) CODE_FOR_prefetch
].operand
[0].mode
))
1036 || (GET_MODE (op0
) != Pmode
))
1038 op0
= convert_memory_address (Pmode
, op0
);
1039 op0
= force_reg (Pmode
, op0
);
1041 emit_insn (gen_prefetch (op0
, op1
, op2
));
1045 /* Don't do anything with direct references to volatile memory, but
1046 generate code to handle other side effects. */
1047 if (!MEM_P (op0
) && side_effects_p (op0
))
1051 /* Get a MEM rtx for expression EXP which is the address of an operand
1052 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1053 the maximum length of the block of memory that might be accessed or
1057 get_memory_rtx (tree exp
, tree len
)
1059 rtx addr
= expand_expr (exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1060 rtx mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1062 /* Get an expression we can use to find the attributes to assign to MEM.
1063 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1064 we can. First remove any nops. */
1065 while ((TREE_CODE (exp
) == NOP_EXPR
|| TREE_CODE (exp
) == CONVERT_EXPR
1066 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
1067 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1068 exp
= TREE_OPERAND (exp
, 0);
1070 if (TREE_CODE (exp
) == ADDR_EXPR
)
1071 exp
= TREE_OPERAND (exp
, 0);
1072 else if (POINTER_TYPE_P (TREE_TYPE (exp
)))
1073 exp
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (exp
)), exp
);
1077 /* Honor attributes derived from exp, except for the alias set
1078 (as builtin stringops may alias with anything) and the size
1079 (as stringops may access multiple array elements). */
1082 set_mem_attributes (mem
, exp
, 0);
1084 /* Allow the string and memory builtins to overflow from one
1085 field into another, see http://gcc.gnu.org/PR23561.
1086 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1087 memory accessed by the string or memory builtin will fit
1088 within the field. */
1089 if (MEM_EXPR (mem
) && TREE_CODE (MEM_EXPR (mem
)) == COMPONENT_REF
)
1091 tree mem_expr
= MEM_EXPR (mem
);
1092 HOST_WIDE_INT offset
= -1, length
= -1;
1095 while (TREE_CODE (inner
) == ARRAY_REF
1096 || TREE_CODE (inner
) == NOP_EXPR
1097 || TREE_CODE (inner
) == CONVERT_EXPR
1098 || TREE_CODE (inner
) == NON_LVALUE_EXPR
1099 || TREE_CODE (inner
) == VIEW_CONVERT_EXPR
1100 || TREE_CODE (inner
) == SAVE_EXPR
)
1101 inner
= TREE_OPERAND (inner
, 0);
1103 gcc_assert (TREE_CODE (inner
) == COMPONENT_REF
);
1105 if (MEM_OFFSET (mem
)
1106 && GET_CODE (MEM_OFFSET (mem
)) == CONST_INT
)
1107 offset
= INTVAL (MEM_OFFSET (mem
));
1109 if (offset
>= 0 && len
&& host_integerp (len
, 0))
1110 length
= tree_low_cst (len
, 0);
1112 while (TREE_CODE (inner
) == COMPONENT_REF
)
1114 tree field
= TREE_OPERAND (inner
, 1);
1115 gcc_assert (! DECL_BIT_FIELD (field
));
1116 gcc_assert (TREE_CODE (mem_expr
) == COMPONENT_REF
);
1117 gcc_assert (field
== TREE_OPERAND (mem_expr
, 1));
1120 && TYPE_SIZE_UNIT (TREE_TYPE (inner
))
1121 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner
)), 0))
1124 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner
)), 0);
1125 /* If we can prove the memory starting at XEXP (mem, 0)
1126 and ending at XEXP (mem, 0) + LENGTH will fit into
1127 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1130 && offset
+ length
<= size
)
1135 && host_integerp (DECL_FIELD_OFFSET (field
), 0))
1136 offset
+= tree_low_cst (DECL_FIELD_OFFSET (field
), 0)
1137 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1145 mem_expr
= TREE_OPERAND (mem_expr
, 0);
1146 inner
= TREE_OPERAND (inner
, 0);
1149 if (mem_expr
== NULL
)
1151 if (mem_expr
!= MEM_EXPR (mem
))
1153 set_mem_expr (mem
, mem_expr
);
1154 set_mem_offset (mem
, offset
>= 0 ? GEN_INT (offset
) : NULL_RTX
);
1157 set_mem_alias_set (mem
, 0);
1158 set_mem_size (mem
, NULL_RTX
);
1164 /* Built-in functions to perform an untyped call and return. */
1166 /* For each register that may be used for calling a function, this
1167 gives a mode used to copy the register's value. VOIDmode indicates
1168 the register is not used for calling a function. If the machine
1169 has register windows, this gives only the outbound registers.
1170 INCOMING_REGNO gives the corresponding inbound register. */
1171 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
1173 /* For each register that may be used for returning values, this gives
1174 a mode used to copy the register's value. VOIDmode indicates the
1175 register is not used for returning values. If the machine has
1176 register windows, this gives only the outbound registers.
1177 INCOMING_REGNO gives the corresponding inbound register. */
1178 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
1180 /* For each register that may be used for calling a function, this
1181 gives the offset of that register into the block returned by
1182 __builtin_apply_args. 0 indicates that the register is not
1183 used for calling a function. */
1184 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
1186 /* Return the size required for the block returned by __builtin_apply_args,
1187 and initialize apply_args_mode. */
1190 apply_args_size (void)
1192 static int size
= -1;
1195 enum machine_mode mode
;
1197 /* The values computed by this function never change. */
1200 /* The first value is the incoming arg-pointer. */
1201 size
= GET_MODE_SIZE (Pmode
);
1203 /* The second value is the structure value address unless this is
1204 passed as an "invisible" first argument. */
1205 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1206 size
+= GET_MODE_SIZE (Pmode
);
1208 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1209 if (FUNCTION_ARG_REGNO_P (regno
))
1211 mode
= reg_raw_mode
[regno
];
1213 gcc_assert (mode
!= VOIDmode
);
1215 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1216 if (size
% align
!= 0)
1217 size
= CEIL (size
, align
) * align
;
1218 apply_args_reg_offset
[regno
] = size
;
1219 size
+= GET_MODE_SIZE (mode
);
1220 apply_args_mode
[regno
] = mode
;
1224 apply_args_mode
[regno
] = VOIDmode
;
1225 apply_args_reg_offset
[regno
] = 0;
1231 /* Return the size required for the block returned by __builtin_apply,
1232 and initialize apply_result_mode. */
1235 apply_result_size (void)
1237 static int size
= -1;
1239 enum machine_mode mode
;
1241 /* The values computed by this function never change. */
1246 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1247 if (FUNCTION_VALUE_REGNO_P (regno
))
1249 mode
= reg_raw_mode
[regno
];
1251 gcc_assert (mode
!= VOIDmode
);
1253 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1254 if (size
% align
!= 0)
1255 size
= CEIL (size
, align
) * align
;
1256 size
+= GET_MODE_SIZE (mode
);
1257 apply_result_mode
[regno
] = mode
;
1260 apply_result_mode
[regno
] = VOIDmode
;
1262 /* Allow targets that use untyped_call and untyped_return to override
1263 the size so that machine-specific information can be stored here. */
1264 #ifdef APPLY_RESULT_SIZE
1265 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep each slot naturally aligned, matching apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1302 /* Save the state required to perform an untyped call with the same
1303 arguments as were passed to the current function. */
1306 expand_builtin_apply_args_1 (void)
1309 int size
, align
, regno
;
1310 enum machine_mode mode
;
1311 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1313 /* Create a block where the arg-pointer, structure value address,
1314 and argument registers can be saved. */
1315 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1317 /* Walk past the arg-pointer and structure value address. */
1318 size
= GET_MODE_SIZE (Pmode
);
1319 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1320 size
+= GET_MODE_SIZE (Pmode
);
1322 /* Save each register used in calling a function to the block. */
1323 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1324 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1326 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1327 if (size
% align
!= 0)
1328 size
= CEIL (size
, align
) * align
;
1330 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1332 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1333 size
+= GET_MODE_SIZE (mode
);
1336 /* Save the arg pointer to the block. */
1337 tem
= copy_to_reg (virtual_incoming_args_rtx
);
1338 #ifdef STACK_GROWS_DOWNWARD
1339 /* We need the pointer as the caller actually passed them to us, not
1340 as we might have pretended they were passed. Make sure it's a valid
1341 operand, as emit_move_insn isn't expected to handle a PLUS. */
1343 = force_operand (plus_constant (tem
, current_function_pretend_args_size
),
1346 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1348 size
= GET_MODE_SIZE (Pmode
);
1350 /* Save the structure value address unless this is passed as an
1351 "invisible" first argument. */
1352 if (struct_incoming_value
)
1354 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1355 copy_to_reg (struct_incoming_value
));
1356 size
+= GET_MODE_SIZE (Pmode
);
1359 /* Return the address of the block. */
1360 return copy_addr_to_reg (XEXP (registers
, 0));
1363 /* __builtin_apply_args returns block of memory allocated on
1364 the stack into which is stored the arg pointer, structure
1365 value address, static chain, and all the registers that might
1366 possibly be used in performing a function call. The code is
1367 moved to the start of the function so the incoming values are
1371 expand_builtin_apply_args (void)
1373 /* Don't do __builtin_apply_args more than once in a function.
1374 Save the result of the first call and reuse it. */
1375 if (apply_args_value
!= 0)
1376 return apply_args_value
;
1378 /* When this function is called, it means that registers must be
1379 saved on entry to this function. So we migrate the
1380 call to the first insn of this function. */
1385 temp
= expand_builtin_apply_args_1 ();
1389 apply_args_value
= temp
;
1391 /* Put the insns after the NOTE that starts the function.
1392 If this is inside a start_sequence, make the outer-level insn
1393 chain current, so the code is placed at the start of the
1395 push_topmost_sequence ();
1396 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1397 pop_topmost_sequence ();
1402 /* Perform an untyped call and save the state required to perform an
1403 untyped return of whatever value was returned by the given function. */
1406 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1408 int size
, align
, regno
;
1409 enum machine_mode mode
;
1410 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1411 rtx old_stack_level
= 0;
1412 rtx call_fusage
= 0;
1413 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1415 arguments
= convert_memory_address (Pmode
, arguments
);
1417 /* Create a block where the return registers can be saved. */
1418 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1420 /* Fetch the arg pointer from the ARGUMENTS block. */
1421 incoming_args
= gen_reg_rtx (Pmode
);
1422 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1423 #ifndef STACK_GROWS_DOWNWARD
1424 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1425 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1428 /* Push a new argument block and copy the arguments. Do not allow
1429 the (potential) memcpy call below to interfere with our stack
1431 do_pending_stack_adjust ();
1434 /* Save the stack with nonlocal if available. */
1435 #ifdef HAVE_save_stack_nonlocal
1436 if (HAVE_save_stack_nonlocal
)
1437 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
, NULL_RTX
);
1440 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1442 /* Allocate a block of memory onto the stack and copy the memory
1443 arguments to the outgoing arguments address. */
1444 allocate_dynamic_stack_space (argsize
, 0, BITS_PER_UNIT
);
1445 dest
= virtual_outgoing_args_rtx
;
1446 #ifndef STACK_GROWS_DOWNWARD
1447 if (GET_CODE (argsize
) == CONST_INT
)
1448 dest
= plus_constant (dest
, -INTVAL (argsize
));
1450 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1452 dest
= gen_rtx_MEM (BLKmode
, dest
);
1453 set_mem_align (dest
, PARM_BOUNDARY
);
1454 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1455 set_mem_align (src
, PARM_BOUNDARY
);
1456 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1458 /* Refer to the argument block. */
1460 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1461 set_mem_align (arguments
, PARM_BOUNDARY
);
1463 /* Walk past the arg-pointer and structure value address. */
1464 size
= GET_MODE_SIZE (Pmode
);
1466 size
+= GET_MODE_SIZE (Pmode
);
1468 /* Restore each of the registers previously saved. Make USE insns
1469 for each of these registers for use in making the call. */
1470 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1471 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1473 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1474 if (size
% align
!= 0)
1475 size
= CEIL (size
, align
) * align
;
1476 reg
= gen_rtx_REG (mode
, regno
);
1477 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1478 use_reg (&call_fusage
, reg
);
1479 size
+= GET_MODE_SIZE (mode
);
1482 /* Restore the structure value address unless this is passed as an
1483 "invisible" first argument. */
1484 size
= GET_MODE_SIZE (Pmode
);
1487 rtx value
= gen_reg_rtx (Pmode
);
1488 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1489 emit_move_insn (struct_value
, value
);
1490 if (REG_P (struct_value
))
1491 use_reg (&call_fusage
, struct_value
);
1492 size
+= GET_MODE_SIZE (Pmode
);
1495 /* All arguments and registers used for the call are set up by now! */
1496 function
= prepare_call_address (function
, NULL
, &call_fusage
, 0, 0);
1498 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1499 and we don't want to load it into a register as an optimization,
1500 because prepare_call_address already did it if it should be done. */
1501 if (GET_CODE (function
) != SYMBOL_REF
)
1502 function
= memory_address (FUNCTION_MODE
, function
);
1504 /* Generate the actual call instruction and save the return value. */
1505 #ifdef HAVE_untyped_call
1506 if (HAVE_untyped_call
)
1507 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1508 result
, result_vector (1, result
)));
1511 #ifdef HAVE_call_value
1512 if (HAVE_call_value
)
1516 /* Locate the unique return register. It is not possible to
1517 express a call that sets more than one return register using
1518 call_value; use untyped_call for that. In fact, untyped_call
1519 only needs to save the return registers in the given block. */
1520 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1521 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1523 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1525 valreg
= gen_rtx_REG (mode
, regno
);
1528 emit_call_insn (GEN_CALL_VALUE (valreg
,
1529 gen_rtx_MEM (FUNCTION_MODE
, function
),
1530 const0_rtx
, NULL_RTX
, const0_rtx
));
1532 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1538 /* Find the CALL insn we just emitted, and attach the register usage
1540 call_insn
= last_call_insn ();
1541 add_function_usage_to (call_insn
, call_fusage
);
1543 /* Restore the stack. */
1544 #ifdef HAVE_save_stack_nonlocal
1545 if (HAVE_save_stack_nonlocal
)
1546 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
, NULL_RTX
);
1549 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1553 /* Return the address of the result block. */
1554 result
= copy_addr_to_reg (XEXP (result
, 0));
1555 return convert_memory_address (ptr_mode
, result
);
1558 /* Perform an untyped return. */
1561 expand_builtin_return (rtx result
)
1563 int size
, align
, regno
;
1564 enum machine_mode mode
;
1566 rtx call_fusage
= 0;
1568 result
= convert_memory_address (Pmode
, result
);
1570 apply_result_size ();
1571 result
= gen_rtx_MEM (BLKmode
, result
);
1573 #ifdef HAVE_untyped_return
1574 if (HAVE_untyped_return
)
1576 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1582 /* Restore the return value and note that each value is used. */
1584 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1585 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1587 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1588 if (size
% align
!= 0)
1589 size
= CEIL (size
, align
) * align
;
1590 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1591 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1593 push_to_sequence (call_fusage
);
1594 emit_insn (gen_rtx_USE (VOIDmode
, reg
));
1595 call_fusage
= get_insns ();
1597 size
+= GET_MODE_SIZE (mode
);
1600 /* Put the USE insns before the return. */
1601 emit_insn (call_fusage
);
1603 /* Return whatever values was restored by jumping directly to the end
1605 expand_naked_return ();
1608 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1610 static enum type_class
1611 type_to_class (tree type
)
1613 switch (TREE_CODE (type
))
1615 case VOID_TYPE
: return void_type_class
;
1616 case INTEGER_TYPE
: return integer_type_class
;
1617 case ENUMERAL_TYPE
: return enumeral_type_class
;
1618 case BOOLEAN_TYPE
: return boolean_type_class
;
1619 case POINTER_TYPE
: return pointer_type_class
;
1620 case REFERENCE_TYPE
: return reference_type_class
;
1621 case OFFSET_TYPE
: return offset_type_class
;
1622 case REAL_TYPE
: return real_type_class
;
1623 case COMPLEX_TYPE
: return complex_type_class
;
1624 case FUNCTION_TYPE
: return function_type_class
;
1625 case METHOD_TYPE
: return method_type_class
;
1626 case RECORD_TYPE
: return record_type_class
;
1628 case QUAL_UNION_TYPE
: return union_type_class
;
1629 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1630 ? string_type_class
: array_type_class
);
1631 case LANG_TYPE
: return lang_type_class
;
1632 default: return no_type_class
;
1636 /* Expand a call EXP to __builtin_classify_type. */
1639 expand_builtin_classify_type (tree exp
)
1641 if (call_expr_nargs (exp
))
1642 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1643 return GEN_INT (no_type_class
);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1660 /* Return mathematic function equivalent to FN but operating directly
1661 on TYPE, if available. If we can't do the conversion, return zero. */
1663 mathfn_built_in (tree type
, enum built_in_function fn
)
1665 enum built_in_function fcode
, fcodef
, fcodel
;
1669 CASE_MATHFN (BUILT_IN_ACOS
)
1670 CASE_MATHFN (BUILT_IN_ACOSH
)
1671 CASE_MATHFN (BUILT_IN_ASIN
)
1672 CASE_MATHFN (BUILT_IN_ASINH
)
1673 CASE_MATHFN (BUILT_IN_ATAN
)
1674 CASE_MATHFN (BUILT_IN_ATAN2
)
1675 CASE_MATHFN (BUILT_IN_ATANH
)
1676 CASE_MATHFN (BUILT_IN_CBRT
)
1677 CASE_MATHFN (BUILT_IN_CEIL
)
1678 CASE_MATHFN (BUILT_IN_CEXPI
)
1679 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1680 CASE_MATHFN (BUILT_IN_COS
)
1681 CASE_MATHFN (BUILT_IN_COSH
)
1682 CASE_MATHFN (BUILT_IN_DREM
)
1683 CASE_MATHFN (BUILT_IN_ERF
)
1684 CASE_MATHFN (BUILT_IN_ERFC
)
1685 CASE_MATHFN (BUILT_IN_EXP
)
1686 CASE_MATHFN (BUILT_IN_EXP10
)
1687 CASE_MATHFN (BUILT_IN_EXP2
)
1688 CASE_MATHFN (BUILT_IN_EXPM1
)
1689 CASE_MATHFN (BUILT_IN_FABS
)
1690 CASE_MATHFN (BUILT_IN_FDIM
)
1691 CASE_MATHFN (BUILT_IN_FLOOR
)
1692 CASE_MATHFN (BUILT_IN_FMA
)
1693 CASE_MATHFN (BUILT_IN_FMAX
)
1694 CASE_MATHFN (BUILT_IN_FMIN
)
1695 CASE_MATHFN (BUILT_IN_FMOD
)
1696 CASE_MATHFN (BUILT_IN_FREXP
)
1697 CASE_MATHFN (BUILT_IN_GAMMA
)
1698 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1699 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1700 CASE_MATHFN (BUILT_IN_HYPOT
)
1701 CASE_MATHFN (BUILT_IN_ILOGB
)
1702 CASE_MATHFN (BUILT_IN_INF
)
1703 CASE_MATHFN (BUILT_IN_ISINF
)
1704 CASE_MATHFN (BUILT_IN_J0
)
1705 CASE_MATHFN (BUILT_IN_J1
)
1706 CASE_MATHFN (BUILT_IN_JN
)
1707 CASE_MATHFN (BUILT_IN_LCEIL
)
1708 CASE_MATHFN (BUILT_IN_LDEXP
)
1709 CASE_MATHFN (BUILT_IN_LFLOOR
)
1710 CASE_MATHFN (BUILT_IN_LGAMMA
)
1711 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1712 CASE_MATHFN (BUILT_IN_LLCEIL
)
1713 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1714 CASE_MATHFN (BUILT_IN_LLRINT
)
1715 CASE_MATHFN (BUILT_IN_LLROUND
)
1716 CASE_MATHFN (BUILT_IN_LOG
)
1717 CASE_MATHFN (BUILT_IN_LOG10
)
1718 CASE_MATHFN (BUILT_IN_LOG1P
)
1719 CASE_MATHFN (BUILT_IN_LOG2
)
1720 CASE_MATHFN (BUILT_IN_LOGB
)
1721 CASE_MATHFN (BUILT_IN_LRINT
)
1722 CASE_MATHFN (BUILT_IN_LROUND
)
1723 CASE_MATHFN (BUILT_IN_MODF
)
1724 CASE_MATHFN (BUILT_IN_NAN
)
1725 CASE_MATHFN (BUILT_IN_NANS
)
1726 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1727 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1728 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1729 CASE_MATHFN (BUILT_IN_POW
)
1730 CASE_MATHFN (BUILT_IN_POWI
)
1731 CASE_MATHFN (BUILT_IN_POW10
)
1732 CASE_MATHFN (BUILT_IN_REMAINDER
)
1733 CASE_MATHFN (BUILT_IN_REMQUO
)
1734 CASE_MATHFN (BUILT_IN_RINT
)
1735 CASE_MATHFN (BUILT_IN_ROUND
)
1736 CASE_MATHFN (BUILT_IN_SCALB
)
1737 CASE_MATHFN (BUILT_IN_SCALBLN
)
1738 CASE_MATHFN (BUILT_IN_SCALBN
)
1739 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1740 CASE_MATHFN (BUILT_IN_SIN
)
1741 CASE_MATHFN (BUILT_IN_SINCOS
)
1742 CASE_MATHFN (BUILT_IN_SINH
)
1743 CASE_MATHFN (BUILT_IN_SQRT
)
1744 CASE_MATHFN (BUILT_IN_TAN
)
1745 CASE_MATHFN (BUILT_IN_TANH
)
1746 CASE_MATHFN (BUILT_IN_TGAMMA
)
1747 CASE_MATHFN (BUILT_IN_TRUNC
)
1748 CASE_MATHFN (BUILT_IN_Y0
)
1749 CASE_MATHFN (BUILT_IN_Y1
)
1750 CASE_MATHFN (BUILT_IN_YN
)
1756 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1757 return implicit_built_in_decls
[fcode
];
1758 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1759 return implicit_built_in_decls
[fcodef
];
1760 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1761 return implicit_built_in_decls
[fcodel
];
1766 /* If errno must be maintained, expand the RTL to check if the result,
1767 TARGET, of a built-in function call, EXP, is NaN, and if so set
1771 expand_errno_check (tree exp
, rtx target
)
1773 rtx lab
= gen_label_rtx ();
1775 /* Test the result; if it is NaN, set errno=EDOM because
1776 the argument was not in the domain. */
1777 emit_cmp_and_jump_insns (target
, target
, EQ
, 0, GET_MODE (target
),
1781 /* If this built-in doesn't throw an exception, set errno directly. */
1782 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1784 #ifdef GEN_ERRNO_RTX
1785 rtx errno_rtx
= GEN_ERRNO_RTX
;
1788 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1790 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
1796 /* We can't set errno=EDOM directly; let the library call do it.
1797 Pop the arguments right away in case the call gets deleted. */
1799 expand_call (exp
, target
, 0);
1804 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1805 Return NULL_RTX if a normal call should be emitted rather than expanding
1806 the function in-line. EXP is the expression that is a call to the builtin
1807 function; if convenient, the result should be placed in TARGET.
1808 SUBTARGET may be used as the target for computing one of EXP's operands. */
1811 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1813 optab builtin_optab
;
1814 rtx op0
, insns
, before_call
;
1815 tree fndecl
= get_callee_fndecl (exp
);
1816 enum machine_mode mode
;
1817 bool errno_set
= false;
1820 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
1823 arg
= CALL_EXPR_ARG (exp
, 0);
1825 switch (DECL_FUNCTION_CODE (fndecl
))
1827 CASE_FLT_FN (BUILT_IN_SQRT
):
1828 errno_set
= ! tree_expr_nonnegative_p (arg
);
1829 builtin_optab
= sqrt_optab
;
1831 CASE_FLT_FN (BUILT_IN_EXP
):
1832 errno_set
= true; builtin_optab
= exp_optab
; break;
1833 CASE_FLT_FN (BUILT_IN_EXP10
):
1834 CASE_FLT_FN (BUILT_IN_POW10
):
1835 errno_set
= true; builtin_optab
= exp10_optab
; break;
1836 CASE_FLT_FN (BUILT_IN_EXP2
):
1837 errno_set
= true; builtin_optab
= exp2_optab
; break;
1838 CASE_FLT_FN (BUILT_IN_EXPM1
):
1839 errno_set
= true; builtin_optab
= expm1_optab
; break;
1840 CASE_FLT_FN (BUILT_IN_LOGB
):
1841 errno_set
= true; builtin_optab
= logb_optab
; break;
1842 CASE_FLT_FN (BUILT_IN_LOG
):
1843 errno_set
= true; builtin_optab
= log_optab
; break;
1844 CASE_FLT_FN (BUILT_IN_LOG10
):
1845 errno_set
= true; builtin_optab
= log10_optab
; break;
1846 CASE_FLT_FN (BUILT_IN_LOG2
):
1847 errno_set
= true; builtin_optab
= log2_optab
; break;
1848 CASE_FLT_FN (BUILT_IN_LOG1P
):
1849 errno_set
= true; builtin_optab
= log1p_optab
; break;
1850 CASE_FLT_FN (BUILT_IN_ASIN
):
1851 builtin_optab
= asin_optab
; break;
1852 CASE_FLT_FN (BUILT_IN_ACOS
):
1853 builtin_optab
= acos_optab
; break;
1854 CASE_FLT_FN (BUILT_IN_TAN
):
1855 builtin_optab
= tan_optab
; break;
1856 CASE_FLT_FN (BUILT_IN_ATAN
):
1857 builtin_optab
= atan_optab
; break;
1858 CASE_FLT_FN (BUILT_IN_FLOOR
):
1859 builtin_optab
= floor_optab
; break;
1860 CASE_FLT_FN (BUILT_IN_CEIL
):
1861 builtin_optab
= ceil_optab
; break;
1862 CASE_FLT_FN (BUILT_IN_TRUNC
):
1863 builtin_optab
= btrunc_optab
; break;
1864 CASE_FLT_FN (BUILT_IN_ROUND
):
1865 builtin_optab
= round_optab
; break;
1866 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
1867 builtin_optab
= nearbyint_optab
;
1868 if (flag_trapping_math
)
1870 /* Else fallthrough and expand as rint. */
1871 CASE_FLT_FN (BUILT_IN_RINT
):
1872 builtin_optab
= rint_optab
; break;
1877 /* Make a suitable register to place result in. */
1878 mode
= TYPE_MODE (TREE_TYPE (exp
));
1880 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
1883 /* Before working hard, check whether the instruction is available. */
1884 if (builtin_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
1886 target
= gen_reg_rtx (mode
);
1888 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1889 need to expand the argument again. This way, we will not perform
1890 side-effects more the once. */
1891 narg
= builtin_save_expr (arg
);
1895 exp
= build_call_expr (fndecl
, 1, arg
);
1898 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
1902 /* Compute into TARGET.
1903 Set TARGET to wherever the result comes back. */
1904 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
1909 expand_errno_check (exp
, target
);
1911 /* Output the entire sequence. */
1912 insns
= get_insns ();
1918 /* If we were unable to expand via the builtin, stop the sequence
1919 (without outputting the insns) and call to the library function
1920 with the stabilized argument list. */
1924 before_call
= get_last_insn ();
1926 target
= expand_call (exp
, target
, target
== const0_rtx
);
1928 /* If this is a sqrt operation and we don't care about errno, try to
1929 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1930 This allows the semantics of the libcall to be visible to the RTL
1932 if (builtin_optab
== sqrt_optab
&& !errno_set
)
1934 /* Search backwards through the insns emitted by expand_call looking
1935 for the instruction with the REG_RETVAL note. */
1936 rtx last
= get_last_insn ();
1937 while (last
!= before_call
)
1939 if (find_reg_note (last
, REG_RETVAL
, NULL
))
1941 rtx note
= find_reg_note (last
, REG_EQUAL
, NULL
);
1942 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1943 two elements, i.e. symbol_ref(sqrt) and the operand. */
1945 && GET_CODE (note
) == EXPR_LIST
1946 && GET_CODE (XEXP (note
, 0)) == EXPR_LIST
1947 && XEXP (XEXP (note
, 0), 1) != NULL_RTX
1948 && XEXP (XEXP (XEXP (note
, 0), 1), 1) == NULL_RTX
)
1950 rtx operand
= XEXP (XEXP (XEXP (note
, 0), 1), 0);
1951 /* Check operand is a register with expected mode. */
1954 && GET_MODE (operand
) == mode
)
1956 /* Replace the REG_EQUAL note with a SQRT rtx. */
1957 rtx equiv
= gen_rtx_SQRT (mode
, operand
);
1958 set_unique_reg_note (last
, REG_EQUAL
, equiv
);
1963 last
= PREV_INSN (last
);
/* NOTE(review): this region is a mangled extraction of GCC's builtins.c --
   statements are split across lines and several original lines (per the
   embedded numbering, e.g. 1994->1999, 2026->2031) are missing, so the text
   is not compilable as-is.  Code tokens below are left untouched; comments
   annotate what the surviving text shows.  */
1970 /* Expand a call to the builtin binary math functions (pow and atan2).
1971 Return NULL_RTX if a normal call should be emitted rather than expanding the
1972 function in-line. EXP is the expression that is a call to the builtin
1973 function; if convenient, the result should be placed in TARGET.
1974 SUBTARGET may be used as the target for computing one of EXP's
1978 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
1980 optab builtin_optab
;
1981 rtx op0
, op1
, insns
;
1982 int op1_type
= REAL_TYPE
;
1983 tree fndecl
= get_callee_fndecl (exp
);
1984 tree arg0
, arg1
, narg
;
1985 enum machine_mode mode
;
1986 bool errno_set
= true;
/* ldexp/scalbn-family builtins take an integer second argument, so the
   arglist validation below must expect INTEGER_TYPE instead of REAL_TYPE.  */
1989 switch (DECL_FUNCTION_CODE (fndecl
))
1991 CASE_FLT_FN (BUILT_IN_SCALBN
):
1992 CASE_FLT_FN (BUILT_IN_SCALBLN
):
1993 CASE_FLT_FN (BUILT_IN_LDEXP
):
1994 op1_type
= INTEGER_TYPE
;
1999 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2002 arg0
= CALL_EXPR_ARG (exp
, 0);
2003 arg1
= CALL_EXPR_ARG (exp
, 1);
/* Select the optab for the two-operand math builtin being expanded.  */
2005 switch (DECL_FUNCTION_CODE (fndecl
))
2007 CASE_FLT_FN (BUILT_IN_POW
):
2008 builtin_optab
= pow_optab
; break;
2009 CASE_FLT_FN (BUILT_IN_ATAN2
):
2010 builtin_optab
= atan2_optab
; break;
2011 CASE_FLT_FN (BUILT_IN_SCALB
):
/* scalb/scalbn only map to the optab when the float format radix is 2;
   the early-return for other radixes is among the missing lines.  */
2012 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2014 builtin_optab
= scalb_optab
; break;
2015 CASE_FLT_FN (BUILT_IN_SCALBN
):
2016 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2017 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2019 /* Fall through... */
2020 CASE_FLT_FN (BUILT_IN_LDEXP
):
2021 builtin_optab
= ldexp_optab
; break;
2022 CASE_FLT_FN (BUILT_IN_FMOD
):
2023 builtin_optab
= fmod_optab
; break;
2024 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2025 CASE_FLT_FN (BUILT_IN_DREM
):
2026 builtin_optab
= remainder_optab
; break;
2031 /* Make a suitable register to place result in. */
2032 mode
= TYPE_MODE (TREE_TYPE (exp
));
2034 /* Before working hard, check whether the instruction is available. */
2035 if (builtin_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2038 target
= gen_reg_rtx (mode
);
2040 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2043 /* Always stabilize the argument list. */
2044 narg
= builtin_save_expr (arg1
);
2050 narg
= builtin_save_expr (arg0
);
2058 exp
= build_call_expr (fndecl
, 2, arg0
, arg1
);
2060 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2061 op1
= expand_normal (arg1
);
2065 /* Compute into TARGET.
2066 Set TARGET to wherever the result comes back. */
2067 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2068 target
, 0, OPTAB_DIRECT
);
2070 /* If we were unable to expand via the builtin, stop the sequence
2071 (without outputting the insns) and call to the library function
2072 with the stabilized argument list. */
2076 return expand_call (exp
, target
, target
== const0_rtx
);
2080 expand_errno_check (exp
, target
);
2082 /* Output the entire sequence. */
2083 insns
= get_insns ();
/* NOTE(review): the function's tail (end_sequence/emit_insn/return) falls in
   the missing lines 2084-2087 -- confirm against upstream builtins.c.  */
/* NOTE(review): mangled extraction -- statements split across lines, several
   original lines missing (gaps in embedded numbering).  Code tokens are left
   byte-identical; comments annotate what the surviving text shows.  */
2090 /* Expand a call to the builtin sin and cos math functions.
2091 Return NULL_RTX if a normal call should be emitted rather than expanding the
2092 function in-line. EXP is the expression that is a call to the builtin
2093 function; if convenient, the result should be placed in TARGET.
2094 SUBTARGET may be used as the target for computing one of EXP's
2098 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2100 optab builtin_optab
;
2102 tree fndecl
= get_callee_fndecl (exp
);
2103 enum machine_mode mode
;
2106 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2109 arg
= CALL_EXPR_ARG (exp
, 0);
2111 switch (DECL_FUNCTION_CODE (fndecl
))
2113 CASE_FLT_FN (BUILT_IN_SIN
):
2114 CASE_FLT_FN (BUILT_IN_COS
):
/* Both sin and cos first try the combined sincos optab ...  */
2115 builtin_optab
= sincos_optab
; break;
2120 /* Make a suitable register to place result in. */
2121 mode
= TYPE_MODE (TREE_TYPE (exp
));
2123 /* Check if sincos insn is available, otherwise fallback
2124 to sin or cos insn. */
2125 if (builtin_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2126 switch (DECL_FUNCTION_CODE (fndecl
))
2128 CASE_FLT_FN (BUILT_IN_SIN
):
2129 builtin_optab
= sin_optab
; break;
2130 CASE_FLT_FN (BUILT_IN_COS
):
2131 builtin_optab
= cos_optab
; break;
2136 /* Before working hard, check whether the instruction is available. */
2137 if (builtin_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2139 target
= gen_reg_rtx (mode
);
2141 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2142 need to expand the argument again. This way, we will not perform
2143 side-effects more the once. */
2144 narg
= save_expr (arg
);
2148 exp
= build_call_expr (fndecl
, 1, arg
);
2151 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2155 /* Compute into TARGET.
2156 Set TARGET to wherever the result comes back. */
2157 if (builtin_optab
== sincos_optab
)
/* sincos is a two-output optab: the unused output slot is passed as 0
   depending on whether sin or cos was requested.  */
2161 switch (DECL_FUNCTION_CODE (fndecl
))
2163 CASE_FLT_FN (BUILT_IN_SIN
):
2164 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2166 CASE_FLT_FN (BUILT_IN_COS
):
2167 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2172 gcc_assert (result
);
2176 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2181 /* Output the entire sequence. */
2182 insns
= get_insns ();
2188 /* If we were unable to expand via the builtin, stop the sequence
2189 (without outputting the insns) and call to the library function
2190 with the stabilized argument list. */
2194 target
= expand_call (exp
, target
, target
== const0_rtx
);
/* NOTE(review): final return falls in the missing lines -- confirm upstream.  */
/* NOTE(review): mangled extraction -- several original lines missing (gaps in
   embedded numbering).  Code tokens left byte-identical; comments only.  */
2199 /* Expand a call to one of the builtin math functions that operate on
2200 floating point argument and output an integer result (ilogb, isinf,
2202 Return 0 if a normal call should be emitted rather than expanding the
2203 function in-line. EXP is the expression that is a call to the builtin
2204 function; if convenient, the result should be placed in TARGET.
2205 SUBTARGET may be used as the target for computing one of EXP's operands. */
2208 expand_builtin_interclass_mathfn (tree exp
, rtx target
, rtx subtarget
)
2210 optab builtin_optab
;
2211 enum insn_code icode
;
2213 tree fndecl
= get_callee_fndecl (exp
);
2214 enum machine_mode mode
;
2215 bool errno_set
= false;
2218 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2221 arg
= CALL_EXPR_ARG (exp
, 0);
2223 switch (DECL_FUNCTION_CODE (fndecl
))
2225 CASE_FLT_FN (BUILT_IN_ILOGB
):
/* ilogb sets errno (EDOM) for 0/NaN/Inf, so it cannot be expanded inline
   when -fmath-errno is active -- see the flag_errno_math check below.  */
2226 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2227 CASE_FLT_FN (BUILT_IN_ISINF
):
2228 builtin_optab
= isinf_optab
; break;
2233 /* There's no easy way to detect the case we need to set EDOM. */
2234 if (flag_errno_math
&& errno_set
)
2237 /* Optab mode depends on the mode of the input argument. */
2238 mode
= TYPE_MODE (TREE_TYPE (arg
));
2240 icode
= builtin_optab
->handlers
[(int) mode
].insn_code
;
2242 /* Before working hard, check whether the instruction is available. */
2243 if (icode
!= CODE_FOR_nothing
)
2245 /* Make a suitable register to place result in. */
2247 || GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
2248 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
2250 gcc_assert (insn_data
[icode
].operand
[0].predicate
2251 (target
, GET_MODE (target
)));
2253 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2254 need to expand the argument again. This way, we will not perform
2255 side-effects more the once. */
2256 narg
= builtin_save_expr (arg
);
2260 exp
= build_call_expr (fndecl
, 1, arg
);
2263 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2265 if (mode
!= GET_MODE (op0
))
2266 op0
= convert_to_mode (mode
, op0
, 0);
2268 /* Compute into TARGET.
2269 Set TARGET to wherever the result comes back. */
2270 emit_unop_insn (icode
, target
, op0
, UNKNOWN
);
/* Fallback path: emit a normal library call when no insn pattern exists.  */
2274 target
= expand_call (exp
, target
, target
== const0_rtx
);
/* NOTE(review): mangled extraction -- missing lines per embedded numbering.
   Code tokens left byte-identical; comments only.  */
2279 /* Expand a call to the builtin sincos math function.
2280 Return NULL_RTX if a normal call should be emitted rather than expanding the
2281 function in-line. EXP is the expression that is a call to the builtin
2285 expand_builtin_sincos (tree exp
)
2287 rtx op0
, op1
, op2
, target1
, target2
;
2288 enum machine_mode mode
;
2289 tree arg
, sinp
, cosp
;
2292 if (!validate_arglist (exp
, REAL_TYPE
,
2293 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
/* sincos(x, *sinp, *cosp): argument 0 is the angle, 1 and 2 are the two
   output pointers.  */
2296 arg
= CALL_EXPR_ARG (exp
, 0);
2297 sinp
= CALL_EXPR_ARG (exp
, 1);
2298 cosp
= CALL_EXPR_ARG (exp
, 2);
2300 /* Make a suitable register to place result in. */
2301 mode
= TYPE_MODE (TREE_TYPE (arg
));
2303 /* Check if sincos insn is available, otherwise emit the call. */
2304 if (sincos_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2307 target1
= gen_reg_rtx (mode
);
2308 target2
= gen_reg_rtx (mode
);
2310 op0
= expand_normal (arg
);
2311 op1
= expand_normal (build_fold_indirect_ref (sinp
));
2312 op2
= expand_normal (build_fold_indirect_ref (cosp
));
2314 /* Compute into target1 and target2.
2315 Set TARGET to wherever the result comes back. */
2316 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2317 gcc_assert (result
);
2319 /* Move target1 and target2 to the memory locations indicated
2321 emit_move_insn (op1
, target1
);
2322 emit_move_insn (op2
, target2
);
/* NOTE(review): return statement falls in missing lines -- confirm upstream
   (upstream returns const0_rtx here).  */
/* NOTE(review): mangled extraction -- missing lines per embedded numbering
   (notably the name strings assigned for the cexp fallback at 2410/2412/2414).
   Code tokens left byte-identical; comments only.  */
2327 /* Expand a call to the internal cexpi builtin to the sincos math function.
2328 EXP is the expression that is a call to the builtin function; if convenient,
2329 the result should be placed in TARGET. SUBTARGET may be used as the target
2330 for computing one of EXP's operands. */
2333 expand_builtin_cexpi (tree exp
, rtx target
, rtx subtarget
)
2335 tree fndecl
= get_callee_fndecl (exp
);
2337 enum machine_mode mode
;
2340 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2343 arg
= CALL_EXPR_ARG (exp
, 0);
2344 type
= TREE_TYPE (arg
);
2345 mode
= TYPE_MODE (TREE_TYPE (arg
));
2347 /* Try expanding via a sincos optab, fall back to emitting a libcall
2348 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2349 is only generated from sincos, cexp or if we have either of them. */
2350 if (sincos_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2352 op1
= gen_reg_rtx (mode
);
2353 op2
= gen_reg_rtx (mode
);
2355 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2357 /* Compute into op1 and op2. */
2358 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2360 else if (TARGET_HAS_SINCOS
)
2362 tree call
, fn
= NULL_TREE
;
/* Pick the sincos{,f,l} decl matching the cexpi{,f,l} flavor.  */
2366 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2367 fn
= built_in_decls
[BUILT_IN_SINCOSF
];
2368 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2369 fn
= built_in_decls
[BUILT_IN_SINCOS
];
2370 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2371 fn
= built_in_decls
[BUILT_IN_SINCOSL
];
/* sincos writes through pointers: build two stack temporaries and pass
   their addresses as trees to the call.  */
2375 op1
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2376 op2
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2377 op1a
= copy_to_mode_reg (Pmode
, XEXP (op1
, 0));
2378 op2a
= copy_to_mode_reg (Pmode
, XEXP (op2
, 0));
2379 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2380 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2382 /* Make sure not to fold the sincos call again. */
2383 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2384 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2385 call
, 3, arg
, top1
, top2
));
2389 tree call
, fn
= NULL_TREE
, narg
;
2390 tree ctype
= build_complex_type (type
);
2392 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2393 fn
= built_in_decls
[BUILT_IN_CEXPF
];
2394 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2395 fn
= built_in_decls
[BUILT_IN_CEXP
];
2396 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2397 fn
= built_in_decls
[BUILT_IN_CEXPL
];
2401 /* If we don't have a decl for cexp create one. This is the
2402 friendliest fallback if the user calls __builtin_cexpi
2403 without full target C99 function support. */
2404 if (fn
== NULL_TREE
)
2407 const char *name
= NULL
;
2409 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2411 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2413 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
/* NOTE(review): the string assignments to NAME ("cexpf"/"cexp"/"cexpl")
   fall in the missing lines 2410/2412/2414 -- confirm upstream.  */
2416 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2417 fn
= build_fn_decl (name
, fntype
);
/* cexpi(x) == cexp(0 + x*i): wrap the real arg in a complex constant.  */
2420 narg
= fold_build2 (COMPLEX_EXPR
, ctype
,
2421 build_real (type
, dconst0
), arg
);
2423 /* Make sure not to fold the cexp call again. */
2424 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2425 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2426 target
, VOIDmode
, EXPAND_NORMAL
);
2429 /* Now build the proper return type. */
2430 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2431 make_tree (TREE_TYPE (arg
), op2
),
2432 make_tree (TREE_TYPE (arg
), op1
)),
2433 target
, VOIDmode
, EXPAND_NORMAL
);
/* NOTE(review): mangled extraction -- missing lines per embedded numbering
   (notably the name strings in the fallback switch and sequence start/end
   calls).  Code tokens left byte-identical; comments only.  */
2436 /* Expand a call to one of the builtin rounding functions gcc defines
2437 as an extension (lfloor and lceil). As these are gcc extensions we
2438 do not need to worry about setting errno to EDOM.
2439 If expanding via optab fails, lower expression to (int)(floor(x)).
2440 EXP is the expression that is a call to the builtin function;
2441 if convenient, the result should be placed in TARGET. SUBTARGET may
2442 be used as the target for computing one of EXP's operands. */
2445 expand_builtin_int_roundingfn (tree exp
, rtx target
, rtx subtarget
)
2447 convert_optab builtin_optab
;
2448 rtx op0
, insns
, tmp
;
2449 tree fndecl
= get_callee_fndecl (exp
);
2450 enum built_in_function fallback_fn
;
2451 tree fallback_fndecl
;
2452 enum machine_mode mode
;
2455 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2458 arg
= CALL_EXPR_ARG (exp
, 0);
/* Map each l*ceil/l*floor flavor to its convert-optab plus the plain
   ceil/floor builtin used as the software fallback.  */
2460 switch (DECL_FUNCTION_CODE (fndecl
))
2462 CASE_FLT_FN (BUILT_IN_LCEIL
):
2463 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2464 builtin_optab
= lceil_optab
;
2465 fallback_fn
= BUILT_IN_CEIL
;
2468 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2469 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2470 builtin_optab
= lfloor_optab
;
2471 fallback_fn
= BUILT_IN_FLOOR
;
2478 /* Make a suitable register to place result in. */
2479 mode
= TYPE_MODE (TREE_TYPE (exp
));
2481 target
= gen_reg_rtx (mode
);
2483 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2484 need to expand the argument again. This way, we will not perform
2485 side-effects more the once. */
2486 narg
= builtin_save_expr (arg
);
2490 exp
= build_call_expr (fndecl
, 1, arg
);
2493 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2497 /* Compute into TARGET. */
2498 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2500 /* Output the entire sequence. */
2501 insns
= get_insns ();
2507 /* If we were unable to expand via the builtin, stop the sequence
2508 (without outputting the insns). */
2511 /* Fall back to floating point rounding optab. */
2512 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2514 /* For non-C99 targets we may end up without a fallback fndecl here
2515 if the user called __builtin_lfloor directly. In this case emit
2516 a call to the floor/ceil variants nevertheless. This should result
2517 in the best user experience for not full C99 targets. */
2518 if (fallback_fndecl
== NULL_TREE
)
2521 const char *name
= NULL
;
2523 switch (DECL_FUNCTION_CODE (fndecl
))
2525 case BUILT_IN_LCEIL
:
2526 case BUILT_IN_LLCEIL
:
2529 case BUILT_IN_LCEILF
:
2530 case BUILT_IN_LLCEILF
:
2533 case BUILT_IN_LCEILL
:
2534 case BUILT_IN_LLCEILL
:
2537 case BUILT_IN_LFLOOR
:
2538 case BUILT_IN_LLFLOOR
:
2541 case BUILT_IN_LFLOORF
:
2542 case BUILT_IN_LLFLOORF
:
2545 case BUILT_IN_LFLOORL
:
2546 case BUILT_IN_LLFLOORL
:
/* NOTE(review): the NAME = "ceil"/"ceilf"/... assignments and break
   statements fall in the missing lines -- confirm upstream.  */
2553 fntype
= build_function_type_list (TREE_TYPE (arg
),
2554 TREE_TYPE (arg
), NULL_TREE
);
2555 fallback_fndecl
= build_fn_decl (name
, fntype
);
2558 exp
= build_call_expr (fallback_fndecl
, 1, arg
);
2560 tmp
= expand_normal (exp
);
2562 /* Truncate the result of floating point optab to integer
2563 via expand_fix (). */
2564 target
= gen_reg_rtx (mode
);
2565 expand_fix (target
, tmp
, 0);
/* NOTE(review): mangled extraction -- missing lines per embedded numbering.
   Code tokens left byte-identical; comments only.  */
2570 /* Expand a call to one of the builtin math functions doing integer
2572 Return 0 if a normal call should be emitted rather than expanding the
2573 function in-line. EXP is the expression that is a call to the builtin
2574 function; if convenient, the result should be placed in TARGET.
2575 SUBTARGET may be used as the target for computing one of EXP's operands. */
2578 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
, rtx subtarget
)
2580 convert_optab builtin_optab
;
2582 tree fndecl
= get_callee_fndecl (exp
);
2584 enum machine_mode mode
;
/* lrint/lround may set errno; with -fmath-errno we must emit the libcall.  */
2586 /* There's no easy way to detect the case we need to set EDOM. */
2587 if (flag_errno_math
)
2590 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2593 arg
= CALL_EXPR_ARG (exp
, 0);
2595 switch (DECL_FUNCTION_CODE (fndecl
))
2597 CASE_FLT_FN (BUILT_IN_LRINT
):
2598 CASE_FLT_FN (BUILT_IN_LLRINT
):
2599 builtin_optab
= lrint_optab
; break;
2600 CASE_FLT_FN (BUILT_IN_LROUND
):
2601 CASE_FLT_FN (BUILT_IN_LLROUND
):
2602 builtin_optab
= lround_optab
; break;
2607 /* Make a suitable register to place result in. */
2608 mode
= TYPE_MODE (TREE_TYPE (exp
));
2610 target
= gen_reg_rtx (mode
);
2612 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2613 need to expand the argument again. This way, we will not perform
2614 side-effects more the once. */
2615 narg
= builtin_save_expr (arg
);
2619 exp
= build_call_expr (fndecl
, 1, arg
);
2622 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2626 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2628 /* Output the entire sequence. */
2629 insns
= get_insns ();
2635 /* If we were unable to expand via the builtin, stop the sequence
2636 (without outputting the insns) and call to the library function
2637 with the stabilized argument list. */
2640 target
= expand_call (exp
, target
, target
== const0_rtx
);
2645 /* To evaluate powi(x,n), the floating point value x raised to the
2646 constant integer exponent n, we use a hybrid algorithm that
2647 combines the "window method" with look-up tables. For an
2648 introduction to exponentiation algorithms and "addition chains",
2649 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2650 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2651 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2652 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2654 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2655 multiplications to inline before calling the system library's pow
2656 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2657 so this default never requires calling pow, powf or powl. */
2659 #ifndef POWI_MAX_MULTS
2660 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2663 /* The size of the "optimal power tree" lookup table. All
2664 exponents less than this value are simply looked up in the
2665 powi_table below. This threshold is also used to size the
2666 cache of pseudo registers that hold intermediate results. */
2667 #define POWI_TABLE_SIZE 256
2669 /* The size, in bits of the window, used in the "window method"
2670 exponentiation algorithm. This is equivalent to a radix of
2671 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2672 #define POWI_WINDOW_SIZE 3
2674 /* The following table is an efficient representation of an
2675 "optimal power tree". For each value, i, the corresponding
2676 value, j, in the table states than an optimal evaluation
2677 sequence for calculating pow(x,i) can be found by evaluating
2678 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2679 100 integers is given in Knuth's "Seminumerical algorithms". */
2681 static const unsigned char powi_table
[POWI_TABLE_SIZE
] =
2683 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2684 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2685 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2686 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2687 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2688 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2689 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2690 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2691 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2692 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2693 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2694 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2695 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2696 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2697 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2698 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2699 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2700 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2701 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2702 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2703 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2704 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2705 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2706 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2707 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2708 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2709 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2710 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2711 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2712 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2713 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2714 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
/* NOTE(review): the closing "};" of powi_table falls in the missing lines
   of this mangled extraction -- confirm against upstream builtins.c.  */
2718 /* Return the number of multiplications required to calculate
2719 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2720 subroutine of powi_cost. CACHE is an array indicating
2721 which exponents have already been calculated. */
2724 powi_lookup_cost (unsigned HOST_WIDE_INT n
, bool *cache
)
2726 /* If we've already calculated this exponent, then this evaluation
2727 doesn't require any additional multiplications. */
2732 return powi_lookup_cost (n
- powi_table
[n
], cache
)
2733 + powi_lookup_cost (powi_table
[n
], cache
) + 1;
2736 /* Return the number of multiplications required to calculate
2737 powi(x,n) for an arbitrary x, given the exponent N. This
2738 function needs to be kept in sync with expand_powi below. */
2741 powi_cost (HOST_WIDE_INT n
)
2743 bool cache
[POWI_TABLE_SIZE
];
2744 unsigned HOST_WIDE_INT digit
;
2745 unsigned HOST_WIDE_INT val
;
2751 /* Ignore the reciprocal when calculating the cost. */
2752 val
= (n
< 0) ? -n
: n
;
2754 /* Initialize the exponent cache. */
2755 memset (cache
, 0, POWI_TABLE_SIZE
* sizeof (bool));
2760 while (val
>= POWI_TABLE_SIZE
)
2764 digit
= val
& ((1 << POWI_WINDOW_SIZE
) - 1);
2765 result
+= powi_lookup_cost (digit
, cache
)
2766 + POWI_WINDOW_SIZE
+ 1;
2767 val
>>= POWI_WINDOW_SIZE
;
2776 return result
+ powi_lookup_cost (val
, cache
);
2779 /* Recursive subroutine of expand_powi. This function takes the array,
2780 CACHE, of already calculated exponents and an exponent N and returns
2781 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2784 expand_powi_1 (enum machine_mode mode
, unsigned HOST_WIDE_INT n
, rtx
*cache
)
2786 unsigned HOST_WIDE_INT digit
;
2790 if (n
< POWI_TABLE_SIZE
)
2795 target
= gen_reg_rtx (mode
);
2798 op0
= expand_powi_1 (mode
, n
- powi_table
[n
], cache
);
2799 op1
= expand_powi_1 (mode
, powi_table
[n
], cache
);
2803 target
= gen_reg_rtx (mode
);
2804 digit
= n
& ((1 << POWI_WINDOW_SIZE
) - 1);
2805 op0
= expand_powi_1 (mode
, n
- digit
, cache
);
2806 op1
= expand_powi_1 (mode
, digit
, cache
);
2810 target
= gen_reg_rtx (mode
);
2811 op0
= expand_powi_1 (mode
, n
>> 1, cache
);
2815 result
= expand_mult (mode
, op0
, op1
, target
, 0);
2816 if (result
!= target
)
2817 emit_move_insn (target
, result
);
2821 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2822 floating point operand in mode MODE, and N is the exponent. This
2823 function needs to be kept in sync with powi_cost above. */
2826 expand_powi (rtx x
, enum machine_mode mode
, HOST_WIDE_INT n
)
2828 unsigned HOST_WIDE_INT val
;
2829 rtx cache
[POWI_TABLE_SIZE
];
2833 return CONST1_RTX (mode
);
2835 val
= (n
< 0) ? -n
: n
;
2837 memset (cache
, 0, sizeof (cache
));
2840 result
= expand_powi_1 (mode
, (n
< 0) ? -n
: n
, cache
);
2842 /* If the original exponent was negative, reciprocate the result. */
2844 result
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2845 result
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
/* NOTE(review): mangled extraction -- missing lines per embedded numbering
   (declarations of ARG0/ARG1/N/OP/OP2/FN and several condition lines are
   lost).  Code tokens left byte-identical; comments only.  */
2850 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2851 a normal call should be emitted rather than expanding the function
2852 in-line. EXP is the expression that is a call to the builtin
2853 function; if convenient, the result should be placed in TARGET. */
2856 expand_builtin_pow (tree exp
, rtx target
, rtx subtarget
)
2860 tree type
= TREE_TYPE (exp
);
2861 REAL_VALUE_TYPE cint
, c
, c2
;
2864 enum machine_mode mode
= TYPE_MODE (type
);
2866 if (! validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2869 arg0
= CALL_EXPR_ARG (exp
, 0);
2870 arg1
= CALL_EXPR_ARG (exp
, 1);
/* Non-constant exponent: punt to the generic pow optab expansion.  */
2872 if (TREE_CODE (arg1
) != REAL_CST
2873 || TREE_OVERFLOW (arg1
))
2874 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
2876 /* Handle constant exponents. */
2878 /* For integer valued exponents we can expand to an optimal multiplication
2879 sequence using expand_powi. */
2880 c
= TREE_REAL_CST (arg1
);
2881 n
= real_to_integer (&c
);
2882 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2883 if (real_identical (&c
, &cint
)
2884 && ((n
>= -1 && n
<= 2)
2885 || (flag_unsafe_math_optimizations
2887 && powi_cost (n
) <= POWI_MAX_MULTS
)))
2889 op
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2892 op
= force_reg (mode
, op
);
2893 op
= expand_powi (op
, mode
, n
);
2898 narg0
= builtin_save_expr (arg0
);
2900 /* If the exponent is not integer valued, check if it is half of an integer.
2901 In this case we can expand to sqrt (x) * x**(n/2). */
2902 fn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
2903 if (fn
!= NULL_TREE
)
/* Test whether 2*c is an exact integer, i.e. c = n/2.  */
2905 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst2
);
2906 n
= real_to_integer (&c2
);
2907 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2908 if (real_identical (&c2
, &cint
)
2909 && ((flag_unsafe_math_optimizations
2911 && powi_cost (n
/2) <= POWI_MAX_MULTS
)
2914 tree call_expr
= build_call_expr (fn
, 1, narg0
);
2915 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
2918 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2919 op2
= force_reg (mode
, op2
);
2920 op2
= expand_powi (op2
, mode
, abs (n
/ 2));
2921 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2922 0, OPTAB_LIB_WIDEN
);
2923 /* If the original exponent was negative, reciprocate the
2926 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2927 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2933 /* Try if the exponent is a third of an integer. In this case
2934 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2935 different from pow (x, 1./3.) due to rounding and behavior
2936 with negative x we need to constrain this transformation to
2937 unsafe math and positive x or finite math. */
2938 fn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
2940 && flag_unsafe_math_optimizations
2941 && (tree_expr_nonnegative_p (arg0
)
2942 || !HONOR_NANS (mode
)))
/* Round(3*c) back-divided by 3 must reproduce c exactly for c = n/3.  */
2944 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst3
);
2945 real_round (&c2
, mode
, &c2
);
2946 n
= real_to_integer (&c2
);
2947 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2948 real_arithmetic (&c2
, RDIV_EXPR
, &cint
, &dconst3
);
2949 real_convert (&c2
, mode
, &c2
);
2950 if (real_identical (&c2
, &c
)
2952 && powi_cost (n
/3) <= POWI_MAX_MULTS
)
2955 tree call_expr
= build_call_expr (fn
, 1,narg0
);
2956 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
2957 if (abs (n
) % 3 == 2)
2958 op
= expand_simple_binop (mode
, MULT
, op
, op
, op
,
2959 0, OPTAB_LIB_WIDEN
);
2962 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2963 op2
= force_reg (mode
, op2
);
2964 op2
= expand_powi (op2
, mode
, abs (n
/ 3));
2965 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2966 0, OPTAB_LIB_WIDEN
);
2967 /* If the original exponent was negative, reciprocate the
2970 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2971 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2977 /* Fall back to optab expansion. */
2978 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
/* NOTE(review): mangled extraction -- missing lines per embedded numbering
   (declarations of ARG0/ARG1/OP0/OP1 and the final return are lost).
   Code tokens left byte-identical; comments only.  */
2981 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2982 a normal call should be emitted rather than expanding the function
2983 in-line. EXP is the expression that is a call to the builtin
2984 function; if convenient, the result should be placed in TARGET. */
2987 expand_builtin_powi (tree exp
, rtx target
, rtx subtarget
)
2991 enum machine_mode mode
;
2992 enum machine_mode mode2
;
2994 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2997 arg0
= CALL_EXPR_ARG (exp
, 0);
2998 arg1
= CALL_EXPR_ARG (exp
, 1);
2999 mode
= TYPE_MODE (TREE_TYPE (exp
));
3001 /* Handle constant power. */
3003 if (TREE_CODE (arg1
) == INTEGER_CST
3004 && !TREE_OVERFLOW (arg1
))
3006 HOST_WIDE_INT n
= TREE_INT_CST_LOW (arg1
);
3008 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3009 Otherwise, check the number of multiplications required. */
3010 if ((TREE_INT_CST_HIGH (arg1
) == 0
3011 || TREE_INT_CST_HIGH (arg1
) == -1)
3012 && ((n
>= -1 && n
<= 2)
3014 && powi_cost (n
) <= POWI_MAX_MULTS
)))
3016 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3017 op0
= force_reg (mode
, op0
);
3018 return expand_powi (op0
, mode
, n
);
3022 /* Emit a libcall to libgcc. */
3024 /* Mode of the 2nd argument must match that of an int. */
3025 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
3027 if (target
== NULL_RTX
)
3028 target
= gen_reg_rtx (mode
);
3030 op0
= expand_expr (arg0
, subtarget
, mode
, EXPAND_NORMAL
);
3031 if (GET_MODE (op0
) != mode
)
3032 op0
= convert_to_mode (mode
, op0
, 0);
3033 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
3034 if (GET_MODE (op1
) != mode2
)
3035 op1
= convert_to_mode (mode2
, op1
, 0);
/* __powi* libgcc routine: (mode, int) -> mode, constant call.  */
3037 target
= emit_library_call_value (powi_optab
->handlers
[(int) mode
].libfunc
,
3038 target
, LCT_CONST_MAKE_BLOCK
, mode
, 2,
3039 op0
, mode
, op1
, mode2
);
/* NOTE(review): mangled extraction -- missing lines per embedded numbering
   (declarations of LEN/ALIGN/PAT, several conditions and the early returns
   are lost).  Code tokens left byte-identical; comments only.  */
3044 /* Expand expression EXP which is a call to the strlen builtin. Return
3045 NULL_RTX if we failed the caller should emit a normal call, otherwise
3046 try to get the result in TARGET, if convenient. */
3049 expand_builtin_strlen (tree exp
, rtx target
,
3050 enum machine_mode target_mode
)
3052 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3058 tree src
= CALL_EXPR_ARG (exp
, 0);
3059 rtx result
, src_reg
, char_rtx
, before_strlen
;
3060 enum machine_mode insn_mode
= target_mode
, char_mode
;
3061 enum insn_code icode
= CODE_FOR_nothing
;
3064 /* If the length can be computed at compile-time, return it. */
3065 len
= c_strlen (src
, 0);
3067 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3069 /* If the length can be computed at compile-time and is constant
3070 integer, but there are side-effects in src, evaluate
3071 src for side-effects, then return len.
3072 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3073 can be optimized into: i++; x = 3; */
3074 len
= c_strlen (src
, 1);
3075 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3077 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3078 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3081 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3083 /* If SRC is not a pointer type, don't do this operation inline. */
3087 /* Bail out if we can't compute strlen in the right mode. */
/* Probe target_mode and successively wider modes for a strlen insn.  */
3088 while (insn_mode
!= VOIDmode
)
3090 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
3091 if (icode
!= CODE_FOR_nothing
)
3094 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3096 if (insn_mode
== VOIDmode
)
3099 /* Make a place to write the result of the instruction. */
3103 && GET_MODE (result
) == insn_mode
3104 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3105 result
= gen_reg_rtx (insn_mode
);
3107 /* Make a place to hold the source address. We will not expand
3108 the actual source until we are sure that the expansion will
3109 not fail -- there are trees that cannot be expanded twice. */
3110 src_reg
= gen_reg_rtx (Pmode
);
3112 /* Mark the beginning of the strlen sequence so we can emit the
3113 source operand later. */
3114 before_strlen
= get_last_insn ();
3116 char_rtx
= const0_rtx
;
3117 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
3118 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
3120 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
3122 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
3123 char_rtx
, GEN_INT (align
));
3128 /* Now that we are assured of success, expand the source. */
3130 pat
= expand_expr (src
, src_reg
, ptr_mode
, EXPAND_NORMAL
);
3132 emit_move_insn (src_reg
, pat
);
/* The source-address move must precede the strlen pattern, which was
   already generated above; insert after the remembered insertion point.  */
3137 emit_insn_after (pat
, before_strlen
);
3139 emit_insn_before (pat
, get_insns ());
3141 /* Return the value in the proper mode for this function. */
3142 if (GET_MODE (result
) == target_mode
)
3144 else if (target
!= 0)
3145 convert_move (target
, result
, 0);
3147 target
= convert_to_mode (target_mode
, result
, 0);
3153 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3154 caller should emit a normal call, otherwise try to get the result
3155 in TARGET, if convenient (and in mode MODE if that's convenient). */
3158 expand_builtin_strstr (tree exp
, rtx target
, enum machine_mode mode
)
3160 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3162 tree type
= TREE_TYPE (exp
);
3163 tree result
= fold_builtin_strstr (CALL_EXPR_ARG (exp
, 0),
3164 CALL_EXPR_ARG (exp
, 1), type
);
3166 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3171 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3172 caller should emit a normal call, otherwise try to get the result
3173 in TARGET, if convenient (and in mode MODE if that's convenient). */
3176 expand_builtin_strchr (tree exp
, rtx target
, enum machine_mode mode
)
3178 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3180 tree type
= TREE_TYPE (exp
);
3181 tree result
= fold_builtin_strchr (CALL_EXPR_ARG (exp
, 0),
3182 CALL_EXPR_ARG (exp
, 1), type
);
3184 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3186 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3191 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3192 caller should emit a normal call, otherwise try to get the result
3193 in TARGET, if convenient (and in mode MODE if that's convenient). */
3196 expand_builtin_strrchr (tree exp
, rtx target
, enum machine_mode mode
)
3198 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3200 tree type
= TREE_TYPE (exp
);
3201 tree result
= fold_builtin_strrchr (CALL_EXPR_ARG (exp
, 0),
3202 CALL_EXPR_ARG (exp
, 1), type
);
3204 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3209 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3210 caller should emit a normal call, otherwise try to get the result
3211 in TARGET, if convenient (and in mode MODE if that's convenient). */
3214 expand_builtin_strpbrk (tree exp
, rtx target
, enum machine_mode mode
)
3216 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3218 tree type
= TREE_TYPE (exp
);
3219 tree result
= fold_builtin_strpbrk (CALL_EXPR_ARG (exp
, 0),
3220 CALL_EXPR_ARG (exp
, 1), type
);
3222 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3227 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3228 bytes from constant string DATA + OFFSET and return it as target
3232 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3233 enum machine_mode mode
)
3235 const char *str
= (const char *) data
;
3237 gcc_assert (offset
>= 0
3238 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3239 <= strlen (str
) + 1));
3241 return c_readstr (str
+ offset
, mode
);
3244 /* Expand a call EXP to the memcpy builtin.
3245 Return NULL_RTX if we failed, the caller should emit a normal call,
3246 otherwise try to get the result in TARGET, if convenient (and in
3247 mode MODE if that's convenient). */
3250 expand_builtin_memcpy (tree exp
, rtx target
, enum machine_mode mode
)
3252 tree fndecl
= get_callee_fndecl (exp
);
3254 if (!validate_arglist (exp
,
3255 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3259 tree dest
= CALL_EXPR_ARG (exp
, 0);
3260 tree src
= CALL_EXPR_ARG (exp
, 1);
3261 tree len
= CALL_EXPR_ARG (exp
, 2);
3262 const char *src_str
;
3263 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3264 unsigned int dest_align
3265 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3266 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3267 tree result
= fold_builtin_memory_op (dest
, src
, len
,
3268 TREE_TYPE (TREE_TYPE (fndecl
)),
3270 HOST_WIDE_INT expected_size
= -1;
3271 unsigned int expected_align
= 0;
3275 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3277 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3279 result
= TREE_OPERAND (result
, 1);
3281 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3284 /* If DEST is not a pointer type, call the normal function. */
3285 if (dest_align
== 0)
3288 /* If either SRC is not a pointer type, don't do this
3289 operation in-line. */
3293 stringop_block_profile (exp
, &expected_align
, &expected_size
);
3294 if (expected_align
< dest_align
)
3295 expected_align
= dest_align
;
3296 dest_mem
= get_memory_rtx (dest
, len
);
3297 set_mem_align (dest_mem
, dest_align
);
3298 len_rtx
= expand_normal (len
);
3299 src_str
= c_getstr (src
);
3301 /* If SRC is a string constant and block move would be done
3302 by pieces, we can avoid loading the string from memory
3303 and only stored the computed constants. */
3305 && GET_CODE (len_rtx
) == CONST_INT
3306 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3307 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3308 (void *) src_str
, dest_align
))
3310 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3311 builtin_memcpy_read_str
,
3312 (void *) src_str
, dest_align
, 0);
3313 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3314 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3318 src_mem
= get_memory_rtx (src
, len
);
3319 set_mem_align (src_mem
, src_align
);
3321 /* Copy word part most expediently. */
3322 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3323 CALL_EXPR_TAILCALL (exp
)
3324 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3325 expected_align
, expected_size
);
3329 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3330 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3336 /* Expand a call EXP to the mempcpy builtin.
3337 Return NULL_RTX if we failed; the caller should emit a normal call,
3338 otherwise try to get the result in TARGET, if convenient (and in
3339 mode MODE if that's convenient). If ENDP is 0 return the
3340 destination pointer, if ENDP is 1 return the end pointer ala
3341 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3345 expand_builtin_mempcpy(tree exp
, rtx target
, enum machine_mode mode
)
3347 if (!validate_arglist (exp
,
3348 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3352 tree dest
= CALL_EXPR_ARG (exp
, 0);
3353 tree src
= CALL_EXPR_ARG (exp
, 1);
3354 tree len
= CALL_EXPR_ARG (exp
, 2);
3355 return expand_builtin_mempcpy_args (dest
, src
, len
,
3357 target
, mode
, /*endp=*/ 1);
3361 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3362 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3363 so that this can also be called without constructing an actual CALL_EXPR.
3364 TYPE is the return type of the call. The other arguments and return value
3365 are the same as for expand_builtin_mempcpy. */
3368 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
, tree type
,
3369 rtx target
, enum machine_mode mode
, int endp
)
3371 /* If return value is ignored, transform mempcpy into memcpy. */
3372 if (target
== const0_rtx
)
3374 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
3379 return expand_expr (build_call_expr (fn
, 3, dest
, src
, len
),
3380 target
, mode
, EXPAND_NORMAL
);
3384 const char *src_str
;
3385 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3386 unsigned int dest_align
3387 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3388 rtx dest_mem
, src_mem
, len_rtx
;
3389 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, false, endp
);
3393 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3395 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3397 result
= TREE_OPERAND (result
, 1);
3399 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3402 /* If either SRC or DEST is not a pointer type, don't do this
3403 operation in-line. */
3404 if (dest_align
== 0 || src_align
== 0)
3407 /* If LEN is not constant, call the normal function. */
3408 if (! host_integerp (len
, 1))
3411 len_rtx
= expand_normal (len
);
3412 src_str
= c_getstr (src
);
3414 /* If SRC is a string constant and block move would be done
3415 by pieces, we can avoid loading the string from memory
3416 and only stored the computed constants. */
3418 && GET_CODE (len_rtx
) == CONST_INT
3419 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3420 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3421 (void *) src_str
, dest_align
))
3423 dest_mem
= get_memory_rtx (dest
, len
);
3424 set_mem_align (dest_mem
, dest_align
);
3425 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3426 builtin_memcpy_read_str
,
3427 (void *) src_str
, dest_align
, endp
);
3428 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3429 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3433 if (GET_CODE (len_rtx
) == CONST_INT
3434 && can_move_by_pieces (INTVAL (len_rtx
),
3435 MIN (dest_align
, src_align
)))
3437 dest_mem
= get_memory_rtx (dest
, len
);
3438 set_mem_align (dest_mem
, dest_align
);
3439 src_mem
= get_memory_rtx (src
, len
);
3440 set_mem_align (src_mem
, src_align
);
3441 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3442 MIN (dest_align
, src_align
), endp
);
3443 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3444 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3452 /* Expand expression EXP, which is a call to the memmove builtin. Return
3453 NULL_RTX if we failed; the caller should emit a normal call. */
3456 expand_builtin_memmove (tree exp
, rtx target
, enum machine_mode mode
, int ignore
)
3458 if (!validate_arglist (exp
,
3459 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3463 tree dest
= CALL_EXPR_ARG (exp
, 0);
3464 tree src
= CALL_EXPR_ARG (exp
, 1);
3465 tree len
= CALL_EXPR_ARG (exp
, 2);
3466 return expand_builtin_memmove_args (dest
, src
, len
, TREE_TYPE (exp
),
3467 target
, mode
, ignore
);
3471 /* Helper function to do the actual work for expand_builtin_memmove. The
3472 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3473 so that this can also be called without constructing an actual CALL_EXPR.
3474 TYPE is the return type of the call. The other arguments and return value
3475 are the same as for expand_builtin_memmove. */
3478 expand_builtin_memmove_args (tree dest
, tree src
, tree len
,
3479 tree type
, rtx target
, enum machine_mode mode
,
3482 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, ignore
, /*endp=*/3);
3486 STRIP_TYPE_NOPS (result
);
3487 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3489 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3491 result
= TREE_OPERAND (result
, 1);
3493 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3496 /* Otherwise, call the normal function. */
3500 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3501 NULL_RTX if we failed the caller should emit a normal call. */
3504 expand_builtin_bcopy (tree exp
, int ignore
)
3506 tree type
= TREE_TYPE (exp
);
3507 tree src
, dest
, size
;
3509 if (!validate_arglist (exp
,
3510 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3513 src
= CALL_EXPR_ARG (exp
, 0);
3514 dest
= CALL_EXPR_ARG (exp
, 1);
3515 size
= CALL_EXPR_ARG (exp
, 2);
3517 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3518 This is done this way so that if it isn't expanded inline, we fall
3519 back to calling bcopy instead of memmove. */
3520 return expand_builtin_memmove_args (dest
, src
,
3521 fold_convert (sizetype
, size
),
3522 type
, const0_rtx
, VOIDmode
,
3527 # define HAVE_movstr 0
3528 # define CODE_FOR_movstr CODE_FOR_nothing
3531 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3532 we failed, the caller should emit a normal call, otherwise try to
3533 get the result in TARGET, if convenient. If ENDP is 0 return the
3534 destination pointer, if ENDP is 1 return the end pointer ala
3535 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3539 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3545 const struct insn_data
* data
;
3550 dest_mem
= get_memory_rtx (dest
, NULL
);
3551 src_mem
= get_memory_rtx (src
, NULL
);
3554 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3555 dest_mem
= replace_equiv_address (dest_mem
, target
);
3556 end
= gen_reg_rtx (Pmode
);
3560 if (target
== 0 || target
== const0_rtx
)
3562 end
= gen_reg_rtx (Pmode
);
3570 data
= insn_data
+ CODE_FOR_movstr
;
3572 if (data
->operand
[0].mode
!= VOIDmode
)
3573 end
= gen_lowpart (data
->operand
[0].mode
, end
);
3575 insn
= data
->genfun (end
, dest_mem
, src_mem
);
3581 /* movstr is supposed to set end to the address of the NUL
3582 terminator. If the caller requested a mempcpy-like return value,
3584 if (endp
== 1 && target
!= const0_rtx
)
3586 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), end
), 1);
3587 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3593 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3594 NULL_RTX if we failed the caller should emit a normal call, otherwise
3595 try to get the result in TARGET, if convenient (and in mode MODE if that's
3599 expand_builtin_strcpy (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
3601 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3603 tree dest
= CALL_EXPR_ARG (exp
, 0);
3604 tree src
= CALL_EXPR_ARG (exp
, 1);
3605 return expand_builtin_strcpy_args (fndecl
, dest
, src
, target
, mode
);
3610 /* Helper function to do the actual work for expand_builtin_strcpy. The
3611 arguments to the builtin_strcpy call DEST and SRC are broken out
3612 so that this can also be called without constructing an actual CALL_EXPR.
3613 The other arguments and return value are the same as for
3614 expand_builtin_strcpy. */
3617 expand_builtin_strcpy_args (tree fndecl
, tree dest
, tree src
,
3618 rtx target
, enum machine_mode mode
)
3620 tree result
= fold_builtin_strcpy (fndecl
, dest
, src
, 0);
3622 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3623 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3627 /* Expand a call EXP to the stpcpy builtin.
3628 Return NULL_RTX if we failed the caller should emit a normal call,
3629 otherwise try to get the result in TARGET, if convenient (and in
3630 mode MODE if that's convenient). */
3633 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3637 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3640 dst
= CALL_EXPR_ARG (exp
, 0);
3641 src
= CALL_EXPR_ARG (exp
, 1);
3643 /* If return value is ignored, transform stpcpy into strcpy. */
3644 if (target
== const0_rtx
)
3646 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
3650 return expand_expr (build_call_expr (fn
, 2, dst
, src
),
3651 target
, mode
, EXPAND_NORMAL
);
3658 /* Ensure we get an actual string whose length can be evaluated at
3659 compile-time, not an expression containing a string. This is
3660 because the latter will potentially produce pessimized code
3661 when used to produce the return value. */
3662 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3663 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3665 lenp1
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
3666 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
, TREE_TYPE (exp
),
3667 target
, mode
, /*endp=*/2);
3672 if (TREE_CODE (len
) == INTEGER_CST
)
3674 rtx len_rtx
= expand_normal (len
);
3676 if (GET_CODE (len_rtx
) == CONST_INT
)
3678 ret
= expand_builtin_strcpy_args (get_callee_fndecl (exp
),
3679 dst
, src
, target
, mode
);
3685 if (mode
!= VOIDmode
)
3686 target
= gen_reg_rtx (mode
);
3688 target
= gen_reg_rtx (GET_MODE (ret
));
3690 if (GET_MODE (target
) != GET_MODE (ret
))
3691 ret
= gen_lowpart (GET_MODE (target
), ret
);
3693 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3694 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3702 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3706 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3707 bytes from constant string DATA + OFFSET and return it as target
3711 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3712 enum machine_mode mode
)
3714 const char *str
= (const char *) data
;
3716 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3719 return c_readstr (str
+ offset
, mode
);
3722 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3723 NULL_RTX if we failed the caller should emit a normal call. */
3726 expand_builtin_strncpy (tree exp
, rtx target
, enum machine_mode mode
)
3728 tree fndecl
= get_callee_fndecl (exp
);
3730 if (validate_arglist (exp
,
3731 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3733 tree dest
= CALL_EXPR_ARG (exp
, 0);
3734 tree src
= CALL_EXPR_ARG (exp
, 1);
3735 tree len
= CALL_EXPR_ARG (exp
, 2);
3736 tree slen
= c_strlen (src
, 1);
3737 tree result
= fold_builtin_strncpy (fndecl
, dest
, src
, len
, slen
);
3741 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3743 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3745 result
= TREE_OPERAND (result
, 1);
3747 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3750 /* We must be passed a constant len and src parameter. */
3751 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3754 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
3756 /* We're required to pad with trailing zeros if the requested
3757 len is greater than strlen(s2)+1. In that case try to
3758 use store_by_pieces, if it fails, punt. */
3759 if (tree_int_cst_lt (slen
, len
))
3761 unsigned int dest_align
3762 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3763 const char *p
= c_getstr (src
);
3766 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3767 || !can_store_by_pieces (tree_low_cst (len
, 1),
3768 builtin_strncpy_read_str
,
3769 (void *) p
, dest_align
))
3772 dest_mem
= get_memory_rtx (dest
, len
);
3773 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3774 builtin_strncpy_read_str
,
3775 (void *) p
, dest_align
, 0);
3776 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3777 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3784 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3785 bytes from constant string DATA + OFFSET and return it as target
3789 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3790 enum machine_mode mode
)
3792 const char *c
= (const char *) data
;
3793 char *p
= alloca (GET_MODE_SIZE (mode
));
3795 memset (p
, *c
, GET_MODE_SIZE (mode
));
3797 return c_readstr (p
, mode
);
3800 /* Callback routine for store_by_pieces. Return the RTL of a register
3801 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3802 char value given in the RTL register data. For example, if mode is
3803 4 bytes wide, return the RTL for 0x01010101*data. */
3806 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3807 enum machine_mode mode
)
3813 size
= GET_MODE_SIZE (mode
);
3818 memset (p
, 1, size
);
3819 coeff
= c_readstr (p
, mode
);
3821 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3822 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3823 return force_reg (mode
, target
);
3826 /* Expand expression EXP, which is a call to the memset builtin. Return
3827 NULL_RTX if we failed the caller should emit a normal call, otherwise
3828 try to get the result in TARGET, if convenient (and in mode MODE if that's
3832 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3834 if (!validate_arglist (exp
,
3835 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3839 tree dest
= CALL_EXPR_ARG (exp
, 0);
3840 tree val
= CALL_EXPR_ARG (exp
, 1);
3841 tree len
= CALL_EXPR_ARG (exp
, 2);
3842 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3846 /* Helper function to do the actual work for expand_builtin_memset. The
3847 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3848 so that this can also be called without constructing an actual CALL_EXPR.
3849 The other arguments and return value are the same as for
3850 expand_builtin_memset. */
3853 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3854 rtx target
, enum machine_mode mode
, tree orig_exp
)
3857 enum built_in_function fcode
;
3859 unsigned int dest_align
;
3860 rtx dest_mem
, dest_addr
, len_rtx
;
3861 HOST_WIDE_INT expected_size
= -1;
3862 unsigned int expected_align
= 0;
3864 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3866 /* If DEST is not a pointer type, don't do this operation in-line. */
3867 if (dest_align
== 0)
3870 stringop_block_profile (orig_exp
, &expected_align
, &expected_size
);
3871 if (expected_align
< dest_align
)
3872 expected_align
= dest_align
;
3874 /* If the LEN parameter is zero, return DEST. */
3875 if (integer_zerop (len
))
3877 /* Evaluate and ignore VAL in case it has side-effects. */
3878 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3879 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3882 /* Stabilize the arguments in case we fail. */
3883 dest
= builtin_save_expr (dest
);
3884 val
= builtin_save_expr (val
);
3885 len
= builtin_save_expr (len
);
3887 len_rtx
= expand_normal (len
);
3888 dest_mem
= get_memory_rtx (dest
, len
);
3890 if (TREE_CODE (val
) != INTEGER_CST
)
3894 val_rtx
= expand_normal (val
);
3895 val_rtx
= convert_to_mode (TYPE_MODE (unsigned_char_type_node
),
3898 /* Assume that we can memset by pieces if we can store
3899 * the coefficients by pieces (in the required modes).
3900 * We can't pass builtin_memset_gen_str as that emits RTL. */
3902 if (host_integerp (len
, 1)
3903 && !(optimize_size
&& tree_low_cst (len
, 1) > 1)
3904 && can_store_by_pieces (tree_low_cst (len
, 1),
3905 builtin_memset_read_str
, &c
, dest_align
))
3907 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
3909 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3910 builtin_memset_gen_str
, val_rtx
, dest_align
, 0);
3912 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3913 dest_align
, expected_align
,
3917 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3918 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3922 if (target_char_cast (val
, &c
))
3927 if (host_integerp (len
, 1)
3928 && !(optimize_size
&& tree_low_cst (len
, 1) > 1)
3929 && can_store_by_pieces (tree_low_cst (len
, 1),
3930 builtin_memset_read_str
, &c
, dest_align
))
3931 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3932 builtin_memset_read_str
, &c
, dest_align
, 0);
3933 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, GEN_INT (c
),
3934 dest_align
, expected_align
,
3938 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3939 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3943 set_mem_align (dest_mem
, dest_align
);
3944 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3945 CALL_EXPR_TAILCALL (orig_exp
)
3946 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3947 expected_align
, expected_size
);
3951 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3952 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3958 fndecl
= get_callee_fndecl (orig_exp
);
3959 fcode
= DECL_FUNCTION_CODE (fndecl
);
3960 if (fcode
== BUILT_IN_MEMSET
)
3961 fn
= build_call_expr (fndecl
, 3, dest
, val
, len
);
3962 else if (fcode
== BUILT_IN_BZERO
)
3963 fn
= build_call_expr (fndecl
, 2, dest
, len
);
3966 if (TREE_CODE (fn
) == CALL_EXPR
)
3967 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3968 return expand_call (fn
, target
, target
== const0_rtx
);
3971 /* Expand expression EXP, which is a call to the bzero builtin. Return
3972 NULL_RTX if we failed the caller should emit a normal call. */
3975 expand_builtin_bzero (tree exp
)
3979 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3982 dest
= CALL_EXPR_ARG (exp
, 0);
3983 size
= CALL_EXPR_ARG (exp
, 1);
3985 /* New argument list transforming bzero(ptr x, int y) to
3986 memset(ptr x, int 0, size_t y). This is done this way
3987 so that if it isn't expanded inline, we fallback to
3988 calling bzero instead of memset. */
3990 return expand_builtin_memset_args (dest
, integer_zero_node
,
3991 fold_convert (sizetype
, size
),
3992 const0_rtx
, VOIDmode
, exp
);
3995 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
3996 caller should emit a normal call, otherwise try to get the result
3997 in TARGET, if convenient (and in mode MODE if that's convenient). */
4000 expand_builtin_memchr (tree exp
, rtx target
, enum machine_mode mode
)
4002 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
,
4003 INTEGER_TYPE
, VOID_TYPE
))
4005 tree type
= TREE_TYPE (exp
);
4006 tree result
= fold_builtin_memchr (CALL_EXPR_ARG (exp
, 0),
4007 CALL_EXPR_ARG (exp
, 1),
4008 CALL_EXPR_ARG (exp
, 2), type
);
4010 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4015 /* Expand expression EXP, which is a call to the memcmp built-in function.
4016 Return NULL_RTX if we failed and the
4017 caller should emit a normal call, otherwise try to get the result in
4018 TARGET, if convenient (and in mode MODE, if that's convenient). */
4021 expand_builtin_memcmp (tree exp
, rtx target
, enum machine_mode mode
)
4023 if (!validate_arglist (exp
,
4024 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4028 tree result
= fold_builtin_memcmp (CALL_EXPR_ARG (exp
, 0),
4029 CALL_EXPR_ARG (exp
, 1),
4030 CALL_EXPR_ARG (exp
, 2));
4032 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4035 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4037 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4040 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4041 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4042 tree len
= CALL_EXPR_ARG (exp
, 2);
4045 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4047 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4048 enum machine_mode insn_mode
;
4050 #ifdef HAVE_cmpmemsi
4052 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
4055 #ifdef HAVE_cmpstrnsi
4057 insn_mode
= insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4062 /* If we don't have POINTER_TYPE, call the function. */
4063 if (arg1_align
== 0 || arg2_align
== 0)
4066 /* Make a place to write the result of the instruction. */
4069 && REG_P (result
) && GET_MODE (result
) == insn_mode
4070 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4071 result
= gen_reg_rtx (insn_mode
);
4073 arg1_rtx
= get_memory_rtx (arg1
, len
);
4074 arg2_rtx
= get_memory_rtx (arg2
, len
);
4075 arg3_rtx
= expand_normal (len
);
4077 /* Set MEM_SIZE as appropriate. */
4078 if (GET_CODE (arg3_rtx
) == CONST_INT
)
4080 set_mem_size (arg1_rtx
, arg3_rtx
);
4081 set_mem_size (arg2_rtx
, arg3_rtx
);
4084 #ifdef HAVE_cmpmemsi
4086 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4087 GEN_INT (MIN (arg1_align
, arg2_align
)));
4090 #ifdef HAVE_cmpstrnsi
4092 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4093 GEN_INT (MIN (arg1_align
, arg2_align
)));
4101 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE_MAKE_BLOCK
,
4102 TYPE_MODE (integer_type_node
), 3,
4103 XEXP (arg1_rtx
, 0), Pmode
,
4104 XEXP (arg2_rtx
, 0), Pmode
,
4105 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4106 TYPE_UNSIGNED (sizetype
)),
4107 TYPE_MODE (sizetype
));
4109 /* Return the value in the proper mode for this function. */
4110 mode
= TYPE_MODE (TREE_TYPE (exp
));
4111 if (GET_MODE (result
) == mode
)
4113 else if (target
!= 0)
4115 convert_move (target
, result
, 0);
4119 return convert_to_mode (mode
, result
, 0);
4126 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4127 if we failed the caller should emit a normal call, otherwise try to get
4128 the result in TARGET, if convenient. */
4131 expand_builtin_strcmp (tree exp
, rtx target
, enum machine_mode mode
)
4133 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4137 tree result
= fold_builtin_strcmp (CALL_EXPR_ARG (exp
, 0),
4138 CALL_EXPR_ARG (exp
, 1));
4140 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4143 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4144 if (cmpstr_optab
[SImode
] != CODE_FOR_nothing
4145 || cmpstrn_optab
[SImode
] != CODE_FOR_nothing
)
4147 rtx arg1_rtx
, arg2_rtx
;
4148 rtx result
, insn
= NULL_RTX
;
4150 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4151 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4154 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4156 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4158 /* If we don't have POINTER_TYPE, call the function. */
4159 if (arg1_align
== 0 || arg2_align
== 0)
4162 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4163 arg1
= builtin_save_expr (arg1
);
4164 arg2
= builtin_save_expr (arg2
);
4166 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4167 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4169 #ifdef HAVE_cmpstrsi
4170 /* Try to call cmpstrsi. */
4173 enum machine_mode insn_mode
4174 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
4176 /* Make a place to write the result of the instruction. */
4179 && REG_P (result
) && GET_MODE (result
) == insn_mode
4180 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4181 result
= gen_reg_rtx (insn_mode
);
4183 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
4184 GEN_INT (MIN (arg1_align
, arg2_align
)));
4187 #ifdef HAVE_cmpstrnsi
4188 /* Try to determine at least one length and call cmpstrnsi. */
4189 if (!insn
&& HAVE_cmpstrnsi
)
4194 enum machine_mode insn_mode
4195 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4196 tree len1
= c_strlen (arg1
, 1);
4197 tree len2
= c_strlen (arg2
, 1);
4200 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4202 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4204 /* If we don't have a constant length for the first, use the length
4205 of the second, if we know it. We don't require a constant for
4206 this case; some cost analysis could be done if both are available
4207 but neither is constant. For now, assume they're equally cheap,
4208 unless one has side effects. If both strings have constant lengths,
4215 else if (TREE_SIDE_EFFECTS (len1
))
4217 else if (TREE_SIDE_EFFECTS (len2
))
4219 else if (TREE_CODE (len1
) != INTEGER_CST
)
4221 else if (TREE_CODE (len2
) != INTEGER_CST
)
4223 else if (tree_int_cst_lt (len1
, len2
))
4228 /* If both arguments have side effects, we cannot optimize. */
4229 if (!len
|| TREE_SIDE_EFFECTS (len
))
4232 arg3_rtx
= expand_normal (len
);
4234 /* Make a place to write the result of the instruction. */
4237 && REG_P (result
) && GET_MODE (result
) == insn_mode
4238 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4239 result
= gen_reg_rtx (insn_mode
);
4241 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4242 GEN_INT (MIN (arg1_align
, arg2_align
)));
4250 /* Return the value in the proper mode for this function. */
4251 mode
= TYPE_MODE (TREE_TYPE (exp
));
4252 if (GET_MODE (result
) == mode
)
4255 return convert_to_mode (mode
, result
, 0);
4256 convert_move (target
, result
, 0);
4260 /* Expand the library call ourselves using a stabilized argument
4261 list to avoid re-evaluating the function's arguments twice. */
4262 #ifdef HAVE_cmpstrnsi
4265 fndecl
= get_callee_fndecl (exp
);
4266 fn
= build_call_expr (fndecl
, 2, arg1
, arg2
);
4267 if (TREE_CODE (fn
) == CALL_EXPR
)
4268 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4269 return expand_call (fn
, target
, target
== const0_rtx
);
4275 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4276 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4277 the result in TARGET, if convenient. */
4280 expand_builtin_strncmp (tree exp
, rtx target
, enum machine_mode mode
)
4282 if (!validate_arglist (exp
,
4283 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4287 tree result
= fold_builtin_strncmp (CALL_EXPR_ARG (exp
, 0),
4288 CALL_EXPR_ARG (exp
, 1),
4289 CALL_EXPR_ARG (exp
, 2));
4291 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4294 /* If c_strlen can determine an expression for one of the string
4295 lengths, and it doesn't have side effects, then emit cmpstrnsi
4296 using length MIN(strlen(string)+1, arg3). */
4297 #ifdef HAVE_cmpstrnsi
4300 tree len
, len1
, len2
;
4301 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4304 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4305 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4306 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4309 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4311 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4312 enum machine_mode insn_mode
4313 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4315 len1
= c_strlen (arg1
, 1);
4316 len2
= c_strlen (arg2
, 1);
4319 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4321 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4323 /* If we don't have a constant length for the first, use the length
4324 of the second, if we know it. We don't require a constant for
4325 this case; some cost analysis could be done if both are available
4326 but neither is constant. For now, assume they're equally cheap,
4327 unless one has side effects. If both strings have constant lengths,
4334 else if (TREE_SIDE_EFFECTS (len1
))
4336 else if (TREE_SIDE_EFFECTS (len2
))
4338 else if (TREE_CODE (len1
) != INTEGER_CST
)
4340 else if (TREE_CODE (len2
) != INTEGER_CST
)
4342 else if (tree_int_cst_lt (len1
, len2
))
4347 /* If both arguments have side effects, we cannot optimize. */
4348 if (!len
|| TREE_SIDE_EFFECTS (len
))
4351 /* The actual new length parameter is MIN(len,arg3). */
4352 len
= fold_build2 (MIN_EXPR
, TREE_TYPE (len
), len
,
4353 fold_convert (TREE_TYPE (len
), arg3
));
4355 /* If we don't have POINTER_TYPE, call the function. */
4356 if (arg1_align
== 0 || arg2_align
== 0)
4359 /* Make a place to write the result of the instruction. */
4362 && REG_P (result
) && GET_MODE (result
) == insn_mode
4363 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4364 result
= gen_reg_rtx (insn_mode
);
4366 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4367 arg1
= builtin_save_expr (arg1
);
4368 arg2
= builtin_save_expr (arg2
);
4369 len
= builtin_save_expr (len
);
4371 arg1_rtx
= get_memory_rtx (arg1
, len
);
4372 arg2_rtx
= get_memory_rtx (arg2
, len
);
4373 arg3_rtx
= expand_normal (len
);
4374 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4375 GEN_INT (MIN (arg1_align
, arg2_align
)));
4380 /* Return the value in the proper mode for this function. */
4381 mode
= TYPE_MODE (TREE_TYPE (exp
));
4382 if (GET_MODE (result
) == mode
)
4385 return convert_to_mode (mode
, result
, 0);
4386 convert_move (target
, result
, 0);
4390 /* Expand the library call ourselves using a stabilized argument
4391 list to avoid re-evaluating the function's arguments twice. */
4392 fndecl
= get_callee_fndecl (exp
);
4393 fn
= build_call_expr (fndecl
, 3, arg1
, arg2
, len
);
4394 if (TREE_CODE (fn
) == CALL_EXPR
)
4395 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4396 return expand_call (fn
, target
, target
== const0_rtx
);
4402 /* Expand expression EXP, which is a call to the strcat builtin.
4403 Return NULL_RTX if we failed the caller should emit a normal call,
4404 otherwise try to get the result in TARGET, if convenient. */
4407 expand_builtin_strcat (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
4409 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4413 tree dst
= CALL_EXPR_ARG (exp
, 0);
4414 tree src
= CALL_EXPR_ARG (exp
, 1);
4415 const char *p
= c_getstr (src
);
4417 /* If the string length is zero, return the dst parameter. */
4418 if (p
&& *p
== '\0')
4419 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4423 /* See if we can store by pieces into (dst + strlen(dst)). */
4424 tree newsrc
, newdst
,
4425 strlen_fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
4428 /* Stabilize the argument list. */
4429 newsrc
= builtin_save_expr (src
);
4430 dst
= builtin_save_expr (dst
);
4434 /* Create strlen (dst). */
4435 newdst
= build_call_expr (strlen_fn
, 1, dst
);
4436 /* Create (dst + (cast) strlen (dst)). */
4437 newdst
= fold_convert (TREE_TYPE (dst
), newdst
);
4438 newdst
= fold_build2 (PLUS_EXPR
, TREE_TYPE (dst
), dst
, newdst
);
4440 newdst
= builtin_save_expr (newdst
);
4442 if (!expand_builtin_strcpy_args (fndecl
, newdst
, newsrc
, target
, mode
))
4444 end_sequence (); /* Stop sequence. */
4448 /* Output the entire sequence. */
4449 insns
= get_insns ();
4453 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4460 /* Expand expression EXP, which is a call to the strncat builtin.
4461 Return NULL_RTX if we failed the caller should emit a normal call,
4462 otherwise try to get the result in TARGET, if convenient. */
4465 expand_builtin_strncat (tree exp
, rtx target
, enum machine_mode mode
)
4467 if (validate_arglist (exp
,
4468 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4470 tree result
= fold_builtin_strncat (CALL_EXPR_ARG (exp
, 0),
4471 CALL_EXPR_ARG (exp
, 1),
4472 CALL_EXPR_ARG (exp
, 2));
4474 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4479 /* Expand expression EXP, which is a call to the strspn builtin.
4480 Return NULL_RTX if we failed the caller should emit a normal call,
4481 otherwise try to get the result in TARGET, if convenient. */
4484 expand_builtin_strspn (tree exp
, rtx target
, enum machine_mode mode
)
4486 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4488 tree result
= fold_builtin_strspn (CALL_EXPR_ARG (exp
, 0),
4489 CALL_EXPR_ARG (exp
, 1));
4491 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4496 /* Expand expression EXP, which is a call to the strcspn builtin.
4497 Return NULL_RTX if we failed the caller should emit a normal call,
4498 otherwise try to get the result in TARGET, if convenient. */
4501 expand_builtin_strcspn (tree exp
, rtx target
, enum machine_mode mode
)
4503 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4505 tree result
= fold_builtin_strcspn (CALL_EXPR_ARG (exp
, 0),
4506 CALL_EXPR_ARG (exp
, 1));
4508 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4513 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4514 if that's convenient. */
4517 expand_builtin_saveregs (void)
4521 /* Don't do __builtin_saveregs more than once in a function.
4522 Save the result of the first call and reuse it. */
4523 if (saveregs_value
!= 0)
4524 return saveregs_value
;
4526 /* When this function is called, it means that registers must be
4527 saved on entry to this function. So we migrate the call to the
4528 first insn of this function. */
4532 /* Do whatever the machine needs done in this case. */
4533 val
= targetm
.calls
.expand_builtin_saveregs ();
4538 saveregs_value
= val
;
4540 /* Put the insns after the NOTE that starts the function. If this
4541 is inside a start_sequence, make the outer-level insn chain current, so
4542 the code is placed at the start of the function. */
4543 push_topmost_sequence ();
4544 emit_insn_after (seq
, entry_of_function ());
4545 pop_topmost_sequence ();
4550 /* __builtin_args_info (N) returns word N of the arg space info
4551 for the current function. The number and meanings of words
4552 is controlled by the definition of CUMULATIVE_ARGS. */
4555 expand_builtin_args_info (tree exp
)
4557 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
4558 int *word_ptr
= (int *) ¤t_function_args_info
;
4560 gcc_assert (sizeof (CUMULATIVE_ARGS
) % sizeof (int) == 0);
4562 if (call_expr_nargs (exp
) != 0)
4564 if (!host_integerp (CALL_EXPR_ARG (exp
, 0), 0))
4565 error ("argument of %<__builtin_args_info%> must be constant");
4568 HOST_WIDE_INT wordnum
= tree_low_cst (CALL_EXPR_ARG (exp
, 0), 0);
4570 if (wordnum
< 0 || wordnum
>= nwords
)
4571 error ("argument of %<__builtin_args_info%> out of range");
4573 return GEN_INT (word_ptr
[wordnum
]);
4577 error ("missing argument in %<__builtin_args_info%>");
4582 /* Expand a call to __builtin_next_arg. */
4585 expand_builtin_next_arg (void)
4587 /* Checking arguments is already done in fold_builtin_next_arg
4588 that must be called before this function. */
4589 return expand_binop (Pmode
, add_optab
,
4590 current_function_internal_arg_pointer
,
4591 current_function_arg_offset_rtx
,
4592 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4595 /* Make it easier for the backends by protecting the valist argument
4596 from multiple evaluations. */
4599 stabilize_va_list (tree valist
, int needs_lvalue
)
4601 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4603 if (TREE_SIDE_EFFECTS (valist
))
4604 valist
= save_expr (valist
);
4606 /* For this case, the backends will be expecting a pointer to
4607 TREE_TYPE (va_list_type_node), but it's possible we've
4608 actually been given an array (an actual va_list_type_node).
4610 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4612 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4613 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4622 if (! TREE_SIDE_EFFECTS (valist
))
4625 pt
= build_pointer_type (va_list_type_node
);
4626 valist
= fold_build1 (ADDR_EXPR
, pt
, valist
);
4627 TREE_SIDE_EFFECTS (valist
) = 1;
4630 if (TREE_SIDE_EFFECTS (valist
))
4631 valist
= save_expr (valist
);
4632 valist
= build_fold_indirect_ref (valist
);
4638 /* The "standard" definition of va_list is void*. */
4641 std_build_builtin_va_list (void)
4643 return ptr_type_node
;
4646 /* The "standard" implementation of va_start: just assign `nextarg' to
4650 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4654 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
,
4655 make_tree (ptr_type_node
, nextarg
));
4656 TREE_SIDE_EFFECTS (t
) = 1;
4658 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4661 /* Expand EXP, a call to __builtin_va_start. */
4664 expand_builtin_va_start (tree exp
)
4669 if (call_expr_nargs (exp
) < 2)
4671 error ("too few arguments to function %<va_start%>");
4675 if (fold_builtin_next_arg (exp
, true))
4678 nextarg
= expand_builtin_next_arg ();
4679 valist
= stabilize_va_list (CALL_EXPR_ARG (exp
, 0), 1);
4681 #ifdef EXPAND_BUILTIN_VA_START
4682 EXPAND_BUILTIN_VA_START (valist
, nextarg
);
4684 std_expand_builtin_va_start (valist
, nextarg
);
4690 /* The "standard" implementation of va_arg: read the value from the
4691 current (padded) address and increment by the (padded) size. */
4694 std_gimplify_va_arg_expr (tree valist
, tree type
, tree
*pre_p
, tree
*post_p
)
4696 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4697 unsigned HOST_WIDE_INT align
, boundary
;
4700 #ifdef ARGS_GROW_DOWNWARD
4701 /* All of the alignment and movement below is for args-grow-up machines.
4702 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4703 implement their own specialized gimplify_va_arg_expr routines. */
4707 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4709 type
= build_pointer_type (type
);
4711 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4712 boundary
= FUNCTION_ARG_BOUNDARY (TYPE_MODE (type
), type
) / BITS_PER_UNIT
;
4714 /* Hoist the valist value into a temporary for the moment. */
4715 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4717 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4718 requires greater alignment, we must perform dynamic alignment. */
4719 if (boundary
> align
4720 && !integer_zerop (TYPE_SIZE (type
)))
4722 t
= fold_convert (TREE_TYPE (valist
), size_int (boundary
- 1));
4723 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4724 build2 (PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, t
));
4725 gimplify_and_add (t
, pre_p
);
4727 t
= fold_convert (TREE_TYPE (valist
), size_int (-boundary
));
4728 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4729 build2 (BIT_AND_EXPR
, TREE_TYPE (valist
), valist_tmp
, t
));
4730 gimplify_and_add (t
, pre_p
);
4735 /* If the actual alignment is less than the alignment of the type,
4736 adjust the type accordingly so that we don't assume strict alignment
4737 when deferencing the pointer. */
4738 boundary
*= BITS_PER_UNIT
;
4739 if (boundary
< TYPE_ALIGN (type
))
4741 type
= build_variant_type_copy (type
);
4742 TYPE_ALIGN (type
) = boundary
;
4745 /* Compute the rounded size of the type. */
4746 type_size
= size_in_bytes (type
);
4747 rounded_size
= round_up (type_size
, align
);
4749 /* Reduce rounded_size so it's sharable with the postqueue. */
4750 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4754 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4756 /* Small args are padded downward. */
4757 t
= fold_build2 (GT_EXPR
, sizetype
, rounded_size
, size_int (align
));
4758 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4759 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4760 t
= fold_convert (TREE_TYPE (addr
), t
);
4761 addr
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr
), addr
, t
);
4764 /* Compute new value for AP. */
4765 t
= fold_convert (TREE_TYPE (valist
), rounded_size
);
4766 t
= build2 (PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, t
);
4767 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4768 gimplify_and_add (t
, pre_p
);
4770 addr
= fold_convert (build_pointer_type (type
), addr
);
4773 addr
= build_va_arg_indirect_ref (addr
);
4775 return build_va_arg_indirect_ref (addr
);
4778 /* Build an indirect-ref expression over the given TREE, which represents a
4779 piece of a va_arg() expansion. */
4781 build_va_arg_indirect_ref (tree addr
)
4783 addr
= build_fold_indirect_ref (addr
);
4785 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
4791 /* Return a dummy expression of type TYPE in order to keep going after an
4795 dummy_object (tree type
)
4797 tree t
= build_int_cst (build_pointer_type (type
), 0);
4798 return build1 (INDIRECT_REF
, type
, t
);
4801 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4802 builtin function, but a very special sort of operator. */
4804 enum gimplify_status
4805 gimplify_va_arg_expr (tree
*expr_p
, tree
*pre_p
, tree
*post_p
)
4807 tree promoted_type
, want_va_type
, have_va_type
;
4808 tree valist
= TREE_OPERAND (*expr_p
, 0);
4809 tree type
= TREE_TYPE (*expr_p
);
4812 /* Verify that valist is of the proper type. */
4813 want_va_type
= va_list_type_node
;
4814 have_va_type
= TREE_TYPE (valist
);
4816 if (have_va_type
== error_mark_node
)
4819 if (TREE_CODE (want_va_type
) == ARRAY_TYPE
)
4821 /* If va_list is an array type, the argument may have decayed
4822 to a pointer type, e.g. by being passed to another function.
4823 In that case, unwrap both types so that we can compare the
4824 underlying records. */
4825 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
4826 || POINTER_TYPE_P (have_va_type
))
4828 want_va_type
= TREE_TYPE (want_va_type
);
4829 have_va_type
= TREE_TYPE (have_va_type
);
4833 if (TYPE_MAIN_VARIANT (want_va_type
) != TYPE_MAIN_VARIANT (have_va_type
))
4835 error ("first argument to %<va_arg%> not of type %<va_list%>");
4839 /* Generate a diagnostic for requesting data of a type that cannot
4840 be passed through `...' due to type promotion at the call site. */
4841 else if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4844 static bool gave_help
;
4846 /* Unfortunately, this is merely undefined, rather than a constraint
4847 violation, so we cannot make this an error. If this call is never
4848 executed, the program is still strictly conforming. */
4849 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4850 type
, promoted_type
);
4854 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4855 promoted_type
, type
);
4858 /* We can, however, treat "undefined" any way we please.
4859 Call abort to encourage the user to fix the program. */
4860 inform ("if this code is reached, the program will abort");
4861 t
= build_call_expr (implicit_built_in_decls
[BUILT_IN_TRAP
], 0);
4862 append_to_statement_list (t
, pre_p
);
4864 /* This is dead code, but go ahead and finish so that the
4865 mode of the result comes out right. */
4866 *expr_p
= dummy_object (type
);
4871 /* Make it easier for the backends by protecting the valist argument
4872 from multiple evaluations. */
4873 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4875 /* For this case, the backends will be expecting a pointer to
4876 TREE_TYPE (va_list_type_node), but it's possible we've
4877 actually been given an array (an actual va_list_type_node).
4879 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4881 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4882 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4884 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4887 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4889 if (!targetm
.gimplify_va_arg_expr
)
4890 /* FIXME:Once most targets are converted we should merely
4891 assert this is non-null. */
4894 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
4899 /* Expand EXP, a call to __builtin_va_end. */
4902 expand_builtin_va_end (tree exp
)
4904 tree valist
= CALL_EXPR_ARG (exp
, 0);
4906 /* Evaluate for side effects, if needed. I hate macros that don't
4908 if (TREE_SIDE_EFFECTS (valist
))
4909 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4914 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4915 builtin rather than just as an assignment in stdarg.h because of the
4916 nastiness of array-type va_list types. */
4919 expand_builtin_va_copy (tree exp
)
4923 dst
= CALL_EXPR_ARG (exp
, 0);
4924 src
= CALL_EXPR_ARG (exp
, 1);
4926 dst
= stabilize_va_list (dst
, 1);
4927 src
= stabilize_va_list (src
, 0);
4929 if (TREE_CODE (va_list_type_node
) != ARRAY_TYPE
)
4931 t
= build2 (MODIFY_EXPR
, va_list_type_node
, dst
, src
);
4932 TREE_SIDE_EFFECTS (t
) = 1;
4933 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4937 rtx dstb
, srcb
, size
;
4939 /* Evaluate to pointers. */
4940 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4941 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4942 size
= expand_expr (TYPE_SIZE_UNIT (va_list_type_node
), NULL_RTX
,
4943 VOIDmode
, EXPAND_NORMAL
);
4945 dstb
= convert_memory_address (Pmode
, dstb
);
4946 srcb
= convert_memory_address (Pmode
, srcb
);
4948 /* "Dereference" to BLKmode memories. */
4949 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4950 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4951 set_mem_align (dstb
, TYPE_ALIGN (va_list_type_node
));
4952 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4953 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4954 set_mem_align (srcb
, TYPE_ALIGN (va_list_type_node
));
4957 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4963 /* Expand a call to one of the builtin functions __builtin_frame_address or
4964 __builtin_return_address. */
4967 expand_builtin_frame_address (tree fndecl
, tree exp
)
4969 /* The argument must be a nonnegative integer constant.
4970 It counts the number of frames to scan up the stack.
4971 The value is the return address saved in that frame. */
4972 if (call_expr_nargs (exp
) == 0)
4973 /* Warning about missing arg was already issued. */
4975 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
4977 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4978 error ("invalid argument to %<__builtin_frame_address%>");
4980 error ("invalid argument to %<__builtin_return_address%>");
4986 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4987 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
4989 /* Some ports cannot access arbitrary stack frames. */
4992 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4993 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4995 warning (0, "unsupported argument to %<__builtin_return_address%>");
4999 /* For __builtin_frame_address, return what we've got. */
5000 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5004 && ! CONSTANT_P (tem
))
5005 tem
= copy_to_mode_reg (Pmode
, tem
);
5010 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5011 we failed and the caller should emit a normal call, otherwise try to get
5012 the result in TARGET, if convenient. */
5015 expand_builtin_alloca (tree exp
, rtx target
)
5020 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5021 should always expand to function calls. These can be intercepted
5026 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5029 /* Compute the argument. */
5030 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5032 /* Allocate the desired space. */
5033 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
5034 result
= convert_memory_address (ptr_mode
, result
);
5039 /* Expand a call to a bswap builtin with argument ARG0. MODE
5040 is the mode to expand with. */
5043 expand_builtin_bswap (tree exp
, rtx target
, rtx subtarget
)
5045 enum machine_mode mode
;
5049 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5052 arg
= CALL_EXPR_ARG (exp
, 0);
5053 mode
= TYPE_MODE (TREE_TYPE (arg
));
5054 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5056 target
= expand_unop (mode
, bswap_optab
, op0
, target
, 1);
5058 gcc_assert (target
);
5060 return convert_to_mode (mode
, target
, 0);
5063 /* Expand a call to a unary builtin in EXP.
5064 Return NULL_RTX if a normal call should be emitted rather than expanding the
5065 function in-line. If convenient, the result should be placed in TARGET.
5066 SUBTARGET may be used as the target for computing one of EXP's operands. */
5069 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
5070 rtx subtarget
, optab op_optab
)
5074 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5077 /* Compute the argument. */
5078 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
5079 VOIDmode
, EXPAND_NORMAL
);
5080 /* Compute op, into TARGET if possible.
5081 Set TARGET to wherever the result comes back. */
5082 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5083 op_optab
, op0
, target
, 1);
5084 gcc_assert (target
);
5086 return convert_to_mode (target_mode
, target
, 0);
5089 /* If the string passed to fputs is a constant and is one character
5090 long, we attempt to transform this call into __builtin_fputc(). */
5093 expand_builtin_fputs (tree exp
, rtx target
, bool unlocked
)
5095 /* Verify the arguments in the original call. */
5096 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5098 tree result
= fold_builtin_fputs (CALL_EXPR_ARG (exp
, 0),
5099 CALL_EXPR_ARG (exp
, 1),
5100 (target
== const0_rtx
),
5101 unlocked
, NULL_TREE
);
5103 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
5108 /* Expand a call to __builtin_expect. We just return our argument
5109 as the builtin_expect semantic should've been already executed by
5110 tree branch prediction pass. */
5113 expand_builtin_expect (tree exp
, rtx target
)
5117 if (call_expr_nargs (exp
) < 2)
5119 arg
= CALL_EXPR_ARG (exp
, 0);
5120 c
= CALL_EXPR_ARG (exp
, 1);
5122 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5123 /* When guessing was done, the hints should be already stripped away. */
5124 gcc_assert (!flag_guess_branch_prob
);
5129 expand_builtin_trap (void)
5133 emit_insn (gen_trap ());
5136 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
5140 /* Expand EXP, a call to fabs, fabsf or fabsl.
5141 Return NULL_RTX if a normal call should be emitted rather than expanding
5142 the function inline. If convenient, the result should be placed
5143 in TARGET. SUBTARGET may be used as the target for computing
5147 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5149 enum machine_mode mode
;
5153 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5156 arg
= CALL_EXPR_ARG (exp
, 0);
5157 mode
= TYPE_MODE (TREE_TYPE (arg
));
5158 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5159 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
5162 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5163 Return NULL is a normal call should be emitted rather than expanding the
5164 function inline. If convenient, the result should be placed in TARGET.
5165 SUBTARGET may be used as the target for computing the operand. */
5168 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5173 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5176 arg
= CALL_EXPR_ARG (exp
, 0);
5177 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5179 arg
= CALL_EXPR_ARG (exp
, 1);
5180 op1
= expand_normal (arg
);
5182 return expand_copysign (op0
, op1
, target
);
5185 /* Create a new constant string literal and return a char* pointer to it.
5186 The STRING_CST value is the LEN characters at STR. */
5188 build_string_literal (int len
, const char *str
)
5190 tree t
, elem
, index
, type
;
5192 t
= build_string (len
, str
);
5193 elem
= build_type_variant (char_type_node
, 1, 0);
5194 index
= build_index_type (build_int_cst (NULL_TREE
, len
- 1));
5195 type
= build_array_type (elem
, index
);
5196 TREE_TYPE (t
) = type
;
5197 TREE_CONSTANT (t
) = 1;
5198 TREE_INVARIANT (t
) = 1;
5199 TREE_READONLY (t
) = 1;
5200 TREE_STATIC (t
) = 1;
5202 type
= build_pointer_type (type
);
5203 t
= build1 (ADDR_EXPR
, type
, t
);
5205 type
= build_pointer_type (elem
);
5206 t
= build1 (NOP_EXPR
, type
, t
);
5210 /* Expand EXP, a call to printf or printf_unlocked.
5211 Return NULL_RTX if a normal call should be emitted rather than transforming
5212 the function inline. If convenient, the result should be placed in
5213 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5216 expand_builtin_printf (tree exp
, rtx target
, enum machine_mode mode
,
5219 /* If we're using an unlocked function, assume the other unlocked
5220 functions exist explicitly. */
5221 tree
const fn_putchar
= unlocked
? built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
]
5222 : implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
5223 tree
const fn_puts
= unlocked
? built_in_decls
[BUILT_IN_PUTS_UNLOCKED
]
5224 : implicit_built_in_decls
[BUILT_IN_PUTS
];
5225 const char *fmt_str
;
5228 int nargs
= call_expr_nargs (exp
);
5230 /* If the return value is used, don't do the transformation. */
5231 if (target
!= const0_rtx
)
5234 /* Verify the required arguments in the original call. */
5237 fmt
= CALL_EXPR_ARG (exp
, 0);
5238 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5241 /* Check whether the format is a literal string constant. */
5242 fmt_str
= c_getstr (fmt
);
5243 if (fmt_str
== NULL
)
5246 if (!init_target_chars ())
5249 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5250 if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
5253 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))))
5256 fn
= build_call_expr (fn_puts
, 1, CALL_EXPR_ARG (exp
, 1));
5258 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5259 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5262 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))) != INTEGER_TYPE
)
5265 fn
= build_call_expr (fn_putchar
, 1, CALL_EXPR_ARG (exp
, 1));
5269 /* We can't handle anything else with % args or %% ... yet. */
5270 if (strchr (fmt_str
, target_percent
))
5276 /* If the format specifier was "", printf does nothing. */
5277 if (fmt_str
[0] == '\0')
5279 /* If the format specifier has length of 1, call putchar. */
5280 if (fmt_str
[1] == '\0')
5282 /* Given printf("c"), (where c is any one character,)
5283 convert "c"[0] to an int and pass that to the replacement
5285 arg
= build_int_cst (NULL_TREE
, fmt_str
[0]);
5287 fn
= build_call_expr (fn_putchar
, 1, arg
);
5291 /* If the format specifier was "string\n", call puts("string"). */
5292 size_t len
= strlen (fmt_str
);
5293 if ((unsigned char)fmt_str
[len
- 1] == target_newline
)
5295 /* Create a NUL-terminated string that's one char shorter
5296 than the original, stripping off the trailing '\n'. */
5297 char *newstr
= alloca (len
);
5298 memcpy (newstr
, fmt_str
, len
- 1);
5299 newstr
[len
- 1] = 0;
5300 arg
= build_string_literal (len
, newstr
);
5302 fn
= build_call_expr (fn_puts
, 1, arg
);
5305 /* We'd like to arrange to call fputs(string,stdout) here,
5306 but we need stdout and don't have a way to get it yet. */
5313 if (TREE_CODE (fn
) == CALL_EXPR
)
5314 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5315 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
/* NOTE(review): this block is a line-mangled extraction of GCC's
   expand_builtin_fprintf -- single source lines are split across several
   physical lines and some original lines (braces, returns, declarations)
   are missing.  Text is kept byte-identical; only comments were added.
   Restore from a pristine GCC source tree before compiling.  */
5318 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5319 Return NULL_RTX if a normal call should be emitted rather than transforming
5320 the function inline. If convenient, the result should be placed in
5321 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5324 expand_builtin_fprintf (tree exp
, rtx target
, enum machine_mode mode
,
5327 /* If we're using an unlocked function, assume the other unlocked
5328 functions exist explicitly. */
5329 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
5330 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
5331 tree
const fn_fputs
= unlocked
? built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
]
5332 : implicit_built_in_decls
[BUILT_IN_FPUTS
];
5333 const char *fmt_str
;
5336 int nargs
= call_expr_nargs (exp
);
5338 /* If the return value is used, don't do the transformation. */
5339 if (target
!= const0_rtx
)
5342 /* Verify the required arguments in the original call. */
/* Argument 0 is the FILE* stream, argument 1 the format string.  */
5345 fp
= CALL_EXPR_ARG (exp
, 0);
5346 if (! POINTER_TYPE_P (TREE_TYPE (fp
)))
5348 fmt
= CALL_EXPR_ARG (exp
, 1);
5349 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5352 /* Check whether the format is a literal string constant. */
5353 fmt_str
= c_getstr (fmt
);
5354 if (fmt_str
== NULL
)
5357 if (!init_target_chars ())
5360 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5361 if (strcmp (fmt_str
, target_percent_s
) == 0)
5364 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))))
5366 arg
= CALL_EXPR_ARG (exp
, 2);
5368 fn
= build_call_expr (fn_fputs
, 2, arg
, fp
);
5370 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5371 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5374 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))) != INTEGER_TYPE
)
5376 arg
= CALL_EXPR_ARG (exp
, 2);
5378 fn
= build_call_expr (fn_fputc
, 2, arg
, fp
);
5382 /* We can't handle anything else with % args or %% ... yet. */
5383 if (strchr (fmt_str
, target_percent
))
5389 /* If the format specifier was "", fprintf does nothing. */
5390 if (fmt_str
[0] == '\0')
5392 /* Evaluate and ignore FILE* argument for side-effects. */
5393 expand_expr (fp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5397 /* When "string" doesn't contain %, replace all cases of
5398 fprintf(stream,string) with fputs(string,stream). The fputs
5399 builtin will take care of special cases like length == 1. */
5401 fn
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
/* Propagate the tail-call flag so the replacement can still be a
   sibling call where the original was.  */
5406 if (TREE_CODE (fn
) == CALL_EXPR
)
5407 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5408 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
/* NOTE(review): line-mangled extraction of GCC's expand_builtin_sprintf;
   text kept verbatim except for one fix.  BUG FIX: the format string of
   sprintf(dest, fmt, ...) is call argument 1, not argument 0 -- the
   original read "fmt = CALL_EXPR_ARG (exp, 0)", which aliases fmt with
   dest (compare the fprintf expander above, which uses argument 1 for
   the format).  Changed the index from 0 to 1.  */
5411 /* Expand a call EXP to sprintf. Return NULL_RTX if
5412 a normal call should be emitted rather than expanding the function
5413 inline. If convenient, the result should be placed in TARGET with
5417 expand_builtin_sprintf (tree exp
, rtx target
, enum machine_mode mode
)
5420 const char *fmt_str
;
5421 int nargs
= call_expr_nargs (exp
);
5423 /* Verify the required arguments in the original call. */
5426 dest
= CALL_EXPR_ARG (exp
, 0);
5427 if (! POINTER_TYPE_P (TREE_TYPE (dest
)))
5429 fmt
= CALL_EXPR_ARG (exp
, 1);
5430 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5433 /* Check whether the format is a literal string constant. */
5434 fmt_str
= c_getstr (fmt
);
5435 if (fmt_str
== NULL
)
5438 if (!init_target_chars ())
5441 /* If the format doesn't contain % args or %%, use strcpy. */
5442 if (strchr (fmt_str
, target_percent
) == 0)
5444 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5447 if ((nargs
> 2) || ! fn
)
5449 expand_expr (build_call_expr (fn
, 2, dest
, fmt
),
5450 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5451 if (target
== const0_rtx
)
/* sprintf returns the number of characters written, i.e. strlen of
   the (percent-free) format.  */
5453 exp
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
5454 return expand_expr (exp
, target
, mode
, EXPAND_NORMAL
);
5456 /* If the format is "%s", use strcpy if the result isn't used. */
5457 else if (strcmp (fmt_str
, target_percent_s
) == 0)
5460 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5466 arg
= CALL_EXPR_ARG (exp
, 2);
5467 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
/* If the result is used we must know the copied string's length at
   compile time to supply the return value.  */
5470 if (target
!= const0_rtx
)
5472 len
= c_strlen (arg
, 1);
5473 if (! len
|| TREE_CODE (len
) != INTEGER_CST
)
5479 expand_expr (build_call_expr (fn
, 2, dest
, arg
),
5480 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5482 if (target
== const0_rtx
)
5484 return expand_expr (len
, target
, mode
, EXPAND_NORMAL
);
/* NOTE(review): line-mangled extraction; several original lines are
   missing (declarations, branch bodies, closing call arguments).  Text
   kept byte-identical; comments only.  Emits a libcall to the function
   entry/exit profiler, passing the function address and (apparently)
   its return address -- the call is truncated here, confirm against a
   pristine tree.  */
5490 /* Expand a call to either the entry or exit function profiler. */
5493 expand_builtin_profile_func (bool exitp
)
5497 this = DECL_RTL (current_function_decl
);
5498 gcc_assert (MEM_P (this));
5499 this = XEXP (this, 0);
5502 which
= profile_function_exit_libfunc
;
5504 which
= profile_function_entry_libfunc
;
5506 emit_library_call (which
, LCT_NORMAL
, VOIDmode
, 2, this, Pmode
,
5507 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  Rounds TRAMP up to TRAMPOLINE_ALIGNMENT via the classic
   (tramp + align-1) & -align sequence.  */
5514 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5517 round_trampoline_addr (rtx tramp
)
5519 rtx temp
, addend
, mask
;
5521 /* If we don't need too much alignment, we'll have been guaranteed
5522 proper alignment by get_trampoline_type. */
5523 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
5526 /* Round address up to desired boundary. */
5527 temp
= gen_reg_rtx (Pmode
);
5528 addend
= GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1);
5529 mask
= GEN_INT (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
5531 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
5532 temp
, 0, OPTAB_LIB_WIDEN
);
5533 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
5534 temp
, 0, OPTAB_LIB_WIDEN
);
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  Expands __builtin_init_trampoline (tramp, func, chain): aligns
   the trampoline buffer, optionally copies the target's template into
   it, then lets the target macro fill in func/chain.  */
5540 expand_builtin_init_trampoline (tree exp
)
5542 tree t_tramp
, t_func
, t_chain
;
5543 rtx r_tramp
, r_func
, r_chain
;
5544 #ifdef TRAMPOLINE_TEMPLATE
5548 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5549 POINTER_TYPE
, VOID_TYPE
))
5552 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5553 t_func
= CALL_EXPR_ARG (exp
, 1);
5554 t_chain
= CALL_EXPR_ARG (exp
, 2);
5556 r_tramp
= expand_normal (t_tramp
);
5557 r_func
= expand_normal (t_func
);
5558 r_chain
= expand_normal (t_chain
);
5560 /* Generate insns to initialize the trampoline. */
5561 r_tramp
= round_trampoline_addr (r_tramp
);
5562 #ifdef TRAMPOLINE_TEMPLATE
5563 blktramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5564 set_mem_align (blktramp
, TRAMPOLINE_ALIGNMENT
);
5565 emit_block_move (blktramp
, assemble_trampoline_template (),
5566 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
/* Record that at least one trampoline was emitted (used elsewhere,
   e.g. for executable-stack marking -- confirm against caller).  */
5568 trampolines_created
= 1;
5569 INITIALIZE_TRAMPOLINE (r_tramp
, r_func
, r_chain
);
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  Expands __builtin_adjust_trampoline: aligns the address and
   applies the optional target-specific adjustment macro.  */
5575 expand_builtin_adjust_trampoline (tree exp
)
5579 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5582 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5583 tramp
= round_trampoline_addr (tramp
);
5584 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5585 TRAMPOLINE_ADJUST_ADDRESS (tramp
);
/* NOTE(review): line-mangled extraction of GCC's expand_builtin_signbit;
   single lines are split and some originals are missing (returns, the
   hi/lo zero-initializations, brace lines).  Text kept byte-identical;
   comments only.  Strategy: locate the sign bit from the float format,
   view the value as an integer, then isolate that bit with shift/AND.  */
5591 /* Expand a call to the built-in signbit, signbitf, signbitl, signbitd32,
5592 signbitd64, or signbitd128 function.
5593 Return NULL_RTX if a normal call should be emitted rather than expanding
5594 the function in-line. EXP is the expression that is a call to the builtin
5595 function; if convenient, the result should be placed in TARGET. */
5598 expand_builtin_signbit (tree exp
, rtx target
)
5600 const struct real_format
*fmt
;
5601 enum machine_mode fmode
, imode
, rmode
;
5602 HOST_WIDE_INT hi
, lo
;
5607 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5610 arg
= CALL_EXPR_ARG (exp
, 0);
5611 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5612 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5613 fmt
= REAL_MODE_FORMAT (fmode
);
5615 /* For floating point formats without a sign bit, implement signbit
5617 bitpos
= fmt
->signbit_ro
;
5620 /* But we can't do this if the format supports signed zero. */
5621 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
/* Fallback: no dedicated sign bit -- signbit(x) is just x < 0.  */
5624 arg
= fold_build2 (LT_EXPR
, TREE_TYPE (exp
), arg
,
5625 build_real (TREE_TYPE (arg
), dconst0
));
5626 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5629 temp
= expand_normal (arg
);
5630 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5632 imode
= int_mode_for_mode (fmode
);
5633 if (imode
== BLKmode
)
5635 temp
= gen_lowpart (imode
, temp
);
5640 /* Handle targets with different FP word orders. */
5641 if (FLOAT_WORDS_BIG_ENDIAN
)
5642 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5644 word
= bitpos
/ BITS_PER_WORD
;
5645 temp
= operand_subword_force (temp
, word
, fmode
);
5646 bitpos
= bitpos
% BITS_PER_WORD
;
5649 /* Force the intermediate word_mode (or narrower) result into a
5650 register. This avoids attempting to create paradoxical SUBREGs
5651 of floating point modes below. */
5652 temp
= force_reg (imode
, temp
);
5654 /* If the bitpos is within the "result mode" lowpart, the operation
5655 can be implement with a single bitwise AND. Otherwise, we need
5656 a right shift and an AND. */
5658 if (bitpos
< GET_MODE_BITSIZE (rmode
))
/* Build the double-word mask 1 << bitpos; presumably hi/lo are
   zeroed on elided lines -- confirm against pristine source.  */
5660 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
5663 lo
= (HOST_WIDE_INT
) 1 << bitpos
;
5667 hi
= (HOST_WIDE_INT
) 1 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
5672 temp
= gen_lowpart (rmode
, temp
);
5673 temp
= expand_binop (rmode
, and_optab
, temp
,
5674 immed_double_const (lo
, hi
, rmode
),
5675 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5679 /* Perform a logical right shift to place the signbit in the least
5680 significant bit, then truncate the result to the desired mode
5681 and mask just this bit. */
5682 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
,
5683 build_int_cst (NULL_TREE
, bitpos
), NULL_RTX
, 1);
5684 temp
= gen_lowpart (rmode
, temp
);
5685 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5686 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  When profiling arcs, fork/exec* calls are redirected to the
   corresponding __gcov_* wrappers so coverage data stays consistent.  */
5692 /* Expand fork or exec calls. TARGET is the desired target of the
5693 call. EXP is the call. FN is the
5694 identificator of the actual function. IGNORE is nonzero if the
5695 value is to be ignored. */
5698 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5703 /* If we are not profiling, just call the function. */
5704 if (!profile_arc_flag
)
5707 /* Otherwise call the wrapper. This should be equivalent for the rest of
5708 compiler, so the code does not diverge, and the wrapper may run the
5709 code necessary for keeping the profiling sane. */
5711 switch (DECL_FUNCTION_CODE (fn
))
5714 id
= get_identifier ("__gcov_fork");
5717 case BUILT_IN_EXECL
:
5718 id
= get_identifier ("__gcov_execl");
5721 case BUILT_IN_EXECV
:
5722 id
= get_identifier ("__gcov_execv");
5725 case BUILT_IN_EXECLP
:
5726 id
= get_identifier ("__gcov_execlp");
5729 case BUILT_IN_EXECLE
:
5730 id
= get_identifier ("__gcov_execle");
5733 case BUILT_IN_EXECVP
:
5734 id
= get_identifier ("__gcov_execvp");
5737 case BUILT_IN_EXECVE
:
5738 id
= get_identifier ("__gcov_execve");
/* Build an extern declaration for the wrapper with the same type as
   the original builtin, then rewrite the call to use it.  */
5745 decl
= build_decl (FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5746 DECL_EXTERNAL (decl
) = 1;
5747 TREE_PUBLIC (decl
) = 1;
5748 DECL_ARTIFICIAL (decl
) = 1;
5749 TREE_NOTHROW (decl
) = 1;
5750 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5751 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5752 call
= rewrite_call_expr (exp
, 0, decl
, 0);
5753 return expand_call (call
, target
, ignore
);
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  Maps FOO_N builtin-code offset to the integer machine mode of
   N bytes (offset is log2 of the operand size in bytes).  */
5758 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5759 the pointer in these functions is void*, the tree optimizers may remove
5760 casts. The mode computed in expand_builtin isn't reliable either, due
5761 to __sync_bool_compare_and_swap.
5763 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5764 group of builtins. This gives us log2 of the mode size. */
5766 static inline enum machine_mode
5767 get_builtin_sync_mode (int fcode_diff
)
5769 /* The size is not negotiable, so ask not to get BLKmode in return
5770 if the target indicates that a smaller size would be better. */
5771 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  Builds the volatile, barrier-alias-set MEM operand shared by
   all __sync_* expanders.  */
5774 /* Expand the memory expression LOC and return the appropriate memory operand
5775 for the builtin_sync operations. */
5778 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5782 addr
= expand_expr (loc
, NULL_RTX
, Pmode
, EXPAND_SUM
);
5784 /* Note that we explicitly do not want any alias information for this
5785 memory, so that we kill all other live memories. Otherwise we don't
5786 satisfy the full barrier semantics of the intrinsic. */
5787 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5789 set_mem_align (mem
, get_pointer_alignment (loc
, BIGGEST_ALIGNMENT
));
5790 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
/* Volatile so later passes cannot delete or reorder the access.  */
5791 MEM_VOLATILE_P (mem
) = 1;
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  Dispatches to the value-discarding or fetch-returning sync
   expansion depending on IGNORE.  */
5796 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5797 EXP is the CALL_EXPR. CODE is the rtx code
5798 that corresponds to the arithmetic or logical operation from the name;
5799 an exception here is that NOT actually means NAND. TARGET is an optional
5800 place for us to store the results; AFTER is true if this is the
5801 fetch_and_xxx form. IGNORE is true if we don't actually care about
5802 the result of the operation at all. */
5805 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5806 enum rtx_code code
, bool after
,
5807 rtx target
, bool ignore
)
5810 enum machine_mode old_mode
;
5812 /* Expand the operands. */
5813 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5815 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
5816 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5817 of CONST_INTs, where we know the old_mode only from the call argument. */
5818 old_mode
= GET_MODE (val
);
5819 if (old_mode
== VOIDmode
)
5820 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5821 val
= convert_modes (mode
, old_mode
, val
, 1);
5824 return expand_sync_operation (mem
, val
, code
);
5826 return expand_sync_fetch_operation (mem
, val
, code
, after
, target
);
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  Expands both CAS forms; old and new values each get the same
   promoted-mode fixup as the sync-operation expander.  */
5829 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5830 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5831 true if this is the boolean form. TARGET is a place for us to store the
5832 results; this is NOT optional if IS_BOOL is true. */
5835 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5836 bool is_bool
, rtx target
)
5838 rtx old_val
, new_val
, mem
;
5839 enum machine_mode old_mode
;
5841 /* Expand the operands. */
5842 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5845 old_val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
,
5846 mode
, EXPAND_NORMAL
);
5847 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5848 of CONST_INTs, where we know the old_mode only from the call argument. */
5849 old_mode
= GET_MODE (old_val
);
5850 if (old_mode
== VOIDmode
)
5851 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5852 old_val
= convert_modes (mode
, old_mode
, old_val
, 1);
5854 new_val
= expand_expr (CALL_EXPR_ARG (exp
, 2), NULL_RTX
,
5855 mode
, EXPAND_NORMAL
);
5856 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5857 of CONST_INTs, where we know the old_mode only from the call argument. */
5858 old_mode
= GET_MODE (new_val
);
5859 if (old_mode
== VOIDmode
)
5860 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2)));
5861 new_val
= convert_modes (mode
, old_mode
, new_val
, 1);
5864 return expand_bool_compare_and_swap (mem
, old_val
, new_val
, target
);
5866 return expand_val_compare_and_swap (mem
, old_val
, new_val
, target
);
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  Same operand-expansion and mode-fixup pattern as the other
   __sync_* expanders, then delegates to the optab-level helper.  */
5869 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5870 general form is actually an atomic exchange, and some targets only
5871 support a reduced form with the second argument being a constant 1.
5872 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5876 expand_builtin_lock_test_and_set (enum machine_mode mode
, tree exp
,
5880 enum machine_mode old_mode
;
5882 /* Expand the operands. */
5883 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5884 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
5885 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5886 of CONST_INTs, where we know the old_mode only from the call argument. */
5887 old_mode
= GET_MODE (val
);
5888 if (old_mode
== VOIDmode
)
5889 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5890 val
= convert_modes (mode
, old_mode
, val
, 1);
5892 return expand_sync_lock_test_and_set (mem
, val
, target
);
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  Uses the target's memory_barrier insn if available; otherwise
   emits a volatile asm with a "memory" clobber as a compiler barrier.  */
5895 /* Expand the __sync_synchronize intrinsic. */
5898 expand_builtin_synchronize (void)
5902 #ifdef HAVE_memory_barrier
5903 if (HAVE_memory_barrier
)
5905 emit_insn (gen_memory_barrier ());
5910 /* If no explicit memory barrier instruction is available, create an
5911 empty asm stmt with a memory clobber. */
5912 x
= build4 (ASM_EXPR
, void_type_node
, build_string (0, ""), NULL
, NULL
,
5913 tree_cons (NULL
, build_string (6, "memory"), NULL
));
5914 ASM_VOLATILE_P (x
) = 1;
5915 expand_asm_expr (x
);
/* NOTE(review): line-mangled extraction; kept byte-identical, comments
   only.  Prefers the target's sync_lock_release pattern; otherwise
   falls back to full barrier + store of zero.  */
5918 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5921 expand_builtin_lock_release (enum machine_mode mode
, tree exp
)
5923 enum insn_code icode
;
5925 rtx val
= const0_rtx
;
5927 /* Expand the operands. */
5928 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5930 /* If there is an explicit operation in the md file, use it. */
5931 icode
= sync_lock_release
[mode
];
5932 if (icode
!= CODE_FOR_nothing
)
5934 if (!insn_data
[icode
].operand
[1].predicate (val
, mode
))
5935 val
= force_reg (mode
, val
);
5937 insn
= GEN_FCN (icode
) (mem
, val
);
5945 /* Otherwise we can implement this operation by emitting a barrier
5946 followed by a store of zero. */
5947 expand_builtin_synchronize ();
5948 emit_move_insn (mem
, val
);
5951 /* Expand an expression EXP that calls a built-in function,
5952 with result going to TARGET if that's convenient
5953 (and in mode MODE if that's convenient).
5954 SUBTARGET may be used as the target for computing one of EXP's operands.
5955 IGNORE is nonzero if the value is to be ignored. */
5958 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
5961 tree fndecl
= get_callee_fndecl (exp
);
5962 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5963 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5965 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5966 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5968 /* When not optimizing, generate calls to library functions for a certain
5971 && !called_as_built_in (fndecl
)
5972 && DECL_ASSEMBLER_NAME_SET_P (fndecl
)
5973 && fcode
!= BUILT_IN_ALLOCA
)
5974 return expand_call (exp
, target
, ignore
);
5976 /* The built-in function expanders test for target == const0_rtx
5977 to determine whether the function's result will be ignored. */
5979 target
= const0_rtx
;
5981 /* If the result of a pure or const built-in function is ignored, and
5982 none of its arguments are volatile, we can avoid expanding the
5983 built-in call and just evaluate the arguments for side-effects. */
5984 if (target
== const0_rtx
5985 && (DECL_IS_PURE (fndecl
) || TREE_READONLY (fndecl
)))
5987 bool volatilep
= false;
5989 call_expr_arg_iterator iter
;
5991 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5992 if (TREE_THIS_VOLATILE (arg
))
6000 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6001 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6008 CASE_FLT_FN (BUILT_IN_FABS
):
6009 target
= expand_builtin_fabs (exp
, target
, subtarget
);
6014 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
6015 target
= expand_builtin_copysign (exp
, target
, subtarget
);
6020 /* Just do a normal library call if we were unable to fold
6022 CASE_FLT_FN (BUILT_IN_CABS
):
6025 CASE_FLT_FN (BUILT_IN_EXP
):
6026 CASE_FLT_FN (BUILT_IN_EXP10
):
6027 CASE_FLT_FN (BUILT_IN_POW10
):
6028 CASE_FLT_FN (BUILT_IN_EXP2
):
6029 CASE_FLT_FN (BUILT_IN_EXPM1
):
6030 CASE_FLT_FN (BUILT_IN_LOGB
):
6031 CASE_FLT_FN (BUILT_IN_LOG
):
6032 CASE_FLT_FN (BUILT_IN_LOG10
):
6033 CASE_FLT_FN (BUILT_IN_LOG2
):
6034 CASE_FLT_FN (BUILT_IN_LOG1P
):
6035 CASE_FLT_FN (BUILT_IN_TAN
):
6036 CASE_FLT_FN (BUILT_IN_ASIN
):
6037 CASE_FLT_FN (BUILT_IN_ACOS
):
6038 CASE_FLT_FN (BUILT_IN_ATAN
):
6039 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6040 because of possible accuracy problems. */
6041 if (! flag_unsafe_math_optimizations
)
6043 CASE_FLT_FN (BUILT_IN_SQRT
):
6044 CASE_FLT_FN (BUILT_IN_FLOOR
):
6045 CASE_FLT_FN (BUILT_IN_CEIL
):
6046 CASE_FLT_FN (BUILT_IN_TRUNC
):
6047 CASE_FLT_FN (BUILT_IN_ROUND
):
6048 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6049 CASE_FLT_FN (BUILT_IN_RINT
):
6050 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
6055 CASE_FLT_FN (BUILT_IN_ILOGB
):
6056 if (! flag_unsafe_math_optimizations
)
6058 CASE_FLT_FN (BUILT_IN_ISINF
):
6059 target
= expand_builtin_interclass_mathfn (exp
, target
, subtarget
);
6064 CASE_FLT_FN (BUILT_IN_LCEIL
):
6065 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6066 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6067 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6068 target
= expand_builtin_int_roundingfn (exp
, target
, subtarget
);
6073 CASE_FLT_FN (BUILT_IN_LRINT
):
6074 CASE_FLT_FN (BUILT_IN_LLRINT
):
6075 CASE_FLT_FN (BUILT_IN_LROUND
):
6076 CASE_FLT_FN (BUILT_IN_LLROUND
):
6077 target
= expand_builtin_int_roundingfn_2 (exp
, target
, subtarget
);
6082 CASE_FLT_FN (BUILT_IN_POW
):
6083 target
= expand_builtin_pow (exp
, target
, subtarget
);
6088 CASE_FLT_FN (BUILT_IN_POWI
):
6089 target
= expand_builtin_powi (exp
, target
, subtarget
);
6094 CASE_FLT_FN (BUILT_IN_ATAN2
):
6095 CASE_FLT_FN (BUILT_IN_LDEXP
):
6096 CASE_FLT_FN (BUILT_IN_SCALB
):
6097 CASE_FLT_FN (BUILT_IN_SCALBN
):
6098 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6099 if (! flag_unsafe_math_optimizations
)
6102 CASE_FLT_FN (BUILT_IN_FMOD
):
6103 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6104 CASE_FLT_FN (BUILT_IN_DREM
):
6105 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6110 CASE_FLT_FN (BUILT_IN_CEXPI
):
6111 target
= expand_builtin_cexpi (exp
, target
, subtarget
);
6112 gcc_assert (target
);
6115 CASE_FLT_FN (BUILT_IN_SIN
):
6116 CASE_FLT_FN (BUILT_IN_COS
):
6117 if (! flag_unsafe_math_optimizations
)
6119 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6124 CASE_FLT_FN (BUILT_IN_SINCOS
):
6125 if (! flag_unsafe_math_optimizations
)
6127 target
= expand_builtin_sincos (exp
);
6132 case BUILT_IN_APPLY_ARGS
:
6133 return expand_builtin_apply_args ();
6135 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6136 FUNCTION with a copy of the parameters described by
6137 ARGUMENTS, and ARGSIZE. It returns a block of memory
6138 allocated on the stack into which is stored all the registers
6139 that might possibly be used for returning the result of a
6140 function. ARGUMENTS is the value returned by
6141 __builtin_apply_args. ARGSIZE is the number of bytes of
6142 arguments that must be copied. ??? How should this value be
6143 computed? We'll also need a safe worst case value for varargs
6145 case BUILT_IN_APPLY
:
6146 if (!validate_arglist (exp
, POINTER_TYPE
,
6147 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6148 && !validate_arglist (exp
, REFERENCE_TYPE
,
6149 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6155 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6156 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6157 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6159 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6162 /* __builtin_return (RESULT) causes the function to return the
6163 value described by RESULT. RESULT is address of the block of
6164 memory returned by __builtin_apply. */
6165 case BUILT_IN_RETURN
:
6166 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6167 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6170 case BUILT_IN_SAVEREGS
:
6171 return expand_builtin_saveregs ();
6173 case BUILT_IN_ARGS_INFO
:
6174 return expand_builtin_args_info (exp
);
6176 /* Return the address of the first anonymous stack arg. */
6177 case BUILT_IN_NEXT_ARG
:
6178 if (fold_builtin_next_arg (exp
, false))
6180 return expand_builtin_next_arg ();
6182 case BUILT_IN_CLASSIFY_TYPE
:
6183 return expand_builtin_classify_type (exp
);
6185 case BUILT_IN_CONSTANT_P
:
6188 case BUILT_IN_FRAME_ADDRESS
:
6189 case BUILT_IN_RETURN_ADDRESS
:
6190 return expand_builtin_frame_address (fndecl
, exp
);
6192 /* Returns the address of the area where the structure is returned.
6194 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6195 if (call_expr_nargs (exp
) != 0
6196 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6197 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6200 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6202 case BUILT_IN_ALLOCA
:
6203 target
= expand_builtin_alloca (exp
, target
);
6208 case BUILT_IN_STACK_SAVE
:
6209 return expand_stack_save ();
6211 case BUILT_IN_STACK_RESTORE
:
6212 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6215 case BUILT_IN_BSWAP32
:
6216 case BUILT_IN_BSWAP64
:
6217 target
= expand_builtin_bswap (exp
, target
, subtarget
);
6223 CASE_INT_FN (BUILT_IN_FFS
):
6224 case BUILT_IN_FFSIMAX
:
6225 target
= expand_builtin_unop (target_mode
, exp
, target
,
6226 subtarget
, ffs_optab
);
6231 CASE_INT_FN (BUILT_IN_CLZ
):
6232 case BUILT_IN_CLZIMAX
:
6233 target
= expand_builtin_unop (target_mode
, exp
, target
,
6234 subtarget
, clz_optab
);
6239 CASE_INT_FN (BUILT_IN_CTZ
):
6240 case BUILT_IN_CTZIMAX
:
6241 target
= expand_builtin_unop (target_mode
, exp
, target
,
6242 subtarget
, ctz_optab
);
6247 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6248 case BUILT_IN_POPCOUNTIMAX
:
6249 target
= expand_builtin_unop (target_mode
, exp
, target
,
6250 subtarget
, popcount_optab
);
6255 CASE_INT_FN (BUILT_IN_PARITY
):
6256 case BUILT_IN_PARITYIMAX
:
6257 target
= expand_builtin_unop (target_mode
, exp
, target
,
6258 subtarget
, parity_optab
);
6263 case BUILT_IN_STRLEN
:
6264 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6269 case BUILT_IN_STRCPY
:
6270 target
= expand_builtin_strcpy (fndecl
, exp
, target
, mode
);
6275 case BUILT_IN_STRNCPY
:
6276 target
= expand_builtin_strncpy (exp
, target
, mode
);
6281 case BUILT_IN_STPCPY
:
6282 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6287 case BUILT_IN_STRCAT
:
6288 target
= expand_builtin_strcat (fndecl
, exp
, target
, mode
);
6293 case BUILT_IN_STRNCAT
:
6294 target
= expand_builtin_strncat (exp
, target
, mode
);
6299 case BUILT_IN_STRSPN
:
6300 target
= expand_builtin_strspn (exp
, target
, mode
);
6305 case BUILT_IN_STRCSPN
:
6306 target
= expand_builtin_strcspn (exp
, target
, mode
);
6311 case BUILT_IN_STRSTR
:
6312 target
= expand_builtin_strstr (exp
, target
, mode
);
6317 case BUILT_IN_STRPBRK
:
6318 target
= expand_builtin_strpbrk (exp
, target
, mode
);
6323 case BUILT_IN_INDEX
:
6324 case BUILT_IN_STRCHR
:
6325 target
= expand_builtin_strchr (exp
, target
, mode
);
6330 case BUILT_IN_RINDEX
:
6331 case BUILT_IN_STRRCHR
:
6332 target
= expand_builtin_strrchr (exp
, target
, mode
);
6337 case BUILT_IN_MEMCPY
:
6338 target
= expand_builtin_memcpy (exp
, target
, mode
);
6343 case BUILT_IN_MEMPCPY
:
6344 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6349 case BUILT_IN_MEMMOVE
:
6350 target
= expand_builtin_memmove (exp
, target
, mode
, ignore
);
6355 case BUILT_IN_BCOPY
:
6356 target
= expand_builtin_bcopy (exp
, ignore
);
6361 case BUILT_IN_MEMSET
:
6362 target
= expand_builtin_memset (exp
, target
, mode
);
6367 case BUILT_IN_BZERO
:
6368 target
= expand_builtin_bzero (exp
);
6373 case BUILT_IN_STRCMP
:
6374 target
= expand_builtin_strcmp (exp
, target
, mode
);
6379 case BUILT_IN_STRNCMP
:
6380 target
= expand_builtin_strncmp (exp
, target
, mode
);
6385 case BUILT_IN_MEMCHR
:
6386 target
= expand_builtin_memchr (exp
, target
, mode
);
6392 case BUILT_IN_MEMCMP
:
6393 target
= expand_builtin_memcmp (exp
, target
, mode
);
6398 case BUILT_IN_SETJMP
:
6399 /* This should have been lowered to the builtins below. */
6402 case BUILT_IN_SETJMP_SETUP
:
6403 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6404 and the receiver label. */
6405 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6407 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6408 VOIDmode
, EXPAND_NORMAL
);
6409 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6410 rtx label_r
= label_rtx (label
);
6412 /* This is copied from the handling of non-local gotos. */
6413 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6414 nonlocal_goto_handler_labels
6415 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6416 nonlocal_goto_handler_labels
);
6417 /* ??? Do not let expand_label treat us as such since we would
6418 not want to be both on the list of non-local labels and on
6419 the list of forced labels. */
6420 FORCED_LABEL (label
) = 0;
6425 case BUILT_IN_SETJMP_DISPATCHER
:
6426 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6427 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6429 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6430 rtx label_r
= label_rtx (label
);
6432 /* Remove the dispatcher label from the list of non-local labels
6433 since the receiver labels have been added to it above. */
6434 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6439 case BUILT_IN_SETJMP_RECEIVER
:
6440 /* __builtin_setjmp_receiver is passed the receiver label. */
6441 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6443 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6444 rtx label_r
= label_rtx (label
);
6446 expand_builtin_setjmp_receiver (label_r
);
6451 /* __builtin_longjmp is passed a pointer to an array of five words.
6452 It's similar to the C library longjmp function but works with
6453 __builtin_setjmp above. */
6454 case BUILT_IN_LONGJMP
:
6455 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6457 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6458 VOIDmode
, EXPAND_NORMAL
);
6459 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6461 if (value
!= const1_rtx
)
6463 error ("%<__builtin_longjmp%> second argument must be 1");
6467 expand_builtin_longjmp (buf_addr
, value
);
6472 case BUILT_IN_NONLOCAL_GOTO
:
6473 target
= expand_builtin_nonlocal_goto (exp
);
6478 /* This updates the setjmp buffer that is its argument with the value
6479 of the current stack pointer. */
6480 case BUILT_IN_UPDATE_SETJMP_BUF
:
6481 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6484 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6486 expand_builtin_update_setjmp_buf (buf_addr
);
6492 expand_builtin_trap ();
6495 case BUILT_IN_PRINTF
:
6496 target
= expand_builtin_printf (exp
, target
, mode
, false);
6501 case BUILT_IN_PRINTF_UNLOCKED
:
6502 target
= expand_builtin_printf (exp
, target
, mode
, true);
6507 case BUILT_IN_FPUTS
:
6508 target
= expand_builtin_fputs (exp
, target
, false);
6512 case BUILT_IN_FPUTS_UNLOCKED
:
6513 target
= expand_builtin_fputs (exp
, target
, true);
6518 case BUILT_IN_FPRINTF
:
6519 target
= expand_builtin_fprintf (exp
, target
, mode
, false);
6524 case BUILT_IN_FPRINTF_UNLOCKED
:
6525 target
= expand_builtin_fprintf (exp
, target
, mode
, true);
6530 case BUILT_IN_SPRINTF
:
6531 target
= expand_builtin_sprintf (exp
, target
, mode
);
6536 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6537 case BUILT_IN_SIGNBITD32
:
6538 case BUILT_IN_SIGNBITD64
:
6539 case BUILT_IN_SIGNBITD128
:
6540 target
= expand_builtin_signbit (exp
, target
);
6545 /* Various hooks for the DWARF 2 __throw routine. */
6546 case BUILT_IN_UNWIND_INIT
:
6547 expand_builtin_unwind_init ();
6549 case BUILT_IN_DWARF_CFA
:
6550 return virtual_cfa_rtx
;
6551 #ifdef DWARF2_UNWIND_INFO
6552 case BUILT_IN_DWARF_SP_COLUMN
:
6553 return expand_builtin_dwarf_sp_column ();
6554 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6555 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6558 case BUILT_IN_FROB_RETURN_ADDR
:
6559 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6560 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6561 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6562 case BUILT_IN_EH_RETURN
:
6563 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6564 CALL_EXPR_ARG (exp
, 1));
6566 #ifdef EH_RETURN_DATA_REGNO
6567 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6568 return expand_builtin_eh_return_data_regno (exp
);
6570 case BUILT_IN_EXTEND_POINTER
:
6571 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6573 case BUILT_IN_VA_START
:
6574 case BUILT_IN_STDARG_START
:
6575 return expand_builtin_va_start (exp
);
6576 case BUILT_IN_VA_END
:
6577 return expand_builtin_va_end (exp
);
6578 case BUILT_IN_VA_COPY
:
6579 return expand_builtin_va_copy (exp
);
6580 case BUILT_IN_EXPECT
:
6581 return expand_builtin_expect (exp
, target
);
6582 case BUILT_IN_PREFETCH
:
6583 expand_builtin_prefetch (exp
);
6586 case BUILT_IN_PROFILE_FUNC_ENTER
:
6587 return expand_builtin_profile_func (false);
6588 case BUILT_IN_PROFILE_FUNC_EXIT
:
6589 return expand_builtin_profile_func (true);
6591 case BUILT_IN_INIT_TRAMPOLINE
:
6592 return expand_builtin_init_trampoline (exp
);
6593 case BUILT_IN_ADJUST_TRAMPOLINE
:
6594 return expand_builtin_adjust_trampoline (exp
);
6597 case BUILT_IN_EXECL
:
6598 case BUILT_IN_EXECV
:
6599 case BUILT_IN_EXECLP
:
6600 case BUILT_IN_EXECLE
:
6601 case BUILT_IN_EXECVP
:
6602 case BUILT_IN_EXECVE
:
6603 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6608 case BUILT_IN_FETCH_AND_ADD_1
:
6609 case BUILT_IN_FETCH_AND_ADD_2
:
6610 case BUILT_IN_FETCH_AND_ADD_4
:
6611 case BUILT_IN_FETCH_AND_ADD_8
:
6612 case BUILT_IN_FETCH_AND_ADD_16
:
6613 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_ADD_1
);
6614 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6615 false, target
, ignore
);
6620 case BUILT_IN_FETCH_AND_SUB_1
:
6621 case BUILT_IN_FETCH_AND_SUB_2
:
6622 case BUILT_IN_FETCH_AND_SUB_4
:
6623 case BUILT_IN_FETCH_AND_SUB_8
:
6624 case BUILT_IN_FETCH_AND_SUB_16
:
6625 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_SUB_1
);
6626 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6627 false, target
, ignore
);
6632 case BUILT_IN_FETCH_AND_OR_1
:
6633 case BUILT_IN_FETCH_AND_OR_2
:
6634 case BUILT_IN_FETCH_AND_OR_4
:
6635 case BUILT_IN_FETCH_AND_OR_8
:
6636 case BUILT_IN_FETCH_AND_OR_16
:
6637 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_OR_1
);
6638 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6639 false, target
, ignore
);
6644 case BUILT_IN_FETCH_AND_AND_1
:
6645 case BUILT_IN_FETCH_AND_AND_2
:
6646 case BUILT_IN_FETCH_AND_AND_4
:
6647 case BUILT_IN_FETCH_AND_AND_8
:
6648 case BUILT_IN_FETCH_AND_AND_16
:
6649 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_AND_1
);
6650 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6651 false, target
, ignore
);
6656 case BUILT_IN_FETCH_AND_XOR_1
:
6657 case BUILT_IN_FETCH_AND_XOR_2
:
6658 case BUILT_IN_FETCH_AND_XOR_4
:
6659 case BUILT_IN_FETCH_AND_XOR_8
:
6660 case BUILT_IN_FETCH_AND_XOR_16
:
6661 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_XOR_1
);
6662 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6663 false, target
, ignore
);
6668 case BUILT_IN_FETCH_AND_NAND_1
:
6669 case BUILT_IN_FETCH_AND_NAND_2
:
6670 case BUILT_IN_FETCH_AND_NAND_4
:
6671 case BUILT_IN_FETCH_AND_NAND_8
:
6672 case BUILT_IN_FETCH_AND_NAND_16
:
6673 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_NAND_1
);
6674 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6675 false, target
, ignore
);
6680 case BUILT_IN_ADD_AND_FETCH_1
:
6681 case BUILT_IN_ADD_AND_FETCH_2
:
6682 case BUILT_IN_ADD_AND_FETCH_4
:
6683 case BUILT_IN_ADD_AND_FETCH_8
:
6684 case BUILT_IN_ADD_AND_FETCH_16
:
6685 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ADD_AND_FETCH_1
);
6686 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6687 true, target
, ignore
);
6692 case BUILT_IN_SUB_AND_FETCH_1
:
6693 case BUILT_IN_SUB_AND_FETCH_2
:
6694 case BUILT_IN_SUB_AND_FETCH_4
:
6695 case BUILT_IN_SUB_AND_FETCH_8
:
6696 case BUILT_IN_SUB_AND_FETCH_16
:
6697 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SUB_AND_FETCH_1
);
6698 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6699 true, target
, ignore
);
6704 case BUILT_IN_OR_AND_FETCH_1
:
6705 case BUILT_IN_OR_AND_FETCH_2
:
6706 case BUILT_IN_OR_AND_FETCH_4
:
6707 case BUILT_IN_OR_AND_FETCH_8
:
6708 case BUILT_IN_OR_AND_FETCH_16
:
6709 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_OR_AND_FETCH_1
);
6710 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6711 true, target
, ignore
);
6716 case BUILT_IN_AND_AND_FETCH_1
:
6717 case BUILT_IN_AND_AND_FETCH_2
:
6718 case BUILT_IN_AND_AND_FETCH_4
:
6719 case BUILT_IN_AND_AND_FETCH_8
:
6720 case BUILT_IN_AND_AND_FETCH_16
:
6721 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_AND_AND_FETCH_1
);
6722 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6723 true, target
, ignore
);
6728 case BUILT_IN_XOR_AND_FETCH_1
:
6729 case BUILT_IN_XOR_AND_FETCH_2
:
6730 case BUILT_IN_XOR_AND_FETCH_4
:
6731 case BUILT_IN_XOR_AND_FETCH_8
:
6732 case BUILT_IN_XOR_AND_FETCH_16
:
6733 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_XOR_AND_FETCH_1
);
6734 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6735 true, target
, ignore
);
6740 case BUILT_IN_NAND_AND_FETCH_1
:
6741 case BUILT_IN_NAND_AND_FETCH_2
:
6742 case BUILT_IN_NAND_AND_FETCH_4
:
6743 case BUILT_IN_NAND_AND_FETCH_8
:
6744 case BUILT_IN_NAND_AND_FETCH_16
:
6745 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_NAND_AND_FETCH_1
);
6746 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6747 true, target
, ignore
);
6752 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1
:
6753 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2
:
6754 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4
:
6755 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8
:
6756 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16
:
6757 if (mode
== VOIDmode
)
6758 mode
= TYPE_MODE (boolean_type_node
);
6759 if (!target
|| !register_operand (target
, mode
))
6760 target
= gen_reg_rtx (mode
);
6762 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_BOOL_COMPARE_AND_SWAP_1
);
6763 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6768 case BUILT_IN_VAL_COMPARE_AND_SWAP_1
:
6769 case BUILT_IN_VAL_COMPARE_AND_SWAP_2
:
6770 case BUILT_IN_VAL_COMPARE_AND_SWAP_4
:
6771 case BUILT_IN_VAL_COMPARE_AND_SWAP_8
:
6772 case BUILT_IN_VAL_COMPARE_AND_SWAP_16
:
6773 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_VAL_COMPARE_AND_SWAP_1
);
6774 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6779 case BUILT_IN_LOCK_TEST_AND_SET_1
:
6780 case BUILT_IN_LOCK_TEST_AND_SET_2
:
6781 case BUILT_IN_LOCK_TEST_AND_SET_4
:
6782 case BUILT_IN_LOCK_TEST_AND_SET_8
:
6783 case BUILT_IN_LOCK_TEST_AND_SET_16
:
6784 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_TEST_AND_SET_1
);
6785 target
= expand_builtin_lock_test_and_set (mode
, exp
, target
);
6790 case BUILT_IN_LOCK_RELEASE_1
:
6791 case BUILT_IN_LOCK_RELEASE_2
:
6792 case BUILT_IN_LOCK_RELEASE_4
:
6793 case BUILT_IN_LOCK_RELEASE_8
:
6794 case BUILT_IN_LOCK_RELEASE_16
:
6795 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_RELEASE_1
);
6796 expand_builtin_lock_release (mode
, exp
);
6799 case BUILT_IN_SYNCHRONIZE
:
6800 expand_builtin_synchronize ();
6803 case BUILT_IN_OBJECT_SIZE
:
6804 return expand_builtin_object_size (exp
);
6806 case BUILT_IN_MEMCPY_CHK
:
6807 case BUILT_IN_MEMPCPY_CHK
:
6808 case BUILT_IN_MEMMOVE_CHK
:
6809 case BUILT_IN_MEMSET_CHK
:
6810 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6815 case BUILT_IN_STRCPY_CHK
:
6816 case BUILT_IN_STPCPY_CHK
:
6817 case BUILT_IN_STRNCPY_CHK
:
6818 case BUILT_IN_STRCAT_CHK
:
6819 case BUILT_IN_STRNCAT_CHK
:
6820 case BUILT_IN_SNPRINTF_CHK
:
6821 case BUILT_IN_VSNPRINTF_CHK
:
6822 maybe_emit_chk_warning (exp
, fcode
);
6825 case BUILT_IN_SPRINTF_CHK
:
6826 case BUILT_IN_VSPRINTF_CHK
:
6827 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6830 default: /* just do library call, if unknown builtin */
6834 /* The switch statement above can drop through to cause the function
6835 to be called normally. */
6836 return expand_call (exp
, target
, ignore
);
6839 /* Determine whether a tree node represents a call to a built-in
6840 function. If the tree T is a call to a built-in function with
6841 the right number of arguments of the appropriate types, return
6842 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6843 Otherwise the return value is END_BUILTINS. */
6845 enum built_in_function
6846 builtin_mathfn_code (tree t
)
6848 tree fndecl
, arg
, parmlist
;
6849 tree argtype
, parmtype
;
6850 call_expr_arg_iterator iter
;
6852 if (TREE_CODE (t
) != CALL_EXPR
6853 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6854 return END_BUILTINS
;
6856 fndecl
= get_callee_fndecl (t
);
6857 if (fndecl
== NULL_TREE
6858 || TREE_CODE (fndecl
) != FUNCTION_DECL
6859 || ! DECL_BUILT_IN (fndecl
)
6860 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6861 return END_BUILTINS
;
6863 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6864 init_call_expr_arg_iterator (t
, &iter
);
6865 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6867 /* If a function doesn't take a variable number of arguments,
6868 the last element in the list will have type `void'. */
6869 parmtype
= TREE_VALUE (parmlist
);
6870 if (VOID_TYPE_P (parmtype
))
6872 if (more_call_expr_args_p (&iter
))
6873 return END_BUILTINS
;
6874 return DECL_FUNCTION_CODE (fndecl
);
6877 if (! more_call_expr_args_p (&iter
))
6878 return END_BUILTINS
;
6880 arg
= next_call_expr_arg (&iter
);
6881 argtype
= TREE_TYPE (arg
);
6883 if (SCALAR_FLOAT_TYPE_P (parmtype
))
6885 if (! SCALAR_FLOAT_TYPE_P (argtype
))
6886 return END_BUILTINS
;
6888 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
6890 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
6891 return END_BUILTINS
;
6893 else if (POINTER_TYPE_P (parmtype
))
6895 if (! POINTER_TYPE_P (argtype
))
6896 return END_BUILTINS
;
6898 else if (INTEGRAL_TYPE_P (parmtype
))
6900 if (! INTEGRAL_TYPE_P (argtype
))
6901 return END_BUILTINS
;
6904 return END_BUILTINS
;
6907 /* Variable-length argument list. */
6908 return DECL_FUNCTION_CODE (fndecl
);
6911 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6912 evaluate to a constant. */
6915 fold_builtin_constant_p (tree arg
)
6917 /* We return 1 for a numeric type that's known to be a constant
6918 value at compile-time or for an aggregate type that's a
6919 literal constant. */
6922 /* If we know this is a constant, emit the constant of one. */
6923 if (CONSTANT_CLASS_P (arg
)
6924 || (TREE_CODE (arg
) == CONSTRUCTOR
6925 && TREE_CONSTANT (arg
)))
6926 return integer_one_node
;
6927 if (TREE_CODE (arg
) == ADDR_EXPR
)
6929 tree op
= TREE_OPERAND (arg
, 0);
6930 if (TREE_CODE (op
) == STRING_CST
6931 || (TREE_CODE (op
) == ARRAY_REF
6932 && integer_zerop (TREE_OPERAND (op
, 1))
6933 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
6934 return integer_one_node
;
6937 /* If this expression has side effects, show we don't know it to be a
6938 constant. Likewise if it's a pointer or aggregate type since in
6939 those case we only want literals, since those are only optimized
6940 when generating RTL, not later.
6941 And finally, if we are compiling an initializer, not code, we
6942 need to return a definite result now; there's not going to be any
6943 more optimization done. */
6944 if (TREE_SIDE_EFFECTS (arg
)
6945 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
6946 || POINTER_TYPE_P (TREE_TYPE (arg
))
6948 || folding_initializer
)
6949 return integer_zero_node
;
6954 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
6955 comparison against the argument will fold to a constant. In practice,
6956 this means a true constant or the address of a non-weak symbol. */
6959 fold_builtin_expect (tree arg
)
6963 /* If the argument isn't invariant, then there's nothing we can do. */
6964 if (!TREE_INVARIANT (arg
))
6967 /* If we're looking at an address of a weak decl, then do not fold. */
6970 if (TREE_CODE (inner
) == ADDR_EXPR
)
6974 inner
= TREE_OPERAND (inner
, 0);
6976 while (TREE_CODE (inner
) == COMPONENT_REF
6977 || TREE_CODE (inner
) == ARRAY_REF
);
6978 if (DECL_P (inner
) && DECL_WEAK (inner
))
6982 /* Otherwise, ARG already has the proper type for the return value. */
6986 /* Fold a call to __builtin_classify_type with argument ARG. */
6989 fold_builtin_classify_type (tree arg
)
6992 return build_int_cst (NULL_TREE
, no_type_class
);
6994 return build_int_cst (NULL_TREE
, type_to_class (TREE_TYPE (arg
)));
6997 /* Fold a call to __builtin_strlen with argument ARG. */
7000 fold_builtin_strlen (tree arg
)
7002 if (!validate_arg (arg
, POINTER_TYPE
))
7006 tree len
= c_strlen (arg
, 0);
7010 /* Convert from the internal "sizetype" type to "size_t". */
7012 len
= fold_convert (size_type_node
, len
);
7020 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7023 fold_builtin_inf (tree type
, int warn
)
7025 REAL_VALUE_TYPE real
;
7027 /* __builtin_inff is intended to be usable to define INFINITY on all
7028 targets. If an infinity is not available, INFINITY expands "to a
7029 positive constant of type float that overflows at translation
7030 time", footnote "In this case, using INFINITY will violate the
7031 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7032 Thus we pedwarn to ensure this constraint violation is
7034 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7035 pedwarn ("target format does not support infinity");
7038 return build_real (type
, real
);
7041 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7044 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7046 REAL_VALUE_TYPE real
;
7049 if (!validate_arg (arg
, POINTER_TYPE
))
7051 str
= c_getstr (arg
);
7055 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7058 return build_real (type
, real
);
7061 /* Return true if the floating point expression T has an integer value.
7062 We also allow +Inf, -Inf and NaN to be considered integer values. */
7065 integer_valued_real_p (tree t
)
7067 switch (TREE_CODE (t
))
7074 case NON_LVALUE_EXPR
:
7075 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7080 return integer_valued_real_p (GENERIC_TREE_OPERAND (t
, 1));
7087 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7088 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7091 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7092 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7095 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7099 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7100 if (TREE_CODE (type
) == INTEGER_TYPE
)
7102 if (TREE_CODE (type
) == REAL_TYPE
)
7103 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7108 switch (builtin_mathfn_code (t
))
7110 CASE_FLT_FN (BUILT_IN_CEIL
):
7111 CASE_FLT_FN (BUILT_IN_FLOOR
):
7112 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7113 CASE_FLT_FN (BUILT_IN_RINT
):
7114 CASE_FLT_FN (BUILT_IN_ROUND
):
7115 CASE_FLT_FN (BUILT_IN_TRUNC
):
7118 CASE_FLT_FN (BUILT_IN_FMIN
):
7119 CASE_FLT_FN (BUILT_IN_FMAX
):
7120 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7121 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7134 /* FNDECL is assumed to be a builtin where truncation can be propagated
7135 across (for instance floor((double)f) == (double)floorf (f).
7136 Do the transformation for a call with argument ARG. */
7139 fold_trunc_transparent_mathfn (tree fndecl
, tree arg
)
7141 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7143 if (!validate_arg (arg
, REAL_TYPE
))
7146 /* Integer rounding functions are idempotent. */
7147 if (fcode
== builtin_mathfn_code (arg
))
7150 /* If argument is already integer valued, and we don't need to worry
7151 about setting errno, there's no need to perform rounding. */
7152 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7157 tree arg0
= strip_float_extensions (arg
);
7158 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7159 tree newtype
= TREE_TYPE (arg0
);
7162 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7163 && (decl
= mathfn_built_in (newtype
, fcode
)))
7164 return fold_convert (ftype
,
7165 build_call_expr (decl
, 1,
7166 fold_convert (newtype
, arg0
)));
7171 /* FNDECL is assumed to be builtin which can narrow the FP type of
7172 the argument, for instance lround((double)f) -> lroundf (f).
7173 Do the transformation for a call with argument ARG. */
7176 fold_fixed_mathfn (tree fndecl
, tree arg
)
7178 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7180 if (!validate_arg (arg
, REAL_TYPE
))
7183 /* If argument is already integer valued, and we don't need to worry
7184 about setting errno, there's no need to perform rounding. */
7185 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7186 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7190 tree ftype
= TREE_TYPE (arg
);
7191 tree arg0
= strip_float_extensions (arg
);
7192 tree newtype
= TREE_TYPE (arg0
);
7195 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7196 && (decl
= mathfn_built_in (newtype
, fcode
)))
7197 return build_call_expr (decl
, 1, fold_convert (newtype
, arg0
));
7200 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7201 sizeof (long long) == sizeof (long). */
7202 if (TYPE_PRECISION (long_long_integer_type_node
)
7203 == TYPE_PRECISION (long_integer_type_node
))
7205 tree newfn
= NULL_TREE
;
7208 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7209 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7212 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7213 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7216 CASE_FLT_FN (BUILT_IN_LLROUND
):
7217 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7220 CASE_FLT_FN (BUILT_IN_LLRINT
):
7221 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7230 tree newcall
= build_call_expr(newfn
, 1, arg
);
7231 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7238 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7239 return type. Return NULL_TREE if no simplification can be made. */
7242 fold_builtin_cabs (tree arg
, tree type
, tree fndecl
)
7246 if (TREE_CODE (TREE_TYPE (arg
)) != COMPLEX_TYPE
7247 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7250 /* Calculate the result when the argument is a constant. */
7251 if (TREE_CODE (arg
) == COMPLEX_CST
7252 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7256 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7258 tree real
= TREE_OPERAND (arg
, 0);
7259 tree imag
= TREE_OPERAND (arg
, 1);
7261 /* If either part is zero, cabs is fabs of the other. */
7262 if (real_zerop (real
))
7263 return fold_build1 (ABS_EXPR
, type
, imag
);
7264 if (real_zerop (imag
))
7265 return fold_build1 (ABS_EXPR
, type
, real
);
7267 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7268 if (flag_unsafe_math_optimizations
7269 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7271 const REAL_VALUE_TYPE sqrt2_trunc
7272 = real_value_truncate (TYPE_MODE (type
), dconstsqrt2
);
7274 return fold_build2 (MULT_EXPR
, type
,
7275 fold_build1 (ABS_EXPR
, type
, real
),
7276 build_real (type
, sqrt2_trunc
));
7280 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7281 if (TREE_CODE (arg
) == NEGATE_EXPR
7282 || TREE_CODE (arg
) == CONJ_EXPR
)
7283 return build_call_expr (fndecl
, 1, TREE_OPERAND (arg
, 0));
7285 /* Don't do this when optimizing for size. */
7286 if (flag_unsafe_math_optimizations
7287 && optimize
&& !optimize_size
)
7289 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7291 if (sqrtfn
!= NULL_TREE
)
7293 tree rpart
, ipart
, result
;
7295 arg
= builtin_save_expr (arg
);
7297 rpart
= fold_build1 (REALPART_EXPR
, type
, arg
);
7298 ipart
= fold_build1 (IMAGPART_EXPR
, type
, arg
);
7300 rpart
= builtin_save_expr (rpart
);
7301 ipart
= builtin_save_expr (ipart
);
7303 result
= fold_build2 (PLUS_EXPR
, type
,
7304 fold_build2 (MULT_EXPR
, type
,
7306 fold_build2 (MULT_EXPR
, type
,
7309 return build_call_expr (sqrtfn
, 1, result
);
7316 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7317 Return NULL_TREE if no simplification can be made. */
7320 fold_builtin_sqrt (tree arg
, tree type
)
7323 enum built_in_function fcode
;
7326 if (!validate_arg (arg
, REAL_TYPE
))
7329 /* Calculate the result when the argument is a constant. */
7330 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7333 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7334 fcode
= builtin_mathfn_code (arg
);
7335 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7337 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7338 arg
= fold_build2 (MULT_EXPR
, type
,
7339 CALL_EXPR_ARG (arg
, 0),
7340 build_real (type
, dconsthalf
));
7341 return build_call_expr (expfn
, 1, arg
);
7344 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7345 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7347 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7351 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7353 /* The inner root was either sqrt or cbrt. */
7354 REAL_VALUE_TYPE dconstroot
=
7355 BUILTIN_SQRT_P (fcode
) ? dconsthalf
: dconstthird
;
7357 /* Adjust for the outer root. */
7358 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7359 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7360 tree_root
= build_real (type
, dconstroot
);
7361 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7365 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7366 if (flag_unsafe_math_optimizations
7367 && (fcode
== BUILT_IN_POW
7368 || fcode
== BUILT_IN_POWF
7369 || fcode
== BUILT_IN_POWL
))
7371 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7372 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7373 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7375 if (!tree_expr_nonnegative_p (arg0
))
7376 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7377 narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
7378 build_real (type
, dconsthalf
));
7379 return build_call_expr (powfn
, 2, arg0
, narg1
);
7385 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7386 Return NULL_TREE if no simplification can be made. */
7389 fold_builtin_cbrt (tree arg
, tree type
)
7391 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7394 if (!validate_arg (arg
, REAL_TYPE
))
7397 /* Calculate the result when the argument is a constant. */
7398 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7401 if (flag_unsafe_math_optimizations
)
7403 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7404 if (BUILTIN_EXPONENT_P (fcode
))
7406 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7407 const REAL_VALUE_TYPE third_trunc
=
7408 real_value_truncate (TYPE_MODE (type
), dconstthird
);
7409 arg
= fold_build2 (MULT_EXPR
, type
,
7410 CALL_EXPR_ARG (arg
, 0),
7411 build_real (type
, third_trunc
));
7412 return build_call_expr (expfn
, 1, arg
);
7415 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7416 if (BUILTIN_SQRT_P (fcode
))
7418 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7422 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7424 REAL_VALUE_TYPE dconstroot
= dconstthird
;
7426 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7427 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7428 tree_root
= build_real (type
, dconstroot
);
7429 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7433 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7434 if (BUILTIN_CBRT_P (fcode
))
7436 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7437 if (tree_expr_nonnegative_p (arg0
))
7439 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7444 REAL_VALUE_TYPE dconstroot
;
7446 real_arithmetic (&dconstroot
, MULT_EXPR
, &dconstthird
, &dconstthird
);
7447 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7448 tree_root
= build_real (type
, dconstroot
);
7449 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7454 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7455 if (fcode
== BUILT_IN_POW
7456 || fcode
== BUILT_IN_POWF
7457 || fcode
== BUILT_IN_POWL
)
7459 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7460 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7461 if (tree_expr_nonnegative_p (arg00
))
7463 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7464 const REAL_VALUE_TYPE dconstroot
7465 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
7466 tree narg01
= fold_build2 (MULT_EXPR
, type
, arg01
,
7467 build_real (type
, dconstroot
));
7468 return build_call_expr (powfn
, 2, arg00
, narg01
);
7475 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7476 TYPE is the type of the return value. Return NULL_TREE if no
7477 simplification can be made. */
7480 fold_builtin_cos (tree arg
, tree type
, tree fndecl
)
7484 if (!validate_arg (arg
, REAL_TYPE
))
7487 /* Calculate the result when the argument is a constant. */
7488 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7491 /* Optimize cos(-x) into cos (x). */
7492 if ((narg
= fold_strip_sign_ops (arg
)))
7493 return build_call_expr (fndecl
, 1, narg
);
7498 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7499 Return NULL_TREE if no simplification can be made. */
7502 fold_builtin_cosh (tree arg
, tree type
, tree fndecl
)
7504 if (validate_arg (arg
, REAL_TYPE
))
7508 /* Calculate the result when the argument is a constant. */
7509 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7512 /* Optimize cosh(-x) into cosh (x). */
7513 if ((narg
= fold_strip_sign_ops (arg
)))
7514 return build_call_expr (fndecl
, 1, narg
);
7520 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7521 Return NULL_TREE if no simplification can be made. */
7524 fold_builtin_tan (tree arg
, tree type
)
7526 enum built_in_function fcode
;
7529 if (!validate_arg (arg
, REAL_TYPE
))
7532 /* Calculate the result when the argument is a constant. */
7533 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7536 /* Optimize tan(atan(x)) = x. */
7537 fcode
= builtin_mathfn_code (arg
);
7538 if (flag_unsafe_math_optimizations
7539 && (fcode
== BUILT_IN_ATAN
7540 || fcode
== BUILT_IN_ATANF
7541 || fcode
== BUILT_IN_ATANL
))
7542 return CALL_EXPR_ARG (arg
, 0);
7547 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7548 NULL_TREE if no simplification can be made. */
7551 fold_builtin_sincos (tree arg0
, tree arg1
, tree arg2
)
7556 if (!validate_arg (arg0
, REAL_TYPE
)
7557 || !validate_arg (arg1
, POINTER_TYPE
)
7558 || !validate_arg (arg2
, POINTER_TYPE
))
7561 type
= TREE_TYPE (arg0
);
7563 /* Calculate the result when the argument is a constant. */
7564 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7567 /* Canonicalize sincos to cexpi. */
7568 if (!TARGET_C99_FUNCTIONS
)
7570 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7574 call
= build_call_expr (fn
, 1, arg0
);
7575 call
= builtin_save_expr (call
);
7577 return build2 (COMPOUND_EXPR
, type
,
7578 build2 (MODIFY_EXPR
, void_type_node
,
7579 build_fold_indirect_ref (arg1
),
7580 build1 (IMAGPART_EXPR
, type
, call
)),
7581 build2 (MODIFY_EXPR
, void_type_node
,
7582 build_fold_indirect_ref (arg2
),
7583 build1 (REALPART_EXPR
, type
, call
)));
7586 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7587 NULL_TREE if no simplification can be made. */
7590 fold_builtin_cexp (tree arg0
, tree type
)
7593 tree realp
, imagp
, ifn
;
7595 if (!validate_arg (arg0
, COMPLEX_TYPE
))
7598 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7600 /* In case we can figure out the real part of arg0 and it is constant zero
7602 if (!TARGET_C99_FUNCTIONS
)
7604 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7608 if ((realp
= fold_unary (REALPART_EXPR
, rtype
, arg0
))
7609 && real_zerop (realp
))
7611 tree narg
= fold_build1 (IMAGPART_EXPR
, rtype
, arg0
);
7612 return build_call_expr (ifn
, 1, narg
);
7615 /* In case we can easily decompose real and imaginary parts split cexp
7616 to exp (r) * cexpi (i). */
7617 if (flag_unsafe_math_optimizations
7620 tree rfn
, rcall
, icall
;
7622 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7626 imagp
= fold_unary (IMAGPART_EXPR
, rtype
, arg0
);
7630 icall
= build_call_expr (ifn
, 1, imagp
);
7631 icall
= builtin_save_expr (icall
);
7632 rcall
= build_call_expr (rfn
, 1, realp
);
7633 rcall
= builtin_save_expr (rcall
);
7634 return build2 (COMPLEX_EXPR
, type
,
7635 build2 (MULT_EXPR
, rtype
,
7637 build1 (REALPART_EXPR
, rtype
, icall
)),
7638 build2 (MULT_EXPR
, rtype
,
7640 build1 (IMAGPART_EXPR
, rtype
, icall
)));
7646 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7647 Return NULL_TREE if no simplification can be made. */
7650 fold_builtin_trunc (tree fndecl
, tree arg
)
7652 if (!validate_arg (arg
, REAL_TYPE
))
7655 /* Optimize trunc of constant value. */
7656 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7658 REAL_VALUE_TYPE r
, x
;
7659 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7661 x
= TREE_REAL_CST (arg
);
7662 real_trunc (&r
, TYPE_MODE (type
), &x
);
7663 return build_real (type
, r
);
7666 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7669 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7670 Return NULL_TREE if no simplification can be made. */
7673 fold_builtin_floor (tree fndecl
, tree arg
)
7675 if (!validate_arg (arg
, REAL_TYPE
))
7678 /* Optimize floor of constant value. */
7679 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7683 x
= TREE_REAL_CST (arg
);
7684 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7686 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7689 real_floor (&r
, TYPE_MODE (type
), &x
);
7690 return build_real (type
, r
);
7694 /* Fold floor (x) where x is nonnegative to trunc (x). */
7695 if (tree_expr_nonnegative_p (arg
))
7697 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7699 return build_call_expr (truncfn
, 1, arg
);
7702 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7705 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7706 Return NULL_TREE if no simplification can be made. */
7709 fold_builtin_ceil (tree fndecl
, tree arg
)
7711 if (!validate_arg (arg
, REAL_TYPE
))
7714 /* Optimize ceil of constant value. */
7715 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7719 x
= TREE_REAL_CST (arg
);
7720 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7722 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7725 real_ceil (&r
, TYPE_MODE (type
), &x
);
7726 return build_real (type
, r
);
7730 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7733 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7734 Return NULL_TREE if no simplification can be made. */
7737 fold_builtin_round (tree fndecl
, tree arg
)
7739 if (!validate_arg (arg
, REAL_TYPE
))
7742 /* Optimize round of constant value. */
7743 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7747 x
= TREE_REAL_CST (arg
);
7748 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7750 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7753 real_round (&r
, TYPE_MODE (type
), &x
);
7754 return build_real (type
, r
);
7758 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7761 /* Fold function call to builtin lround, lroundf or lroundl (or the
7762 corresponding long long versions) and other rounding functions. ARG
7763 is the argument to the call. Return NULL_TREE if no simplification
7767 fold_builtin_int_roundingfn (tree fndecl
, tree arg
)
7769 if (!validate_arg (arg
, REAL_TYPE
))
7772 /* Optimize lround of constant value. */
7773 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7775 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7777 if (! REAL_VALUE_ISNAN (x
) && ! REAL_VALUE_ISINF (x
))
7779 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7780 tree ftype
= TREE_TYPE (arg
);
7781 unsigned HOST_WIDE_INT lo2
;
7782 HOST_WIDE_INT hi
, lo
;
7785 switch (DECL_FUNCTION_CODE (fndecl
))
7787 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7788 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7789 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7792 CASE_FLT_FN (BUILT_IN_LCEIL
):
7793 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7794 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7797 CASE_FLT_FN (BUILT_IN_LROUND
):
7798 CASE_FLT_FN (BUILT_IN_LLROUND
):
7799 real_round (&r
, TYPE_MODE (ftype
), &x
);
7806 REAL_VALUE_TO_INT (&lo
, &hi
, r
);
7807 if (!fit_double_type (lo
, hi
, &lo2
, &hi
, itype
))
7808 return build_int_cst_wide (itype
, lo2
, hi
);
7812 switch (DECL_FUNCTION_CODE (fndecl
))
7814 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7815 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7816 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7817 if (tree_expr_nonnegative_p (arg
))
7818 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)),
7824 return fold_fixed_mathfn (fndecl
, arg
);
7827 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7828 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7829 the argument to the call. Return NULL_TREE if no simplification can
7833 fold_builtin_bitop (tree fndecl
, tree arg
)
7835 if (!validate_arg (arg
, INTEGER_TYPE
))
7838 /* Optimize for constant argument. */
7839 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7841 HOST_WIDE_INT hi
, width
, result
;
7842 unsigned HOST_WIDE_INT lo
;
7845 type
= TREE_TYPE (arg
);
7846 width
= TYPE_PRECISION (type
);
7847 lo
= TREE_INT_CST_LOW (arg
);
7849 /* Clear all the bits that are beyond the type's precision. */
7850 if (width
> HOST_BITS_PER_WIDE_INT
)
7852 hi
= TREE_INT_CST_HIGH (arg
);
7853 if (width
< 2 * HOST_BITS_PER_WIDE_INT
)
7854 hi
&= ~((HOST_WIDE_INT
) (-1) >> (width
- HOST_BITS_PER_WIDE_INT
));
7859 if (width
< HOST_BITS_PER_WIDE_INT
)
7860 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
7863 switch (DECL_FUNCTION_CODE (fndecl
))
7865 CASE_INT_FN (BUILT_IN_FFS
):
7867 result
= exact_log2 (lo
& -lo
) + 1;
7869 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
) + 1;
7874 CASE_INT_FN (BUILT_IN_CLZ
):
7876 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
7878 result
= width
- floor_log2 (lo
) - 1;
7879 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7883 CASE_INT_FN (BUILT_IN_CTZ
):
7885 result
= exact_log2 (lo
& -lo
);
7887 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
);
7888 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7892 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7895 result
++, lo
&= lo
- 1;
7897 result
++, hi
&= hi
- 1;
7900 CASE_INT_FN (BUILT_IN_PARITY
):
7903 result
++, lo
&= lo
- 1;
7905 result
++, hi
&= hi
- 1;
7913 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
7919 /* Fold function call to builtin_bswap and the long and long long
7920 variants. Return NULL_TREE if no simplification can be made. */
7922 fold_builtin_bswap (tree fndecl
, tree arg
)
7924 if (! validate_arg (arg
, INTEGER_TYPE
))
7927 /* Optimize constant value. */
7928 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7930 HOST_WIDE_INT hi
, width
, r_hi
= 0;
7931 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
7934 type
= TREE_TYPE (arg
);
7935 width
= TYPE_PRECISION (type
);
7936 lo
= TREE_INT_CST_LOW (arg
);
7937 hi
= TREE_INT_CST_HIGH (arg
);
7939 switch (DECL_FUNCTION_CODE (fndecl
))
7941 case BUILT_IN_BSWAP32
:
7942 case BUILT_IN_BSWAP64
:
7946 for (s
= 0; s
< width
; s
+= 8)
7948 int d
= width
- s
- 8;
7949 unsigned HOST_WIDE_INT byte
;
7951 if (s
< HOST_BITS_PER_WIDE_INT
)
7952 byte
= (lo
>> s
) & 0xff;
7954 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
7956 if (d
< HOST_BITS_PER_WIDE_INT
)
7959 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
7969 if (width
< HOST_BITS_PER_WIDE_INT
)
7970 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
);
7972 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
, r_hi
);
7978 /* Return true if EXPR is the real constant contained in VALUE. */
7981 real_dconstp (tree expr
, const REAL_VALUE_TYPE
*value
)
7985 return ((TREE_CODE (expr
) == REAL_CST
7986 && !TREE_OVERFLOW (expr
)
7987 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), *value
))
7988 || (TREE_CODE (expr
) == COMPLEX_CST
7989 && real_dconstp (TREE_REALPART (expr
), value
)
7990 && real_zerop (TREE_IMAGPART (expr
))));
7993 /* A subroutine of fold_builtin to fold the various logarithmic
7994 functions. Return NULL_TREE if no simplification can me made.
7995 FUNC is the corresponding MPFR logarithm function. */
7998 fold_builtin_logarithm (tree fndecl
, tree arg
,
7999 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8001 if (validate_arg (arg
, REAL_TYPE
))
8003 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8005 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8007 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8008 instead we'll look for 'e' truncated to MODE. So only do
8009 this if flag_unsafe_math_optimizations is set. */
8010 if (flag_unsafe_math_optimizations
&& func
== mpfr_log
)
8012 const REAL_VALUE_TYPE e_truncated
=
8013 real_value_truncate (TYPE_MODE (type
), dconste
);
8014 if (real_dconstp (arg
, &e_truncated
))
8015 return build_real (type
, dconst1
);
8018 /* Calculate the result when the argument is a constant. */
8019 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8022 /* Special case, optimize logN(expN(x)) = x. */
8023 if (flag_unsafe_math_optimizations
8024 && ((func
== mpfr_log
8025 && (fcode
== BUILT_IN_EXP
8026 || fcode
== BUILT_IN_EXPF
8027 || fcode
== BUILT_IN_EXPL
))
8028 || (func
== mpfr_log2
8029 && (fcode
== BUILT_IN_EXP2
8030 || fcode
== BUILT_IN_EXP2F
8031 || fcode
== BUILT_IN_EXP2L
))
8032 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8033 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
8035 /* Optimize logN(func()) for various exponential functions. We
8036 want to determine the value "x" and the power "exponent" in
8037 order to transform logN(x**exponent) into exponent*logN(x). */
8038 if (flag_unsafe_math_optimizations
)
8040 tree exponent
= 0, x
= 0;
8044 CASE_FLT_FN (BUILT_IN_EXP
):
8045 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8046 x
= build_real (type
,
8047 real_value_truncate (TYPE_MODE (type
), dconste
));
8048 exponent
= CALL_EXPR_ARG (arg
, 0);
8050 CASE_FLT_FN (BUILT_IN_EXP2
):
8051 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8052 x
= build_real (type
, dconst2
);
8053 exponent
= CALL_EXPR_ARG (arg
, 0);
8055 CASE_FLT_FN (BUILT_IN_EXP10
):
8056 CASE_FLT_FN (BUILT_IN_POW10
):
8057 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8058 x
= build_real (type
, dconst10
);
8059 exponent
= CALL_EXPR_ARG (arg
, 0);
8061 CASE_FLT_FN (BUILT_IN_SQRT
):
8062 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8063 x
= CALL_EXPR_ARG (arg
, 0);
8064 exponent
= build_real (type
, dconsthalf
);
8066 CASE_FLT_FN (BUILT_IN_CBRT
):
8067 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8068 x
= CALL_EXPR_ARG (arg
, 0);
8069 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8072 CASE_FLT_FN (BUILT_IN_POW
):
8073 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8074 x
= CALL_EXPR_ARG (arg
, 0);
8075 exponent
= CALL_EXPR_ARG (arg
, 1);
8081 /* Now perform the optimization. */
8084 tree logfn
= build_call_expr (fndecl
, 1, x
);
8085 return fold_build2 (MULT_EXPR
, type
, exponent
, logfn
);
8093 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8094 NULL_TREE if no simplification can be made. */
8097 fold_builtin_hypot (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8099 tree res
, narg0
, narg1
;
8101 if (!validate_arg (arg0
, REAL_TYPE
)
8102 || !validate_arg (arg1
, REAL_TYPE
))
8105 /* Calculate the result when the argument is a constant. */
8106 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8109 /* If either argument to hypot has a negate or abs, strip that off.
8110 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8111 narg0
= fold_strip_sign_ops (arg0
);
8112 narg1
= fold_strip_sign_ops (arg1
);
8115 return build_call_expr (fndecl
, 2, narg0
? narg0
: arg0
,
8116 narg1
? narg1
: arg1
);
8119 /* If either argument is zero, hypot is fabs of the other. */
8120 if (real_zerop (arg0
))
8121 return fold_build1 (ABS_EXPR
, type
, arg1
);
8122 else if (real_zerop (arg1
))
8123 return fold_build1 (ABS_EXPR
, type
, arg0
);
8125 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8126 if (flag_unsafe_math_optimizations
8127 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8129 const REAL_VALUE_TYPE sqrt2_trunc
8130 = real_value_truncate (TYPE_MODE (type
), dconstsqrt2
);
8131 return fold_build2 (MULT_EXPR
, type
,
8132 fold_build1 (ABS_EXPR
, type
, arg0
),
8133 build_real (type
, sqrt2_trunc
));
8140 /* Fold a builtin function call to pow, powf, or powl. Return
8141 NULL_TREE if no simplification can be made. */
8143 fold_builtin_pow (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8147 if (!validate_arg (arg0
, REAL_TYPE
)
8148 || !validate_arg (arg1
, REAL_TYPE
))
8151 /* Calculate the result when the argument is a constant. */
8152 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8155 /* Optimize pow(1.0,y) = 1.0. */
8156 if (real_onep (arg0
))
8157 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8159 if (TREE_CODE (arg1
) == REAL_CST
8160 && !TREE_OVERFLOW (arg1
))
8162 REAL_VALUE_TYPE cint
;
8166 c
= TREE_REAL_CST (arg1
);
8168 /* Optimize pow(x,0.0) = 1.0. */
8169 if (REAL_VALUES_EQUAL (c
, dconst0
))
8170 return omit_one_operand (type
, build_real (type
, dconst1
),
8173 /* Optimize pow(x,1.0) = x. */
8174 if (REAL_VALUES_EQUAL (c
, dconst1
))
8177 /* Optimize pow(x,-1.0) = 1.0/x. */
8178 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8179 return fold_build2 (RDIV_EXPR
, type
,
8180 build_real (type
, dconst1
), arg0
);
8182 /* Optimize pow(x,0.5) = sqrt(x). */
8183 if (flag_unsafe_math_optimizations
8184 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8186 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8188 if (sqrtfn
!= NULL_TREE
)
8189 return build_call_expr (sqrtfn
, 1, arg0
);
8192 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8193 if (flag_unsafe_math_optimizations
)
8195 const REAL_VALUE_TYPE dconstroot
8196 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
8198 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8200 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8201 if (cbrtfn
!= NULL_TREE
)
8202 return build_call_expr (cbrtfn
, 1, arg0
);
8206 /* Check for an integer exponent. */
8207 n
= real_to_integer (&c
);
8208 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8209 if (real_identical (&c
, &cint
))
8211 /* Attempt to evaluate pow at compile-time. */
8212 if (TREE_CODE (arg0
) == REAL_CST
8213 && !TREE_OVERFLOW (arg0
))
8218 x
= TREE_REAL_CST (arg0
);
8219 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8220 if (flag_unsafe_math_optimizations
|| !inexact
)
8221 return build_real (type
, x
);
8224 /* Strip sign ops from even integer powers. */
8225 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8227 tree narg0
= fold_strip_sign_ops (arg0
);
8229 return build_call_expr (fndecl
, 2, narg0
, arg1
);
8234 if (flag_unsafe_math_optimizations
)
8236 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8238 /* Optimize pow(expN(x),y) = expN(x*y). */
8239 if (BUILTIN_EXPONENT_P (fcode
))
8241 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8242 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8243 arg
= fold_build2 (MULT_EXPR
, type
, arg
, arg1
);
8244 return build_call_expr (expfn
, 1, arg
);
8247 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8248 if (BUILTIN_SQRT_P (fcode
))
8250 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8251 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8252 build_real (type
, dconsthalf
));
8253 return build_call_expr (fndecl
, 2, narg0
, narg1
);
8256 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8257 if (BUILTIN_CBRT_P (fcode
))
8259 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8260 if (tree_expr_nonnegative_p (arg
))
8262 const REAL_VALUE_TYPE dconstroot
8263 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
8264 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8265 build_real (type
, dconstroot
));
8266 return build_call_expr (fndecl
, 2, arg
, narg1
);
8270 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8271 if (fcode
== BUILT_IN_POW
8272 || fcode
== BUILT_IN_POWF
8273 || fcode
== BUILT_IN_POWL
)
8275 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8276 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8277 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg01
, arg1
);
8278 return build_call_expr (fndecl
, 2, arg00
, narg1
);
8285 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8286 Return NULL_TREE if no simplification can be made. */
8288 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED
,
8289 tree arg0
, tree arg1
, tree type
)
8291 if (!validate_arg (arg0
, REAL_TYPE
)
8292 || !validate_arg (arg1
, INTEGER_TYPE
))
8295 /* Optimize pow(1.0,y) = 1.0. */
8296 if (real_onep (arg0
))
8297 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8299 if (host_integerp (arg1
, 0))
8301 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8303 /* Evaluate powi at compile-time. */
8304 if (TREE_CODE (arg0
) == REAL_CST
8305 && !TREE_OVERFLOW (arg0
))
8308 x
= TREE_REAL_CST (arg0
);
8309 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8310 return build_real (type
, x
);
8313 /* Optimize pow(x,0) = 1.0. */
8315 return omit_one_operand (type
, build_real (type
, dconst1
),
8318 /* Optimize pow(x,1) = x. */
8322 /* Optimize pow(x,-1) = 1.0/x. */
8324 return fold_build2 (RDIV_EXPR
, type
,
8325 build_real (type
, dconst1
), arg0
);
8331 /* A subroutine of fold_builtin to fold the various exponent
8332 functions. Return NULL_TREE if no simplification can be made.
8333 FUNC is the corresponding MPFR exponent function. */
8336 fold_builtin_exponent (tree fndecl
, tree arg
,
8337 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8339 if (validate_arg (arg
, REAL_TYPE
))
8341 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8344 /* Calculate the result when the argument is a constant. */
8345 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8348 /* Optimize expN(logN(x)) = x. */
8349 if (flag_unsafe_math_optimizations
)
8351 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8353 if ((func
== mpfr_exp
8354 && (fcode
== BUILT_IN_LOG
8355 || fcode
== BUILT_IN_LOGF
8356 || fcode
== BUILT_IN_LOGL
))
8357 || (func
== mpfr_exp2
8358 && (fcode
== BUILT_IN_LOG2
8359 || fcode
== BUILT_IN_LOG2F
8360 || fcode
== BUILT_IN_LOG2L
))
8361 || (func
== mpfr_exp10
8362 && (fcode
== BUILT_IN_LOG10
8363 || fcode
== BUILT_IN_LOG10F
8364 || fcode
== BUILT_IN_LOG10L
)))
8365 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
8372 /* Return true if VAR is a VAR_DECL or a component thereof. */
8375 var_decl_component_p (tree var
)
8378 while (handled_component_p (inner
))
8379 inner
= TREE_OPERAND (inner
, 0);
8380 return SSA_VAR_P (inner
);
8383 /* Fold function call to builtin memset. Return
8384 NULL_TREE if no simplification can be made. */
8387 fold_builtin_memset (tree dest
, tree c
, tree len
, tree type
, bool ignore
)
8390 unsigned HOST_WIDE_INT length
, cval
;
8392 if (! validate_arg (dest
, POINTER_TYPE
)
8393 || ! validate_arg (c
, INTEGER_TYPE
)
8394 || ! validate_arg (len
, INTEGER_TYPE
))
8397 if (! host_integerp (len
, 1))
8400 /* If the LEN parameter is zero, return DEST. */
8401 if (integer_zerop (len
))
8402 return omit_one_operand (type
, dest
, c
);
8404 if (! host_integerp (c
, 1) || TREE_SIDE_EFFECTS (dest
))
8409 if (TREE_CODE (var
) != ADDR_EXPR
)
8412 var
= TREE_OPERAND (var
, 0);
8413 if (TREE_THIS_VOLATILE (var
))
8416 if (!INTEGRAL_TYPE_P (TREE_TYPE (var
))
8417 && !POINTER_TYPE_P (TREE_TYPE (var
)))
8420 if (! var_decl_component_p (var
))
8423 length
= tree_low_cst (len
, 1);
8424 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var
))) != length
8425 || get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
8429 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8432 if (integer_zerop (c
))
8436 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8439 cval
= tree_low_cst (c
, 1);
8443 cval
|= (cval
<< 31) << 1;
8446 ret
= build_int_cst_type (TREE_TYPE (var
), cval
);
8447 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, ret
);
8451 return omit_one_operand (type
, dest
, ret
);
8454 /* Fold function call to builtin memset. Return
8455 NULL_TREE if no simplification can be made. */
8458 fold_builtin_bzero (tree dest
, tree size
, bool ignore
)
8460 if (! validate_arg (dest
, POINTER_TYPE
)
8461 || ! validate_arg (size
, INTEGER_TYPE
))
8467 /* New argument list transforming bzero(ptr x, int y) to
8468 memset(ptr x, int 0, size_t y). This is done this way
8469 so that if it isn't expanded inline, we fallback to
8470 calling bzero instead of memset. */
8472 return fold_builtin_memset (dest
, integer_zero_node
,
8473 fold_convert (sizetype
, size
),
8474 void_type_node
, ignore
);
8477 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8478 NULL_TREE if no simplification can be made.
8479 If ENDP is 0, return DEST (like memcpy).
8480 If ENDP is 1, return DEST+LEN (like mempcpy).
8481 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8482 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8486 fold_builtin_memory_op (tree dest
, tree src
, tree len
, tree type
, bool ignore
, int endp
)
8488 tree destvar
, srcvar
, expr
;
8490 if (! validate_arg (dest
, POINTER_TYPE
)
8491 || ! validate_arg (src
, POINTER_TYPE
)
8492 || ! validate_arg (len
, INTEGER_TYPE
))
8495 /* If the LEN parameter is zero, return DEST. */
8496 if (integer_zerop (len
))
8497 return omit_one_operand (type
, dest
, src
);
8499 /* If SRC and DEST are the same (and not volatile), return
8500 DEST{,+LEN,+LEN-1}. */
8501 if (operand_equal_p (src
, dest
, 0))
8505 tree srctype
, desttype
;
8508 int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8509 int dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8511 /* Both DEST and SRC must be pointer types.
8512 ??? This is what old code did. Is the testing for pointer types
8515 If either SRC is readonly or length is 1, we can use memcpy. */
8516 if (dest_align
&& src_align
8517 && (readonly_data_expr (src
)
8518 || (host_integerp (len
, 1)
8519 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
>=
8520 tree_low_cst (len
, 1)))))
8522 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8525 return build_call_expr (fn
, 3, dest
, src
, len
);
8530 if (!host_integerp (len
, 0))
8533 This logic lose for arguments like (type *)malloc (sizeof (type)),
8534 since we strip the casts of up to VOID return value from malloc.
8535 Perhaps we ought to inherit type from non-VOID argument here? */
8538 srctype
= TREE_TYPE (TREE_TYPE (src
));
8539 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8540 if (!srctype
|| !desttype
8541 || !TYPE_SIZE_UNIT (srctype
)
8542 || !TYPE_SIZE_UNIT (desttype
)
8543 || TREE_CODE (TYPE_SIZE_UNIT (srctype
)) != INTEGER_CST
8544 || TREE_CODE (TYPE_SIZE_UNIT (desttype
)) != INTEGER_CST
8545 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
8546 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8549 if (get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
)
8550 < (int) TYPE_ALIGN (desttype
)
8551 || (get_pointer_alignment (src
, BIGGEST_ALIGNMENT
)
8552 < (int) TYPE_ALIGN (srctype
)))
8556 dest
= builtin_save_expr (dest
);
8558 srcvar
= build_fold_indirect_ref (src
);
8559 if (TREE_THIS_VOLATILE (srcvar
))
8561 if (!tree_int_cst_equal (lang_hooks
.expr_size (srcvar
), len
))
8563 /* With memcpy, it is possible to bypass aliasing rules, so without
8564 this check i. e. execute/20060930-2.c would be misoptimized, because
8565 it use conflicting alias set to hold argument for the memcpy call.
8566 This check is probably unnecesary with -fno-strict-aliasing.
8567 Similarly for destvar. See also PR29286. */
8568 if (!var_decl_component_p (srcvar
)
8569 /* Accept: memcpy (*char_var, "test", 1); that simplify
8571 || is_gimple_min_invariant (srcvar
)
8572 || readonly_data_expr (src
))
8575 destvar
= build_fold_indirect_ref (dest
);
8576 if (TREE_THIS_VOLATILE (destvar
))
8578 if (!tree_int_cst_equal (lang_hooks
.expr_size (destvar
), len
))
8580 if (!var_decl_component_p (destvar
))
8583 if (srctype
== desttype
8584 || (gimple_in_ssa_p (cfun
)
8585 && tree_ssa_useless_type_conversion_1 (desttype
, srctype
)))
8587 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar
))
8588 || POINTER_TYPE_P (TREE_TYPE (srcvar
)))
8589 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar
))
8590 || POINTER_TYPE_P (TREE_TYPE (destvar
))))
8591 expr
= fold_convert (TREE_TYPE (destvar
), srcvar
);
8593 expr
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (destvar
), srcvar
);
8594 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, expr
);
8600 if (endp
== 0 || endp
== 3)
8601 return omit_one_operand (type
, dest
, expr
);
8607 len
= fold_build2 (MINUS_EXPR
, TREE_TYPE (len
), len
,
8610 len
= fold_convert (TREE_TYPE (dest
), len
);
8611 dest
= fold_build2 (PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
8612 dest
= fold_convert (type
, dest
);
8614 dest
= omit_one_operand (type
, dest
, expr
);
8618 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8619 If LEN is not NULL, it represents the length of the string to be
8620 copied. Return NULL_TREE if no simplification can be made. */
8623 fold_builtin_strcpy (tree fndecl
, tree dest
, tree src
, tree len
)
8627 if (!validate_arg (dest
, POINTER_TYPE
)
8628 || !validate_arg (src
, POINTER_TYPE
))
8631 /* If SRC and DEST are the same (and not volatile), return DEST. */
8632 if (operand_equal_p (src
, dest
, 0))
8633 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8638 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8644 len
= c_strlen (src
, 1);
8645 if (! len
|| TREE_SIDE_EFFECTS (len
))
8649 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
8650 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8651 build_call_expr (fn
, 3, dest
, src
, len
));
8654 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8655 If SLEN is not NULL, it represents the length of the source string.
8656 Return NULL_TREE if no simplification can be made. */
8659 fold_builtin_strncpy (tree fndecl
, tree dest
, tree src
, tree len
, tree slen
)
8663 if (!validate_arg (dest
, POINTER_TYPE
)
8664 || !validate_arg (src
, POINTER_TYPE
)
8665 || !validate_arg (len
, INTEGER_TYPE
))
8668 /* If the LEN parameter is zero, return DEST. */
8669 if (integer_zerop (len
))
8670 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
8672 /* We can't compare slen with len as constants below if len is not a
8674 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
8678 slen
= c_strlen (src
, 1);
8680 /* Now, we must be passed a constant src ptr parameter. */
8681 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
8684 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
8686 /* We do not support simplification of this case, though we do
8687 support it when expanding trees into RTL. */
8688 /* FIXME: generate a call to __builtin_memset. */
8689 if (tree_int_cst_lt (slen
, len
))
8692 /* OK transform into builtin memcpy. */
8693 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8696 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8697 build_call_expr (fn
, 3, dest
, src
, len
));
8700 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8701 arguments to the call, and TYPE is its return type.
8702 Return NULL_TREE if no simplification can be made. */
8705 fold_builtin_memchr (tree arg1
, tree arg2
, tree len
, tree type
)
8707 if (!validate_arg (arg1
, POINTER_TYPE
)
8708 || !validate_arg (arg2
, INTEGER_TYPE
)
8709 || !validate_arg (len
, INTEGER_TYPE
))
8715 if (TREE_CODE (arg2
) != INTEGER_CST
8716 || !host_integerp (len
, 1))
8719 p1
= c_getstr (arg1
);
8720 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8726 if (target_char_cast (arg2
, &c
))
8729 r
= memchr (p1
, c
, tree_low_cst (len
, 1));
8732 return build_int_cst (TREE_TYPE (arg1
), 0);
8734 tem
= fold_build2 (PLUS_EXPR
, TREE_TYPE (arg1
), arg1
,
8735 build_int_cst (TREE_TYPE (arg1
), r
- p1
));
8736 return fold_convert (type
, tem
);
8742 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8743 Return NULL_TREE if no simplification can be made. */
8746 fold_builtin_memcmp (tree arg1
, tree arg2
, tree len
)
8748 const char *p1
, *p2
;
8750 if (!validate_arg (arg1
, POINTER_TYPE
)
8751 || !validate_arg (arg2
, POINTER_TYPE
)
8752 || !validate_arg (len
, INTEGER_TYPE
))
8755 /* If the LEN parameter is zero, return zero. */
8756 if (integer_zerop (len
))
8757 return omit_two_operands (integer_type_node
, integer_zero_node
,
8760 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8761 if (operand_equal_p (arg1
, arg2
, 0))
8762 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
8764 p1
= c_getstr (arg1
);
8765 p2
= c_getstr (arg2
);
8767 /* If all arguments are constant, and the value of len is not greater
8768 than the lengths of arg1 and arg2, evaluate at compile-time. */
8769 if (host_integerp (len
, 1) && p1
&& p2
8770 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8771 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8773 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
8776 return integer_one_node
;
8778 return integer_minus_one_node
;
8780 return integer_zero_node
;
8783 /* If len parameter is one, return an expression corresponding to
8784 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8785 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8787 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8788 tree cst_uchar_ptr_node
8789 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8791 tree ind1
= fold_convert (integer_type_node
,
8792 build1 (INDIRECT_REF
, cst_uchar_node
,
8793 fold_convert (cst_uchar_ptr_node
,
8795 tree ind2
= fold_convert (integer_type_node
,
8796 build1 (INDIRECT_REF
, cst_uchar_node
,
8797 fold_convert (cst_uchar_ptr_node
,
8799 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8805 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8806 Return NULL_TREE if no simplification can be made. */
8809 fold_builtin_strcmp (tree arg1
, tree arg2
)
8811 const char *p1
, *p2
;
8813 if (!validate_arg (arg1
, POINTER_TYPE
)
8814 || !validate_arg (arg2
, POINTER_TYPE
))
8817 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8818 if (operand_equal_p (arg1
, arg2
, 0))
8819 return integer_zero_node
;
8821 p1
= c_getstr (arg1
);
8822 p2
= c_getstr (arg2
);
8826 const int i
= strcmp (p1
, p2
);
8828 return integer_minus_one_node
;
8830 return integer_one_node
;
8832 return integer_zero_node
;
8835 /* If the second arg is "", return *(const unsigned char*)arg1. */
8836 if (p2
&& *p2
== '\0')
8838 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8839 tree cst_uchar_ptr_node
8840 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8842 return fold_convert (integer_type_node
,
8843 build1 (INDIRECT_REF
, cst_uchar_node
,
8844 fold_convert (cst_uchar_ptr_node
,
8848 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8849 if (p1
&& *p1
== '\0')
8851 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8852 tree cst_uchar_ptr_node
8853 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8855 tree temp
= fold_convert (integer_type_node
,
8856 build1 (INDIRECT_REF
, cst_uchar_node
,
8857 fold_convert (cst_uchar_ptr_node
,
8859 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
8865 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8866 Return NULL_TREE if no simplification can be made. */
8869 fold_builtin_strncmp (tree arg1
, tree arg2
, tree len
)
8871 const char *p1
, *p2
;
8873 if (!validate_arg (arg1
, POINTER_TYPE
)
8874 || !validate_arg (arg2
, POINTER_TYPE
)
8875 || !validate_arg (len
, INTEGER_TYPE
))
8878 /* If the LEN parameter is zero, return zero. */
8879 if (integer_zerop (len
))
8880 return omit_two_operands (integer_type_node
, integer_zero_node
,
8883 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8884 if (operand_equal_p (arg1
, arg2
, 0))
8885 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
8887 p1
= c_getstr (arg1
);
8888 p2
= c_getstr (arg2
);
8890 if (host_integerp (len
, 1) && p1
&& p2
)
8892 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
8894 return integer_one_node
;
8896 return integer_minus_one_node
;
8898 return integer_zero_node
;
8901 /* If the second arg is "", and the length is greater than zero,
8902 return *(const unsigned char*)arg1. */
8903 if (p2
&& *p2
== '\0'
8904 && TREE_CODE (len
) == INTEGER_CST
8905 && tree_int_cst_sgn (len
) == 1)
8907 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8908 tree cst_uchar_ptr_node
8909 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8911 return fold_convert (integer_type_node
,
8912 build1 (INDIRECT_REF
, cst_uchar_node
,
8913 fold_convert (cst_uchar_ptr_node
,
8917 /* If the first arg is "", and the length is greater than zero,
8918 return -*(const unsigned char*)arg2. */
8919 if (p1
&& *p1
== '\0'
8920 && TREE_CODE (len
) == INTEGER_CST
8921 && tree_int_cst_sgn (len
) == 1)
8923 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8924 tree cst_uchar_ptr_node
8925 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8927 tree temp
= fold_convert (integer_type_node
,
8928 build1 (INDIRECT_REF
, cst_uchar_node
,
8929 fold_convert (cst_uchar_ptr_node
,
8931 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
8934 /* If len parameter is one, return an expression corresponding to
8935 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8936 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8938 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8939 tree cst_uchar_ptr_node
8940 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8942 tree ind1
= fold_convert (integer_type_node
,
8943 build1 (INDIRECT_REF
, cst_uchar_node
,
8944 fold_convert (cst_uchar_ptr_node
,
8946 tree ind2
= fold_convert (integer_type_node
,
8947 build1 (INDIRECT_REF
, cst_uchar_node
,
8948 fold_convert (cst_uchar_ptr_node
,
8950 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8956 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8957 ARG. Return NULL_TREE if no simplification can be made. */
8960 fold_builtin_signbit (tree arg
, tree type
)
8964 if (!validate_arg (arg
, REAL_TYPE
))
8967 /* If ARG is a compile-time constant, determine the result. */
8968 if (TREE_CODE (arg
) == REAL_CST
8969 && !TREE_OVERFLOW (arg
))
8973 c
= TREE_REAL_CST (arg
);
8974 temp
= REAL_VALUE_NEGATIVE (c
) ? integer_one_node
: integer_zero_node
;
8975 return fold_convert (type
, temp
);
8978 /* If ARG is non-negative, the result is always zero. */
8979 if (tree_expr_nonnegative_p (arg
))
8980 return omit_one_operand (type
, integer_zero_node
, arg
);
8982 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8983 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
8984 return fold_build2 (LT_EXPR
, type
, arg
,
8985 build_real (TREE_TYPE (arg
), dconst0
));
8990 /* Fold function call to builtin copysign, copysignf or copysignl with
8991 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8995 fold_builtin_copysign (tree fndecl
, tree arg1
, tree arg2
, tree type
)
8999 if (!validate_arg (arg1
, REAL_TYPE
)
9000 || !validate_arg (arg2
, REAL_TYPE
))
9003 /* copysign(X,X) is X. */
9004 if (operand_equal_p (arg1
, arg2
, 0))
9005 return fold_convert (type
, arg1
);
9007 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9008 if (TREE_CODE (arg1
) == REAL_CST
9009 && TREE_CODE (arg2
) == REAL_CST
9010 && !TREE_OVERFLOW (arg1
)
9011 && !TREE_OVERFLOW (arg2
))
9013 REAL_VALUE_TYPE c1
, c2
;
9015 c1
= TREE_REAL_CST (arg1
);
9016 c2
= TREE_REAL_CST (arg2
);
9017 /* c1.sign := c2.sign. */
9018 real_copysign (&c1
, &c2
);
9019 return build_real (type
, c1
);
9022 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9023 Remember to evaluate Y for side-effects. */
9024 if (tree_expr_nonnegative_p (arg2
))
9025 return omit_one_operand (type
,
9026 fold_build1 (ABS_EXPR
, type
, arg1
),
9029 /* Strip sign changing operations for the first argument. */
9030 tem
= fold_strip_sign_ops (arg1
);
9032 return build_call_expr (fndecl
, 2, tem
, arg2
);
9037 /* Fold a call to builtin isascii with argument ARG. */
9040 fold_builtin_isascii (tree arg
)
9042 if (!validate_arg (arg
, INTEGER_TYPE
))
9046 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9047 arg
= build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9048 build_int_cst (NULL_TREE
,
9049 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9050 return fold_build2 (EQ_EXPR
, integer_type_node
,
9051 arg
, integer_zero_node
);
9055 /* Fold a call to builtin toascii with argument ARG. */
9058 fold_builtin_toascii (tree arg
)
9060 if (!validate_arg (arg
, INTEGER_TYPE
))
9063 /* Transform toascii(c) -> (c & 0x7f). */
9064 return fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9065 build_int_cst (NULL_TREE
, 0x7f));
9068 /* Fold a call to builtin isdigit with argument ARG. */
9071 fold_builtin_isdigit (tree arg
)
9073 if (!validate_arg (arg
, INTEGER_TYPE
))
9077 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9078 /* According to the C standard, isdigit is unaffected by locale.
9079 However, it definitely is affected by the target character set. */
9080 unsigned HOST_WIDE_INT target_digit0
9081 = lang_hooks
.to_target_charset ('0');
9083 if (target_digit0
== 0)
9086 arg
= fold_convert (unsigned_type_node
, arg
);
9087 arg
= build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9088 build_int_cst (unsigned_type_node
, target_digit0
));
9089 return fold_build2 (LE_EXPR
, integer_type_node
, arg
,
9090 build_int_cst (unsigned_type_node
, 9));
9094 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9097 fold_builtin_fabs (tree arg
, tree type
)
9099 if (!validate_arg (arg
, REAL_TYPE
))
9102 arg
= fold_convert (type
, arg
);
9103 if (TREE_CODE (arg
) == REAL_CST
)
9104 return fold_abs_const (arg
, type
);
9105 return fold_build1 (ABS_EXPR
, type
, arg
);
9108 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9111 fold_builtin_abs (tree arg
, tree type
)
9113 if (!validate_arg (arg
, INTEGER_TYPE
))
9116 arg
= fold_convert (type
, arg
);
9117 if (TREE_CODE (arg
) == INTEGER_CST
)
9118 return fold_abs_const (arg
, type
);
9119 return fold_build1 (ABS_EXPR
, type
, arg
);
9122 /* Fold a call to builtin fmin or fmax. */
9125 fold_builtin_fmin_fmax (tree arg0
, tree arg1
, tree type
, bool max
)
9127 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9129 /* Calculate the result when the argument is a constant. */
9130 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9135 /* If either argument is NaN, return the other one. Avoid the
9136 transformation if we get (and honor) a signalling NaN. Using
9137 omit_one_operand() ensures we create a non-lvalue. */
9138 if (TREE_CODE (arg0
) == REAL_CST
9139 && real_isnan (&TREE_REAL_CST (arg0
))
9140 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9141 || ! TREE_REAL_CST (arg0
).signalling
))
9142 return omit_one_operand (type
, arg1
, arg0
);
9143 if (TREE_CODE (arg1
) == REAL_CST
9144 && real_isnan (&TREE_REAL_CST (arg1
))
9145 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9146 || ! TREE_REAL_CST (arg1
).signalling
))
9147 return omit_one_operand (type
, arg0
, arg1
);
9149 /* Transform fmin/fmax(x,x) -> x. */
9150 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9151 return omit_one_operand (type
, arg0
, arg1
);
9153 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9154 functions to return the numeric arg if the other one is NaN.
9155 These tree codes don't honor that, so only transform if
9156 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9157 handled, so we don't have to worry about it either. */
9158 if (flag_finite_math_only
)
9159 return fold_build2 ((max
? MAX_EXPR
: MIN_EXPR
), type
,
9160 fold_convert (type
, arg0
),
9161 fold_convert (type
, arg1
));
9166 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9169 fold_builtin_carg (tree arg
, tree type
)
9171 if (validate_arg (arg
, COMPLEX_TYPE
))
9173 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9177 tree new_arg
= builtin_save_expr (arg
);
9178 tree r_arg
= fold_build1 (REALPART_EXPR
, type
, new_arg
);
9179 tree i_arg
= fold_build1 (IMAGPART_EXPR
, type
, new_arg
);
9180 return build_call_expr (atan2_fn
, 2, i_arg
, r_arg
);
9187 /* Fold a call to builtin logb/ilogb. */
9190 fold_builtin_logb (tree arg
, tree rettype
)
9192 if (! validate_arg (arg
, REAL_TYPE
))
9197 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9199 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9205 /* If arg is Inf or NaN and we're logb, return it. */
9206 if (TREE_CODE (rettype
) == REAL_TYPE
)
9207 return fold_convert (rettype
, arg
);
9208 /* Fall through... */
9210 /* Zero may set errno and/or raise an exception for logb, also
9211 for ilogb we don't know FP_ILOGB0. */
9214 /* For normal numbers, proceed iff radix == 2. In GCC,
9215 normalized significands are in the range [0.5, 1.0). We
9216 want the exponent as if they were [1.0, 2.0) so get the
9217 exponent and subtract 1. */
9218 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9219 return fold_convert (rettype
, build_int_cst (NULL_TREE
,
9220 REAL_EXP (value
)-1));
9228 /* Fold a call to builtin significand, if radix == 2. */
9231 fold_builtin_significand (tree arg
, tree rettype
)
9233 if (! validate_arg (arg
, REAL_TYPE
))
9238 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9240 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9247 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9248 return fold_convert (rettype
, arg
);
9250 /* For normal numbers, proceed iff radix == 2. */
9251 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9253 REAL_VALUE_TYPE result
= *value
;
9254 /* In GCC, normalized significands are in the range [0.5,
9255 1.0). We want them to be [1.0, 2.0) so set the
9257 SET_REAL_EXP (&result
, 1);
9258 return build_real (rettype
, result
);
9267 /* Fold a call to builtin frexp, we can assume the base is 2. */
9270 fold_builtin_frexp (tree arg0
, tree arg1
, tree rettype
)
9272 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9277 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9280 arg1
= build_fold_indirect_ref (arg1
);
9282 /* Proceed if a valid pointer type was passed in. */
9283 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9285 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9291 /* For +-0, return (*exp = 0, +-0). */
9292 exp
= integer_zero_node
;
9297 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9298 return omit_one_operand (rettype
, arg0
, arg1
);
9301 /* Since the frexp function always expects base 2, and in
9302 GCC normalized significands are already in the range
9303 [0.5, 1.0), we have exactly what frexp wants. */
9304 REAL_VALUE_TYPE frac_rvt
= *value
;
9305 SET_REAL_EXP (&frac_rvt
, 0);
9306 frac
= build_real (rettype
, frac_rvt
);
9307 exp
= build_int_cst (NULL_TREE
, REAL_EXP (value
));
9314 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9315 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
, exp
);
9316 TREE_SIDE_EFFECTS (arg1
) = 1;
9317 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
, frac
);
9323 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9324 then we can assume the base is two. If it's false, then we have to
9325 check the mode of the TYPE parameter in certain cases. */
9328 fold_builtin_load_exponent (tree arg0
, tree arg1
, tree type
, bool ldexp
)
9330 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9335 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9336 if (real_zerop (arg0
) || integer_zerop (arg1
)
9337 || (TREE_CODE (arg0
) == REAL_CST
9338 && (real_isnan (&TREE_REAL_CST (arg0
))
9339 || real_isinf (&TREE_REAL_CST (arg0
)))))
9340 return omit_one_operand (type
, arg0
, arg1
);
9342 /* If both arguments are constant, then try to evaluate it. */
9343 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9344 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9345 && host_integerp (arg1
, 0))
9347 /* Bound the maximum adjustment to twice the range of the
9348 mode's valid exponents. Use abs to ensure the range is
9349 positive as a sanity check. */
9350 const long max_exp_adj
= 2 *
9351 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9352 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9354 /* Get the user-requested adjustment. */
9355 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9357 /* The requested adjustment must be inside this range. This
9358 is a preliminary cap to avoid things like overflow, we
9359 may still fail to compute the result for other reasons. */
9360 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9362 REAL_VALUE_TYPE initial_result
;
9364 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9366 /* Ensure we didn't overflow. */
9367 if (! real_isinf (&initial_result
))
9369 const REAL_VALUE_TYPE trunc_result
9370 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9372 /* Only proceed if the target mode can hold the
9374 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9375 return build_real (type
, trunc_result
);
9384 /* Fold a call to builtin modf. */
9387 fold_builtin_modf (tree arg0
, tree arg1
, tree rettype
)
9389 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9394 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9397 arg1
= build_fold_indirect_ref (arg1
);
9399 /* Proceed if a valid pointer type was passed in. */
9400 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9402 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9403 REAL_VALUE_TYPE trunc
, frac
;
9409 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9410 trunc
= frac
= *value
;
9413 /* For +-Inf, return (*arg1 = arg0, +-0). */
9415 frac
.sign
= value
->sign
;
9419 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9420 real_trunc (&trunc
, VOIDmode
, value
);
9421 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9422 /* If the original number was negative and already
9423 integral, then the fractional part is -0.0. */
9424 if (value
->sign
&& frac
.cl
== rvc_zero
)
9425 frac
.sign
= value
->sign
;
9429 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9430 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
,
9431 build_real (rettype
, trunc
));
9432 TREE_SIDE_EFFECTS (arg1
) = 1;
9433 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
,
9434 build_real (rettype
, frac
));
9440 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9441 ARG is the argument for the call. */
9444 fold_builtin_classify (tree fndecl
, tree arg
, int builtin_index
)
9446 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9449 if (!validate_arg (arg
, REAL_TYPE
))
9451 error ("non-floating-point argument to function %qs",
9452 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9453 return error_mark_node
;
9456 switch (builtin_index
)
9458 case BUILT_IN_ISINF
:
9459 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9460 return omit_one_operand (type
, integer_zero_node
, arg
);
9462 if (TREE_CODE (arg
) == REAL_CST
)
9464 r
= TREE_REAL_CST (arg
);
9465 if (real_isinf (&r
))
9466 return real_compare (GT_EXPR
, &r
, &dconst0
)
9467 ? integer_one_node
: integer_minus_one_node
;
9469 return integer_zero_node
;
9474 case BUILT_IN_FINITE
:
9475 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9476 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9477 return omit_one_operand (type
, integer_one_node
, arg
);
9479 if (TREE_CODE (arg
) == REAL_CST
)
9481 r
= TREE_REAL_CST (arg
);
9482 return real_isinf (&r
) || real_isnan (&r
)
9483 ? integer_zero_node
: integer_one_node
;
9488 case BUILT_IN_ISNAN
:
9489 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9490 return omit_one_operand (type
, integer_zero_node
, arg
);
9492 if (TREE_CODE (arg
) == REAL_CST
)
9494 r
= TREE_REAL_CST (arg
);
9495 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9498 arg
= builtin_save_expr (arg
);
9499 return fold_build2 (UNORDERED_EXPR
, type
, arg
, arg
);
9506 /* Fold a call to an unordered comparison function such as
9507 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9508 being called and ARG0 and ARG1 are the arguments for the call.
9509 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9510 the opposite of the desired result. UNORDERED_CODE is used
9511 for modes that can hold NaNs and ORDERED_CODE is used for
9515 fold_builtin_unordered_cmp (tree fndecl
, tree arg0
, tree arg1
,
9516 enum tree_code unordered_code
,
9517 enum tree_code ordered_code
)
9519 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9520 enum tree_code code
;
9522 enum tree_code code0
, code1
;
9523 tree cmp_type
= NULL_TREE
;
9525 type0
= TREE_TYPE (arg0
);
9526 type1
= TREE_TYPE (arg1
);
9528 code0
= TREE_CODE (type0
);
9529 code1
= TREE_CODE (type1
);
9531 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9532 /* Choose the wider of two real types. */
9533 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9535 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9537 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9541 error ("non-floating-point argument to function %qs",
9542 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9543 return error_mark_node
;
9546 arg0
= fold_convert (cmp_type
, arg0
);
9547 arg1
= fold_convert (cmp_type
, arg1
);
9549 if (unordered_code
== UNORDERED_EXPR
)
9551 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9552 return omit_two_operands (type
, integer_zero_node
, arg0
, arg1
);
9553 return fold_build2 (UNORDERED_EXPR
, type
, arg0
, arg1
);
9556 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
9558 return fold_build1 (TRUTH_NOT_EXPR
, type
,
9559 fold_build2 (code
, type
, arg0
, arg1
));
9562 /* Fold a call to built-in function FNDECL with 0 arguments.
9563 IGNORE is true if the result of the function call is ignored. This
9564 function returns NULL_TREE if no simplification was possible. */
9567 fold_builtin_0 (tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
9569 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9570 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9573 CASE_FLT_FN (BUILT_IN_INF
):
9574 case BUILT_IN_INFD32
:
9575 case BUILT_IN_INFD64
:
9576 case BUILT_IN_INFD128
:
9577 return fold_builtin_inf (type
, true);
9579 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9580 return fold_builtin_inf (type
, false);
9582 case BUILT_IN_CLASSIFY_TYPE
:
9583 return fold_builtin_classify_type (NULL_TREE
);
9591 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9592 IGNORE is true if the result of the function call is ignored. This
9593 function returns NULL_TREE if no simplification was possible. */
9596 fold_builtin_1 (tree fndecl
, tree arg0
, bool ignore
)
9598 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9599 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9603 case BUILT_IN_CONSTANT_P
:
9605 tree val
= fold_builtin_constant_p (arg0
);
9607 /* Gimplification will pull the CALL_EXPR for the builtin out of
9608 an if condition. When not optimizing, we'll not CSE it back.
9609 To avoid link error types of regressions, return false now. */
9610 if (!val
&& !optimize
)
9611 val
= integer_zero_node
;
9616 case BUILT_IN_CLASSIFY_TYPE
:
9617 return fold_builtin_classify_type (arg0
);
9619 case BUILT_IN_STRLEN
:
9620 return fold_builtin_strlen (arg0
);
9622 CASE_FLT_FN (BUILT_IN_FABS
):
9623 return fold_builtin_fabs (arg0
, type
);
9627 case BUILT_IN_LLABS
:
9628 case BUILT_IN_IMAXABS
:
9629 return fold_builtin_abs (arg0
, type
);
9631 CASE_FLT_FN (BUILT_IN_CONJ
):
9632 if (validate_arg (arg0
, COMPLEX_TYPE
))
9633 return fold_build1 (CONJ_EXPR
, type
, arg0
);
9636 CASE_FLT_FN (BUILT_IN_CREAL
):
9637 if (validate_arg (arg0
, COMPLEX_TYPE
))
9638 return non_lvalue (fold_build1 (REALPART_EXPR
, type
, arg0
));;
9641 CASE_FLT_FN (BUILT_IN_CIMAG
):
9642 if (validate_arg (arg0
, COMPLEX_TYPE
))
9643 return non_lvalue (fold_build1 (IMAGPART_EXPR
, type
, arg0
));
9646 CASE_FLT_FN (BUILT_IN_CCOS
):
9647 CASE_FLT_FN (BUILT_IN_CCOSH
):
9648 /* These functions are "even", i.e. f(x) == f(-x). */
9649 if (validate_arg (arg0
, COMPLEX_TYPE
))
9651 tree narg
= fold_strip_sign_ops (arg0
);
9653 return build_call_expr (fndecl
, 1, narg
);
9657 CASE_FLT_FN (BUILT_IN_CABS
):
9658 return fold_builtin_cabs (arg0
, type
, fndecl
);
9660 CASE_FLT_FN (BUILT_IN_CARG
):
9661 return fold_builtin_carg (arg0
, type
);
9663 CASE_FLT_FN (BUILT_IN_SQRT
):
9664 return fold_builtin_sqrt (arg0
, type
);
9666 CASE_FLT_FN (BUILT_IN_CBRT
):
9667 return fold_builtin_cbrt (arg0
, type
);
9669 CASE_FLT_FN (BUILT_IN_ASIN
):
9670 if (validate_arg (arg0
, REAL_TYPE
))
9671 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
9672 &dconstm1
, &dconst1
, true);
9675 CASE_FLT_FN (BUILT_IN_ACOS
):
9676 if (validate_arg (arg0
, REAL_TYPE
))
9677 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
9678 &dconstm1
, &dconst1
, true);
9681 CASE_FLT_FN (BUILT_IN_ATAN
):
9682 if (validate_arg (arg0
, REAL_TYPE
))
9683 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
9686 CASE_FLT_FN (BUILT_IN_ASINH
):
9687 if (validate_arg (arg0
, REAL_TYPE
))
9688 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
9691 CASE_FLT_FN (BUILT_IN_ACOSH
):
9692 if (validate_arg (arg0
, REAL_TYPE
))
9693 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
9694 &dconst1
, NULL
, true);
9697 CASE_FLT_FN (BUILT_IN_ATANH
):
9698 if (validate_arg (arg0
, REAL_TYPE
))
9699 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
9700 &dconstm1
, &dconst1
, false);
9703 CASE_FLT_FN (BUILT_IN_SIN
):
9704 if (validate_arg (arg0
, REAL_TYPE
))
9705 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
9708 CASE_FLT_FN (BUILT_IN_COS
):
9709 return fold_builtin_cos (arg0
, type
, fndecl
);
9712 CASE_FLT_FN (BUILT_IN_TAN
):
9713 return fold_builtin_tan (arg0
, type
);
9715 CASE_FLT_FN (BUILT_IN_CEXP
):
9716 return fold_builtin_cexp (arg0
, type
);
9718 CASE_FLT_FN (BUILT_IN_CEXPI
):
9719 if (validate_arg (arg0
, REAL_TYPE
))
9720 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
9723 CASE_FLT_FN (BUILT_IN_SINH
):
9724 if (validate_arg (arg0
, REAL_TYPE
))
9725 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
9728 CASE_FLT_FN (BUILT_IN_COSH
):
9729 return fold_builtin_cosh (arg0
, type
, fndecl
);
9731 CASE_FLT_FN (BUILT_IN_TANH
):
9732 if (validate_arg (arg0
, REAL_TYPE
))
9733 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
9736 CASE_FLT_FN (BUILT_IN_ERF
):
9737 if (validate_arg (arg0
, REAL_TYPE
))
9738 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
9741 CASE_FLT_FN (BUILT_IN_ERFC
):
9742 if (validate_arg (arg0
, REAL_TYPE
))
9743 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
9746 CASE_FLT_FN (BUILT_IN_TGAMMA
):
9747 if (validate_arg (arg0
, REAL_TYPE
))
9748 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
9751 CASE_FLT_FN (BUILT_IN_EXP
):
9752 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp
);
9754 CASE_FLT_FN (BUILT_IN_EXP2
):
9755 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp2
);
9757 CASE_FLT_FN (BUILT_IN_EXP10
):
9758 CASE_FLT_FN (BUILT_IN_POW10
):
9759 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp10
);
9761 CASE_FLT_FN (BUILT_IN_EXPM1
):
9762 if (validate_arg (arg0
, REAL_TYPE
))
9763 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
9766 CASE_FLT_FN (BUILT_IN_LOG
):
9767 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log
);
9769 CASE_FLT_FN (BUILT_IN_LOG2
):
9770 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log2
);
9772 CASE_FLT_FN (BUILT_IN_LOG10
):
9773 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log10
);
9775 CASE_FLT_FN (BUILT_IN_LOG1P
):
9776 if (validate_arg (arg0
, REAL_TYPE
))
9777 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
9778 &dconstm1
, NULL
, false);
9781 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9782 CASE_FLT_FN (BUILT_IN_J0
):
9783 if (validate_arg (arg0
, REAL_TYPE
))
9784 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
9788 CASE_FLT_FN (BUILT_IN_J1
):
9789 if (validate_arg (arg0
, REAL_TYPE
))
9790 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
9794 CASE_FLT_FN (BUILT_IN_Y0
):
9795 if (validate_arg (arg0
, REAL_TYPE
))
9796 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
9797 &dconst0
, NULL
, false);
9800 CASE_FLT_FN (BUILT_IN_Y1
):
9801 if (validate_arg (arg0
, REAL_TYPE
))
9802 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
9803 &dconst0
, NULL
, false);
9807 CASE_FLT_FN (BUILT_IN_NAN
):
9808 case BUILT_IN_NAND32
:
9809 case BUILT_IN_NAND64
:
9810 case BUILT_IN_NAND128
:
9811 return fold_builtin_nan (arg0
, type
, true);
9813 CASE_FLT_FN (BUILT_IN_NANS
):
9814 return fold_builtin_nan (arg0
, type
, false);
9816 CASE_FLT_FN (BUILT_IN_FLOOR
):
9817 return fold_builtin_floor (fndecl
, arg0
);
9819 CASE_FLT_FN (BUILT_IN_CEIL
):
9820 return fold_builtin_ceil (fndecl
, arg0
);
9822 CASE_FLT_FN (BUILT_IN_TRUNC
):
9823 return fold_builtin_trunc (fndecl
, arg0
);
9825 CASE_FLT_FN (BUILT_IN_ROUND
):
9826 return fold_builtin_round (fndecl
, arg0
);
9828 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
9829 CASE_FLT_FN (BUILT_IN_RINT
):
9830 return fold_trunc_transparent_mathfn (fndecl
, arg0
);
9832 CASE_FLT_FN (BUILT_IN_LCEIL
):
9833 CASE_FLT_FN (BUILT_IN_LLCEIL
):
9834 CASE_FLT_FN (BUILT_IN_LFLOOR
):
9835 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
9836 CASE_FLT_FN (BUILT_IN_LROUND
):
9837 CASE_FLT_FN (BUILT_IN_LLROUND
):
9838 return fold_builtin_int_roundingfn (fndecl
, arg0
);
9840 CASE_FLT_FN (BUILT_IN_LRINT
):
9841 CASE_FLT_FN (BUILT_IN_LLRINT
):
9842 return fold_fixed_mathfn (fndecl
, arg0
);
9844 case BUILT_IN_BSWAP32
:
9845 case BUILT_IN_BSWAP64
:
9846 return fold_builtin_bswap (fndecl
, arg0
);
9848 CASE_INT_FN (BUILT_IN_FFS
):
9849 CASE_INT_FN (BUILT_IN_CLZ
):
9850 CASE_INT_FN (BUILT_IN_CTZ
):
9851 CASE_INT_FN (BUILT_IN_POPCOUNT
):
9852 CASE_INT_FN (BUILT_IN_PARITY
):
9853 return fold_builtin_bitop (fndecl
, arg0
);
9855 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
9856 return fold_builtin_signbit (arg0
, type
);
9858 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
9859 return fold_builtin_significand (arg0
, type
);
9861 CASE_FLT_FN (BUILT_IN_ILOGB
):
9862 CASE_FLT_FN (BUILT_IN_LOGB
):
9863 return fold_builtin_logb (arg0
, type
);
9865 case BUILT_IN_ISASCII
:
9866 return fold_builtin_isascii (arg0
);
9868 case BUILT_IN_TOASCII
:
9869 return fold_builtin_toascii (arg0
);
9871 case BUILT_IN_ISDIGIT
:
9872 return fold_builtin_isdigit (arg0
);
9874 CASE_FLT_FN (BUILT_IN_FINITE
):
9875 case BUILT_IN_FINITED32
:
9876 case BUILT_IN_FINITED64
:
9877 case BUILT_IN_FINITED128
:
9878 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_FINITE
);
9880 CASE_FLT_FN (BUILT_IN_ISINF
):
9881 case BUILT_IN_ISINFD32
:
9882 case BUILT_IN_ISINFD64
:
9883 case BUILT_IN_ISINFD128
:
9884 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISINF
);
9886 CASE_FLT_FN (BUILT_IN_ISNAN
):
9887 case BUILT_IN_ISNAND32
:
9888 case BUILT_IN_ISNAND64
:
9889 case BUILT_IN_ISNAND128
:
9890 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISNAN
);
9892 case BUILT_IN_PRINTF
:
9893 case BUILT_IN_PRINTF_UNLOCKED
:
9894 case BUILT_IN_VPRINTF
:
9895 return fold_builtin_printf (fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
9905 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9906 IGNORE is true if the result of the function call is ignored. This
9907 function returns NULL_TREE if no simplification was possible. */
9910 fold_builtin_2 (tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
9912 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9913 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9917 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9918 CASE_FLT_FN (BUILT_IN_JN
):
9919 if (validate_arg (arg0
, INTEGER_TYPE
)
9920 && validate_arg (arg1
, REAL_TYPE
))
9921 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
9924 CASE_FLT_FN (BUILT_IN_YN
):
9925 if (validate_arg (arg0
, INTEGER_TYPE
)
9926 && validate_arg (arg1
, REAL_TYPE
))
9927 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
9931 CASE_FLT_FN (BUILT_IN_DREM
):
9932 CASE_FLT_FN (BUILT_IN_REMAINDER
):
9933 if (validate_arg (arg0
, REAL_TYPE
)
9934 && validate_arg(arg1
, REAL_TYPE
))
9935 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
9938 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
9939 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
9940 if (validate_arg (arg0
, REAL_TYPE
)
9941 && validate_arg(arg1
, POINTER_TYPE
))
9942 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
9946 CASE_FLT_FN (BUILT_IN_ATAN2
):
9947 if (validate_arg (arg0
, REAL_TYPE
)
9948 && validate_arg(arg1
, REAL_TYPE
))
9949 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
9952 CASE_FLT_FN (BUILT_IN_FDIM
):
9953 if (validate_arg (arg0
, REAL_TYPE
)
9954 && validate_arg(arg1
, REAL_TYPE
))
9955 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
9958 CASE_FLT_FN (BUILT_IN_HYPOT
):
9959 return fold_builtin_hypot (fndecl
, arg0
, arg1
, type
);
9961 CASE_FLT_FN (BUILT_IN_LDEXP
):
9962 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/true);
9963 CASE_FLT_FN (BUILT_IN_SCALBN
):
9964 CASE_FLT_FN (BUILT_IN_SCALBLN
):
9965 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/false);
9967 CASE_FLT_FN (BUILT_IN_FREXP
):
9968 return fold_builtin_frexp (arg0
, arg1
, type
);
9970 CASE_FLT_FN (BUILT_IN_MODF
):
9971 return fold_builtin_modf (arg0
, arg1
, type
);
9973 case BUILT_IN_BZERO
:
9974 return fold_builtin_bzero (arg0
, arg1
, ignore
);
9976 case BUILT_IN_FPUTS
:
9977 return fold_builtin_fputs (arg0
, arg1
, ignore
, false, NULL_TREE
);
9979 case BUILT_IN_FPUTS_UNLOCKED
:
9980 return fold_builtin_fputs (arg0
, arg1
, ignore
, true, NULL_TREE
);
9982 case BUILT_IN_STRSTR
:
9983 return fold_builtin_strstr (arg0
, arg1
, type
);
9985 case BUILT_IN_STRCAT
:
9986 return fold_builtin_strcat (arg0
, arg1
);
9988 case BUILT_IN_STRSPN
:
9989 return fold_builtin_strspn (arg0
, arg1
);
9991 case BUILT_IN_STRCSPN
:
9992 return fold_builtin_strcspn (arg0
, arg1
);
9994 case BUILT_IN_STRCHR
:
9995 case BUILT_IN_INDEX
:
9996 return fold_builtin_strchr (arg0
, arg1
, type
);
9998 case BUILT_IN_STRRCHR
:
9999 case BUILT_IN_RINDEX
:
10000 return fold_builtin_strrchr (arg0
, arg1
, type
);
10002 case BUILT_IN_STRCPY
:
10003 return fold_builtin_strcpy (fndecl
, arg0
, arg1
, NULL_TREE
);
10005 case BUILT_IN_STRCMP
:
10006 return fold_builtin_strcmp (arg0
, arg1
);
10008 case BUILT_IN_STRPBRK
:
10009 return fold_builtin_strpbrk (arg0
, arg1
, type
);
10011 case BUILT_IN_EXPECT
:
10012 return fold_builtin_expect (arg0
);
10014 CASE_FLT_FN (BUILT_IN_POW
):
10015 return fold_builtin_pow (fndecl
, arg0
, arg1
, type
);
10017 CASE_FLT_FN (BUILT_IN_POWI
):
10018 return fold_builtin_powi (fndecl
, arg0
, arg1
, type
);
10020 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10021 return fold_builtin_copysign (fndecl
, arg0
, arg1
, type
);
10023 CASE_FLT_FN (BUILT_IN_FMIN
):
10024 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/false);
10026 CASE_FLT_FN (BUILT_IN_FMAX
):
10027 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/true);
10029 case BUILT_IN_ISGREATER
:
10030 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10031 case BUILT_IN_ISGREATEREQUAL
:
10032 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10033 case BUILT_IN_ISLESS
:
10034 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10035 case BUILT_IN_ISLESSEQUAL
:
10036 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10037 case BUILT_IN_ISLESSGREATER
:
10038 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10039 case BUILT_IN_ISUNORDERED
:
10040 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNORDERED_EXPR
,
10043 /* We do the folding for va_start in the expander. */
10044 case BUILT_IN_VA_START
:
10047 case BUILT_IN_SPRINTF
:
10048 return fold_builtin_sprintf (arg0
, arg1
, NULL_TREE
, ignore
);
10050 case BUILT_IN_OBJECT_SIZE
:
10051 return fold_builtin_object_size (arg0
, arg1
);
10053 case BUILT_IN_PRINTF
:
10054 case BUILT_IN_PRINTF_UNLOCKED
:
10055 case BUILT_IN_VPRINTF
:
10056 return fold_builtin_printf (fndecl
, arg0
, arg1
, ignore
, fcode
);
10058 case BUILT_IN_PRINTF_CHK
:
10059 case BUILT_IN_VPRINTF_CHK
:
10060 if (!validate_arg (arg0
, INTEGER_TYPE
)
10061 || TREE_SIDE_EFFECTS (arg0
))
10064 return fold_builtin_printf (fndecl
, arg1
, NULL_TREE
, ignore
, fcode
);
10067 case BUILT_IN_FPRINTF
:
10068 case BUILT_IN_FPRINTF_UNLOCKED
:
10069 case BUILT_IN_VFPRINTF
:
10070 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, NULL_TREE
,
10079 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10080 and ARG2. IGNORE is true if the result of the function call is ignored.
10081 This function returns NULL_TREE if no simplification was possible. */
10084 fold_builtin_3 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10086 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10087 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10091 CASE_FLT_FN (BUILT_IN_SINCOS
):
10092 return fold_builtin_sincos (arg0
, arg1
, arg2
);
10094 CASE_FLT_FN (BUILT_IN_FMA
):
10095 if (validate_arg (arg0
, REAL_TYPE
)
10096 && validate_arg(arg1
, REAL_TYPE
)
10097 && validate_arg(arg2
, REAL_TYPE
))
10098 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
10101 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10102 CASE_FLT_FN (BUILT_IN_REMQUO
):
10103 if (validate_arg (arg0
, REAL_TYPE
)
10104 && validate_arg(arg1
, REAL_TYPE
)
10105 && validate_arg(arg2
, POINTER_TYPE
))
10106 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10110 case BUILT_IN_MEMSET
:
10111 return fold_builtin_memset (arg0
, arg1
, arg2
, type
, ignore
);
10113 case BUILT_IN_BCOPY
:
10114 return fold_builtin_memory_op (arg1
, arg0
, arg2
, void_type_node
, true, /*endp=*/3);
10116 case BUILT_IN_MEMCPY
:
10117 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/0);
10119 case BUILT_IN_MEMPCPY
:
10120 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/1);
10122 case BUILT_IN_MEMMOVE
:
10123 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/3);
10125 case BUILT_IN_STRNCAT
:
10126 return fold_builtin_strncat (arg0
, arg1
, arg2
);
10128 case BUILT_IN_STRNCPY
:
10129 return fold_builtin_strncpy (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10131 case BUILT_IN_STRNCMP
:
10132 return fold_builtin_strncmp (arg0
, arg1
, arg2
);
10134 case BUILT_IN_MEMCHR
:
10135 return fold_builtin_memchr (arg0
, arg1
, arg2
, type
);
10137 case BUILT_IN_BCMP
:
10138 case BUILT_IN_MEMCMP
:
10139 return fold_builtin_memcmp (arg0
, arg1
, arg2
);;
10141 case BUILT_IN_SPRINTF
:
10142 return fold_builtin_sprintf (arg0
, arg1
, arg2
, ignore
);
10144 case BUILT_IN_STRCPY_CHK
:
10145 case BUILT_IN_STPCPY_CHK
:
10146 return fold_builtin_stxcpy_chk (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10149 case BUILT_IN_STRCAT_CHK
:
10150 return fold_builtin_strcat_chk (fndecl
, arg0
, arg1
, arg2
);
10152 case BUILT_IN_PRINTF_CHK
:
10153 case BUILT_IN_VPRINTF_CHK
:
10154 if (!validate_arg (arg0
, INTEGER_TYPE
)
10155 || TREE_SIDE_EFFECTS (arg0
))
10158 return fold_builtin_printf (fndecl
, arg1
, arg2
, ignore
, fcode
);
10161 case BUILT_IN_FPRINTF
:
10162 case BUILT_IN_FPRINTF_UNLOCKED
:
10163 case BUILT_IN_VFPRINTF
:
10164 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, arg2
, ignore
, fcode
);
10166 case BUILT_IN_FPRINTF_CHK
:
10167 case BUILT_IN_VFPRINTF_CHK
:
10168 if (!validate_arg (arg1
, INTEGER_TYPE
)
10169 || TREE_SIDE_EFFECTS (arg1
))
10172 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, NULL_TREE
,
10181 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10182 ARG2, and ARG3. IGNORE is true if the result of the function call is
10183 ignored. This function returns NULL_TREE if no simplification was
10187 fold_builtin_4 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, tree arg3
,
10190 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10194 case BUILT_IN_MEMCPY_CHK
:
10195 case BUILT_IN_MEMPCPY_CHK
:
10196 case BUILT_IN_MEMMOVE_CHK
:
10197 case BUILT_IN_MEMSET_CHK
:
10198 return fold_builtin_memory_chk (fndecl
, arg0
, arg1
, arg2
, arg3
,
10200 DECL_FUNCTION_CODE (fndecl
));
10202 case BUILT_IN_STRNCPY_CHK
:
10203 return fold_builtin_strncpy_chk (arg0
, arg1
, arg2
, arg3
, NULL_TREE
);
10205 case BUILT_IN_STRNCAT_CHK
:
10206 return fold_builtin_strncat_chk (fndecl
, arg0
, arg1
, arg2
, arg3
);
10208 case BUILT_IN_FPRINTF_CHK
:
10209 case BUILT_IN_VFPRINTF_CHK
:
10210 if (!validate_arg (arg1
, INTEGER_TYPE
)
10211 || TREE_SIDE_EFFECTS (arg1
))
10214 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, arg3
,
10224 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10225 arguments, where NARGS <= 4. IGNORE is true if the result of the
10226 function call is ignored. This function returns NULL_TREE if no
10227 simplification was possible. Note that this only folds builtins with
10228 fixed argument patterns. Foldings that do varargs-to-varargs
10229 transformations, or that match calls with more than 4 arguments,
10230 need to be handled with fold_builtin_varargs instead. */
10232 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10235 fold_builtin_n (tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10237 tree ret
= NULL_TREE
;
10241 ret
= fold_builtin_0 (fndecl
, ignore
);
10244 ret
= fold_builtin_1 (fndecl
, args
[0], ignore
);
10247 ret
= fold_builtin_2 (fndecl
, args
[0], args
[1], ignore
);
10250 ret
= fold_builtin_3 (fndecl
, args
[0], args
[1], args
[2], ignore
);
10253 ret
= fold_builtin_4 (fndecl
, args
[0], args
[1], args
[2], args
[3],
10261 ret
= build1 (NOP_EXPR
, GENERIC_TREE_TYPE (ret
), ret
);
10262 TREE_NO_WARNING (ret
) = 1;
10268 /* Builtins with folding operations that operate on "..." arguments
10269 need special handling; we need to store the arguments in a convenient
10270 data structure before attempting any folding. Fortunately there are
10271 only a few builtins that fall into this category. FNDECL is the
10272 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10273 result of the function call is ignored. */
10276 fold_builtin_varargs (tree fndecl
, tree exp
, bool ignore ATTRIBUTE_UNUSED
)
10278 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10279 tree ret
= NULL_TREE
;
10283 case BUILT_IN_SPRINTF_CHK
:
10284 case BUILT_IN_VSPRINTF_CHK
:
10285 ret
= fold_builtin_sprintf_chk (exp
, fcode
);
10288 case BUILT_IN_SNPRINTF_CHK
:
10289 case BUILT_IN_VSNPRINTF_CHK
:
10290 ret
= fold_builtin_snprintf_chk (exp
, NULL_TREE
, fcode
);
10297 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10298 TREE_NO_WARNING (ret
) = 1;
10304 /* A wrapper function for builtin folding that prevents warnings for
10305 "statement without effect" and the like, caused by removing the
10306 call node earlier than the warning is generated. */
10309 fold_call_expr (tree exp
, bool ignore
)
10311 tree ret
= NULL_TREE
;
10312 tree fndecl
= get_callee_fndecl (exp
);
10314 && TREE_CODE (fndecl
) == FUNCTION_DECL
10315 && DECL_BUILT_IN (fndecl
))
10317 /* FIXME: Don't use a list in this interface. */
10318 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10319 return targetm
.fold_builtin (fndecl
, CALL_EXPR_ARGS (exp
), ignore
);
10322 int nargs
= call_expr_nargs (exp
);
10323 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10325 tree
*args
= CALL_EXPR_ARGP (exp
);
10326 ret
= fold_builtin_n (fndecl
, args
, nargs
, ignore
);
10329 ret
= fold_builtin_varargs (fndecl
, exp
, ignore
);
10332 /* Propagate location information from original call to
10333 expansion of builtin. Otherwise things like
10334 maybe_emit_chk_warning, that operate on the expansion
10335 of a builtin, will use the wrong location information. */
10336 if (CAN_HAVE_LOCATION_P (exp
) && EXPR_HAS_LOCATION (exp
))
10338 tree realret
= ret
;
10339 if (TREE_CODE (ret
) == NOP_EXPR
)
10340 realret
= TREE_OPERAND (ret
, 0);
10341 if (CAN_HAVE_LOCATION_P (realret
)
10342 && !EXPR_HAS_LOCATION (realret
))
10343 SET_EXPR_LOCATION (realret
, EXPR_LOCATION (exp
));
10352 /* Conveniently construct a function call expression. FNDECL names the
10353 function to be called and ARGLIST is a TREE_LIST of arguments. */
10356 build_function_call_expr (tree fndecl
, tree arglist
)
10358 tree fntype
= TREE_TYPE (fndecl
);
10359 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10360 int n
= list_length (arglist
);
10361 tree
*argarray
= (tree
*) alloca (n
* sizeof (tree
));
10364 for (i
= 0; i
< n
; i
++, arglist
= TREE_CHAIN (arglist
))
10365 argarray
[i
] = TREE_VALUE (arglist
);
10366 return fold_builtin_call_array (TREE_TYPE (fntype
), fn
, n
, argarray
);
10369 /* Conveniently construct a function call expression. FNDECL names the
10370 function to be called, N is the number of arguments, and the "..."
10371 parameters are the argument expressions. */
10374 build_call_expr (tree fndecl
, int n
, ...)
10377 tree fntype
= TREE_TYPE (fndecl
);
10378 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10379 tree
*argarray
= (tree
*) alloca (n
* sizeof (tree
));
10383 for (i
= 0; i
< n
; i
++)
10384 argarray
[i
] = va_arg (ap
, tree
);
10386 return fold_builtin_call_array (TREE_TYPE (fntype
), fn
, n
, argarray
);
10389 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10390 N arguments are passed in the array ARGARRAY. */
10393 fold_builtin_call_array (tree type
,
10398 tree ret
= NULL_TREE
;
10402 if (TREE_CODE (fn
) == ADDR_EXPR
)
10404 tree fndecl
= TREE_OPERAND (fn
, 0);
10405 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10406 && DECL_BUILT_IN (fndecl
))
10408 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10410 tree arglist
= NULL_TREE
;
10411 for (i
= n
- 1; i
>= 0; i
--)
10412 arglist
= tree_cons (NULL_TREE
, argarray
[i
], arglist
);
10413 ret
= targetm
.fold_builtin (fndecl
, arglist
, false);
10417 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10419 /* First try the transformations that don't require consing up
10421 ret
= fold_builtin_n (fndecl
, argarray
, n
, false);
10426 /* If we got this far, we need to build an exp. */
10427 exp
= build_call_array (type
, fn
, n
, argarray
);
10428 ret
= fold_builtin_varargs (fndecl
, exp
, false);
10429 return ret
? ret
: exp
;
10433 return build_call_array (type
, fn
, n
, argarray
);
10436 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10437 along with N new arguments specified as the "..." parameters. SKIP
10438 is the number of arguments in EXP to be omitted. This function is used
10439 to do varargs-to-varargs transformations. */
10442 rewrite_call_expr (tree exp
, int skip
, tree fndecl
, int n
, ...)
10444 int oldnargs
= call_expr_nargs (exp
);
10445 int nargs
= oldnargs
- skip
+ n
;
10446 tree fntype
= TREE_TYPE (fndecl
);
10447 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10455 buffer
= alloca (nargs
* sizeof (tree
));
10457 for (i
= 0; i
< n
; i
++)
10458 buffer
[i
] = va_arg (ap
, tree
);
10460 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10461 buffer
[i
] = CALL_EXPR_ARG (exp
, j
);
10464 buffer
= CALL_EXPR_ARGP (exp
) + skip
;
10466 return fold (build_call_array (TREE_TYPE (exp
), fn
, nargs
, buffer
));
10469 /* Validate a single argument ARG against a tree code CODE representing
10473 validate_arg (tree arg
, enum tree_code code
)
10477 else if (code
== POINTER_TYPE
)
10478 return POINTER_TYPE_P (TREE_TYPE (arg
));
10479 return code
== TREE_CODE (TREE_TYPE (arg
));
10482 /* This function validates the types of a function call argument list
10483 against a specified list of tree_codes. If the last specifier is a 0,
10484 that represents an ellipses, otherwise the last specifier must be a
10488 validate_arglist (tree callexpr
, ...)
10490 enum tree_code code
;
10493 call_expr_arg_iterator iter
;
10496 va_start (ap
, callexpr
);
10497 init_call_expr_arg_iterator (callexpr
, &iter
);
10501 code
= va_arg (ap
, enum tree_code
);
10505 /* This signifies an ellipses, any further arguments are all ok. */
10509 /* This signifies an endlink, if no arguments remain, return
10510 true, otherwise return false. */
10511 res
= !more_call_expr_args_p (&iter
);
10514 /* If no parameters remain or the parameter's code does not
10515 match the specified code, return false. Otherwise continue
10516 checking any remaining arguments. */
10517 arg
= next_call_expr_arg (&iter
);
10518 if (!validate_arg (arg
, code
))
10525 /* We need gotos here since we can only have one VA_CLOSE in a
10533 /* Default target-specific builtin expander that does nothing. */
10536 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
10537 rtx target ATTRIBUTE_UNUSED
,
10538 rtx subtarget ATTRIBUTE_UNUSED
,
10539 enum machine_mode mode ATTRIBUTE_UNUSED
,
10540 int ignore ATTRIBUTE_UNUSED
)
10545 /* Returns true is EXP represents data that would potentially reside
10546 in a readonly section. */
10549 readonly_data_expr (tree exp
)
10553 if (TREE_CODE (exp
) != ADDR_EXPR
)
10556 exp
= get_base_address (TREE_OPERAND (exp
, 0));
10560 /* Make sure we call decl_readonly_section only for trees it
10561 can handle (since it returns true for everything it doesn't
10563 if (TREE_CODE (exp
) == STRING_CST
10564 || TREE_CODE (exp
) == CONSTRUCTOR
10565 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
10566 return decl_readonly_section (exp
, 0);
10571 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10572 to the call, and TYPE is its return type.
10574 Return NULL_TREE if no simplification was possible, otherwise return the
10575 simplified form of the call as a tree.
10577 The simplified form may be a constant or other expression which
10578 computes the same value, but in a more efficient manner (including
10579 calls to other builtin functions).
10581 The call may contain arguments which need to be evaluated, but
10582 which are not useful to determine the result of the call. In
10583 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10584 COMPOUND_EXPR will be an argument which must be evaluated.
10585 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10586 COMPOUND_EXPR in the chain will contain the tree for the simplified
10587 form of the builtin function call. */
10590 fold_builtin_strstr (tree s1
, tree s2
, tree type
)
10592 if (!validate_arg (s1
, POINTER_TYPE
)
10593 || !validate_arg (s2
, POINTER_TYPE
))
10598 const char *p1
, *p2
;
10600 p2
= c_getstr (s2
);
10604 p1
= c_getstr (s1
);
10607 const char *r
= strstr (p1
, p2
);
10611 return build_int_cst (TREE_TYPE (s1
), 0);
10613 /* Return an offset into the constant string argument. */
10614 tem
= fold_build2 (PLUS_EXPR
, TREE_TYPE (s1
),
10615 s1
, build_int_cst (TREE_TYPE (s1
), r
- p1
));
10616 return fold_convert (type
, tem
);
10619 /* The argument is const char *, and the result is char *, so we need
10620 a type conversion here to avoid a warning. */
10622 return fold_convert (type
, s1
);
10627 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
10631 /* New argument list transforming strstr(s1, s2) to
10632 strchr(s1, s2[0]). */
10633 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
10637 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10638 the call, and TYPE is its return type.
10640 Return NULL_TREE if no simplification was possible, otherwise return the
10641 simplified form of the call as a tree.
10643 The simplified form may be a constant or other expression which
10644 computes the same value, but in a more efficient manner (including
10645 calls to other builtin functions).
10647 The call may contain arguments which need to be evaluated, but
10648 which are not useful to determine the result of the call. In
10649 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10650 COMPOUND_EXPR will be an argument which must be evaluated.
10651 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10652 COMPOUND_EXPR in the chain will contain the tree for the simplified
10653 form of the builtin function call. */
10656 fold_builtin_strchr (tree s1
, tree s2
, tree type
)
10658 if (!validate_arg (s1
, POINTER_TYPE
)
10659 || !validate_arg (s2
, INTEGER_TYPE
))
10665 if (TREE_CODE (s2
) != INTEGER_CST
)
10668 p1
= c_getstr (s1
);
10675 if (target_char_cast (s2
, &c
))
10678 r
= strchr (p1
, c
);
10681 return build_int_cst (TREE_TYPE (s1
), 0);
10683 /* Return an offset into the constant string argument. */
10684 tem
= fold_build2 (PLUS_EXPR
, TREE_TYPE (s1
),
10685 s1
, build_int_cst (TREE_TYPE (s1
), r
- p1
));
10686 return fold_convert (type
, tem
);
10692 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10693 the call, and TYPE is its return type.
10695 Return NULL_TREE if no simplification was possible, otherwise return the
10696 simplified form of the call as a tree.
10698 The simplified form may be a constant or other expression which
10699 computes the same value, but in a more efficient manner (including
10700 calls to other builtin functions).
10702 The call may contain arguments which need to be evaluated, but
10703 which are not useful to determine the result of the call. In
10704 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10705 COMPOUND_EXPR will be an argument which must be evaluated.
10706 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10707 COMPOUND_EXPR in the chain will contain the tree for the simplified
10708 form of the builtin function call. */
10711 fold_builtin_strrchr (tree s1
, tree s2
, tree type
)
10713 if (!validate_arg (s1
, POINTER_TYPE
)
10714 || !validate_arg (s2
, INTEGER_TYPE
))
10721 if (TREE_CODE (s2
) != INTEGER_CST
)
10724 p1
= c_getstr (s1
);
10731 if (target_char_cast (s2
, &c
))
10734 r
= strrchr (p1
, c
);
10737 return build_int_cst (TREE_TYPE (s1
), 0);
10739 /* Return an offset into the constant string argument. */
10740 tem
= fold_build2 (PLUS_EXPR
, TREE_TYPE (s1
),
10741 s1
, build_int_cst (TREE_TYPE (s1
), r
- p1
));
10742 return fold_convert (type
, tem
);
10745 if (! integer_zerop (s2
))
10748 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
10752 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10753 return build_call_expr (fn
, 2, s1
, s2
);
10757 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10758 to the call, and TYPE is its return type.
10760 Return NULL_TREE if no simplification was possible, otherwise return the
10761 simplified form of the call as a tree.
10763 The simplified form may be a constant or other expression which
10764 computes the same value, but in a more efficient manner (including
10765 calls to other builtin functions).
10767 The call may contain arguments which need to be evaluated, but
10768 which are not useful to determine the result of the call. In
10769 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10770 COMPOUND_EXPR will be an argument which must be evaluated.
10771 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10772 COMPOUND_EXPR in the chain will contain the tree for the simplified
10773 form of the builtin function call. */
10776 fold_builtin_strpbrk (tree s1
, tree s2
, tree type
)
10778 if (!validate_arg (s1
, POINTER_TYPE
)
10779 || !validate_arg (s2
, POINTER_TYPE
))
10784 const char *p1
, *p2
;
10786 p2
= c_getstr (s2
);
10790 p1
= c_getstr (s1
);
10793 const char *r
= strpbrk (p1
, p2
);
10797 return build_int_cst (TREE_TYPE (s1
), 0);
10799 /* Return an offset into the constant string argument. */
10800 tem
= fold_build2 (PLUS_EXPR
, TREE_TYPE (s1
),
10801 s1
, build_int_cst (TREE_TYPE (s1
), r
- p1
));
10802 return fold_convert (type
, tem
);
10806 /* strpbrk(x, "") == NULL.
10807 Evaluate and ignore s1 in case it had side-effects. */
10808 return omit_one_operand (TREE_TYPE (s1
), integer_zero_node
, s1
);
10811 return NULL_TREE
; /* Really call strpbrk. */
10813 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
10817 /* New argument list transforming strpbrk(s1, s2) to
10818 strchr(s1, s2[0]). */
10819 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
10823 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10826 Return NULL_TREE if no simplification was possible, otherwise return the
10827 simplified form of the call as a tree.
10829 The simplified form may be a constant or other expression which
10830 computes the same value, but in a more efficient manner (including
10831 calls to other builtin functions).
10833 The call may contain arguments which need to be evaluated, but
10834 which are not useful to determine the result of the call. In
10835 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10836 COMPOUND_EXPR will be an argument which must be evaluated.
10837 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10838 COMPOUND_EXPR in the chain will contain the tree for the simplified
10839 form of the builtin function call. */
10842 fold_builtin_strcat (tree dst
, tree src
)
10844 if (!validate_arg (dst
, POINTER_TYPE
)
10845 || !validate_arg (src
, POINTER_TYPE
))
10849 const char *p
= c_getstr (src
);
10851 /* If the string length is zero, return the dst parameter. */
10852 if (p
&& *p
== '\0')
10859 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10860 arguments to the call.
10862 Return NULL_TREE if no simplification was possible, otherwise return the
10863 simplified form of the call as a tree.
10865 The simplified form may be a constant or other expression which
10866 computes the same value, but in a more efficient manner (including
10867 calls to other builtin functions).
10869 The call may contain arguments which need to be evaluated, but
10870 which are not useful to determine the result of the call. In
10871 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10872 COMPOUND_EXPR will be an argument which must be evaluated.
10873 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10874 COMPOUND_EXPR in the chain will contain the tree for the simplified
10875 form of the builtin function call. */
10878 fold_builtin_strncat (tree dst
, tree src
, tree len
)
10880 if (!validate_arg (dst
, POINTER_TYPE
)
10881 || !validate_arg (src
, POINTER_TYPE
)
10882 || !validate_arg (len
, INTEGER_TYPE
))
10886 const char *p
= c_getstr (src
);
10888 /* If the requested length is zero, or the src parameter string
10889 length is zero, return the dst parameter. */
10890 if (integer_zerop (len
) || (p
&& *p
== '\0'))
10891 return omit_two_operands (TREE_TYPE (dst
), dst
, src
, len
);
10893 /* If the requested len is greater than or equal to the string
10894 length, call strcat. */
10895 if (TREE_CODE (len
) == INTEGER_CST
&& p
10896 && compare_tree_int (len
, strlen (p
)) >= 0)
10898 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCAT
];
10900 /* If the replacement _DECL isn't initialized, don't do the
10905 return build_call_expr (fn
, 2, dst
, src
);
10911 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10914 Return NULL_TREE if no simplification was possible, otherwise return the
10915 simplified form of the call as a tree.
10917 The simplified form may be a constant or other expression which
10918 computes the same value, but in a more efficient manner (including
10919 calls to other builtin functions).
10921 The call may contain arguments which need to be evaluated, but
10922 which are not useful to determine the result of the call. In
10923 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10924 COMPOUND_EXPR will be an argument which must be evaluated.
10925 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10926 COMPOUND_EXPR in the chain will contain the tree for the simplified
10927 form of the builtin function call. */
10930 fold_builtin_strspn (tree s1
, tree s2
)
10932 if (!validate_arg (s1
, POINTER_TYPE
)
10933 || !validate_arg (s2
, POINTER_TYPE
))
10937 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10939 /* If both arguments are constants, evaluate at compile-time. */
10942 const size_t r
= strspn (p1
, p2
);
10943 return size_int (r
);
10946 /* If either argument is "", return NULL_TREE. */
10947 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
10948 /* Evaluate and ignore both arguments in case either one has
10950 return omit_two_operands (integer_type_node
, integer_zero_node
,
10956 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10959 Return NULL_TREE if no simplification was possible, otherwise return the
10960 simplified form of the call as a tree.
10962 The simplified form may be a constant or other expression which
10963 computes the same value, but in a more efficient manner (including
10964 calls to other builtin functions).
10966 The call may contain arguments which need to be evaluated, but
10967 which are not useful to determine the result of the call. In
10968 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10969 COMPOUND_EXPR will be an argument which must be evaluated.
10970 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10971 COMPOUND_EXPR in the chain will contain the tree for the simplified
10972 form of the builtin function call. */
10975 fold_builtin_strcspn (tree s1
, tree s2
)
10977 if (!validate_arg (s1
, POINTER_TYPE
)
10978 || !validate_arg (s2
, POINTER_TYPE
))
10982 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10984 /* If both arguments are constants, evaluate at compile-time. */
10987 const size_t r
= strcspn (p1
, p2
);
10988 return size_int (r
);
10991 /* If the first argument is "", return NULL_TREE. */
10992 if (p1
&& *p1
== '\0')
10994 /* Evaluate and ignore argument s2 in case it has
10996 return omit_one_operand (integer_type_node
,
10997 integer_zero_node
, s2
);
11000 /* If the second argument is "", return __builtin_strlen(s1). */
11001 if (p2
&& *p2
== '\0')
11003 tree fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
11005 /* If the replacement _DECL isn't initialized, don't do the
11010 return build_call_expr (fn
, 1, s1
);
11016 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11017 to the call. IGNORE is true if the value returned
11018 by the builtin will be ignored. UNLOCKED is true is true if this
11019 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11020 the known length of the string. Return NULL_TREE if no simplification
11024 fold_builtin_fputs (tree arg0
, tree arg1
, bool ignore
, bool unlocked
, tree len
)
11026 /* If we're using an unlocked function, assume the other unlocked
11027 functions exist explicitly. */
11028 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
11029 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
11030 tree
const fn_fwrite
= unlocked
? built_in_decls
[BUILT_IN_FWRITE_UNLOCKED
]
11031 : implicit_built_in_decls
[BUILT_IN_FWRITE
];
11033 /* If the return value is used, don't do the transformation. */
11037 /* Verify the arguments in the original call. */
11038 if (!validate_arg (arg0
, POINTER_TYPE
)
11039 || !validate_arg (arg1
, POINTER_TYPE
))
11043 len
= c_strlen (arg0
, 0);
11045 /* Get the length of the string passed to fputs. If the length
11046 can't be determined, punt. */
11048 || TREE_CODE (len
) != INTEGER_CST
)
11051 switch (compare_tree_int (len
, 1))
11053 case -1: /* length is 0, delete the call entirely . */
11054 return omit_one_operand (integer_type_node
, integer_zero_node
, arg1
);;
11056 case 0: /* length is 1, call fputc. */
11058 const char *p
= c_getstr (arg0
);
11063 return build_call_expr (fn_fputc
, 2,
11064 build_int_cst (NULL_TREE
, p
[0]), arg1
);
11070 case 1: /* length is greater than 1, call fwrite. */
11072 /* If optimizing for size keep fputs. */
11075 /* New argument list transforming fputs(string, stream) to
11076 fwrite(string, 1, len, stream). */
11078 return build_call_expr (fn_fwrite
, 4, arg0
, size_one_node
, len
, arg1
);
11083 gcc_unreachable ();
11088 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11089 produced. False otherwise. This is done so that we don't output the error
11090 or warning twice or three times. */
11092 fold_builtin_next_arg (tree exp
, bool va_start_p
)
11094 tree fntype
= TREE_TYPE (current_function_decl
);
11095 int nargs
= call_expr_nargs (exp
);
11098 if (TYPE_ARG_TYPES (fntype
) == 0
11099 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
11100 == void_type_node
))
11102 error ("%<va_start%> used in function with fixed args");
11108 if (va_start_p
&& (nargs
!= 2))
11110 error ("wrong number of arguments to function %<va_start%>");
11113 arg
= CALL_EXPR_ARG (exp
, 1);
11115 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11116 when we checked the arguments and if needed issued a warning. */
11121 /* Evidently an out of date version of <stdarg.h>; can't validate
11122 va_start's second argument, but can still work as intended. */
11123 warning (0, "%<__builtin_next_arg%> called without an argument");
11126 else if (nargs
> 1)
11128 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11131 arg
= CALL_EXPR_ARG (exp
, 0);
11134 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11135 or __builtin_next_arg (0) the first time we see it, after checking
11136 the arguments and if needed issuing a warning. */
11137 if (!integer_zerop (arg
))
11139 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
11141 /* Strip off all nops for the sake of the comparison. This
11142 is not quite the same as STRIP_NOPS. It does more.
11143 We must also strip off INDIRECT_EXPR for C++ reference
11145 while (TREE_CODE (arg
) == NOP_EXPR
11146 || TREE_CODE (arg
) == CONVERT_EXPR
11147 || TREE_CODE (arg
) == NON_LVALUE_EXPR
11148 || TREE_CODE (arg
) == INDIRECT_REF
)
11149 arg
= TREE_OPERAND (arg
, 0);
11150 if (arg
!= last_parm
)
11152 /* FIXME: Sometimes with the tree optimizers we can get the
11153 not the last argument even though the user used the last
11154 argument. We just warn and set the arg to be the last
11155 argument so that we will get wrong-code because of
11157 warning (0, "second parameter of %<va_start%> not last named argument");
11159 /* We want to verify the second parameter just once before the tree
11160 optimizers are run and then avoid keeping it in the tree,
11161 as otherwise we could warn even for correct code like:
11162 void foo (int i, ...)
11163 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11165 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
11167 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
11173 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11174 ORIG may be null if this is a 2-argument call. We don't attempt to
11175 simplify calls with more than 3 arguments.
11177 Return NULL_TREE if no simplification was possible, otherwise return the
11178 simplified form of the call as a tree. If IGNORED is true, it means that
11179 the caller does not use the returned value of the function. */
11182 fold_builtin_sprintf (tree dest
, tree fmt
, tree orig
, int ignored
)
11185 const char *fmt_str
= NULL
;
11187 /* Verify the required arguments in the original call. We deal with two
11188 types of sprintf() calls: 'sprintf (str, fmt)' and
11189 'sprintf (dest, "%s", orig)'. */
11190 if (!validate_arg (dest
, POINTER_TYPE
)
11191 || !validate_arg (fmt
, POINTER_TYPE
))
11193 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
11196 /* Check whether the format is a literal string constant. */
11197 fmt_str
= c_getstr (fmt
);
11198 if (fmt_str
== NULL
)
11202 retval
= NULL_TREE
;
11204 if (!init_target_chars ())
11207 /* If the format doesn't contain % args or %%, use strcpy. */
11208 if (strchr (fmt_str
, target_percent
) == NULL
)
11210 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11215 /* Don't optimize sprintf (buf, "abc", ptr++). */
11219 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11220 'format' is known to contain no % formats. */
11221 call
= build_call_expr (fn
, 2, dest
, fmt
);
11223 retval
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
11226 /* If the format is "%s", use strcpy if the result isn't used. */
11227 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
11230 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11235 /* Don't crash on sprintf (str1, "%s"). */
11239 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11242 retval
= c_strlen (orig
, 1);
11243 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
11246 call
= build_call_expr (fn
, 2, dest
, orig
);
11249 if (call
&& retval
)
11251 retval
= fold_convert
11252 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls
[BUILT_IN_SPRINTF
])),
11254 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
11260 /* Expand a call EXP to __builtin_object_size. */
11263 expand_builtin_object_size (tree exp
)
11266 int object_size_type
;
11267 tree fndecl
= get_callee_fndecl (exp
);
11268 location_t locus
= EXPR_LOCATION (exp
);
11270 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11272 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11274 expand_builtin_trap ();
11278 ost
= CALL_EXPR_ARG (exp
, 1);
11281 if (TREE_CODE (ost
) != INTEGER_CST
11282 || tree_int_cst_sgn (ost
) < 0
11283 || compare_tree_int (ost
, 3) > 0)
11285 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11287 expand_builtin_trap ();
11291 object_size_type
= tree_low_cst (ost
, 0);
11293 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
11296 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11297 FCODE is the BUILT_IN_* to use.
11298 Return NULL_RTX if we failed; the caller should emit a normal call,
11299 otherwise try to get the result in TARGET, if convenient (and in
11300 mode MODE if that's convenient). */
11303 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
11304 enum built_in_function fcode
)
11306 tree dest
, src
, len
, size
;
11308 if (!validate_arglist (exp
,
11310 fcode
== BUILT_IN_MEMSET_CHK
11311 ? INTEGER_TYPE
: POINTER_TYPE
,
11312 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11315 dest
= CALL_EXPR_ARG (exp
, 0);
11316 src
= CALL_EXPR_ARG (exp
, 1);
11317 len
= CALL_EXPR_ARG (exp
, 2);
11318 size
= CALL_EXPR_ARG (exp
, 3);
11320 if (! host_integerp (size
, 1))
11323 if (host_integerp (len
, 1) || integer_all_onesp (size
))
11327 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
11329 location_t locus
= EXPR_LOCATION (exp
);
11330 warning (0, "%Hcall to %D will always overflow destination buffer",
11331 &locus
, get_callee_fndecl (exp
));
11336 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11337 mem{cpy,pcpy,move,set} is available. */
11340 case BUILT_IN_MEMCPY_CHK
:
11341 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11343 case BUILT_IN_MEMPCPY_CHK
:
11344 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11346 case BUILT_IN_MEMMOVE_CHK
:
11347 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11349 case BUILT_IN_MEMSET_CHK
:
11350 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11359 fn
= build_call_expr (fn
, 3, dest
, src
, len
);
11360 if (TREE_CODE (fn
) == CALL_EXPR
)
11361 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11362 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11364 else if (fcode
== BUILT_IN_MEMSET_CHK
)
11368 unsigned int dest_align
11369 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
11371 /* If DEST is not a pointer type, call the normal function. */
11372 if (dest_align
== 0)
11375 /* If SRC and DEST are the same (and not volatile), do nothing. */
11376 if (operand_equal_p (src
, dest
, 0))
11380 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11382 /* Evaluate and ignore LEN in case it has side-effects. */
11383 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
11384 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
11387 len
= fold_convert (TREE_TYPE (dest
), len
);
11388 expr
= fold_build2 (PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
11389 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
11392 /* __memmove_chk special case. */
11393 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
11395 unsigned int src_align
11396 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
11398 if (src_align
== 0)
11401 /* If src is categorized for a readonly section we can use
11402 normal __memcpy_chk. */
11403 if (readonly_data_expr (src
))
11405 tree fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11408 fn
= build_call_expr (fn
, 4, dest
, src
, len
, size
);
11409 if (TREE_CODE (fn
) == CALL_EXPR
)
11410 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11411 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11418 /* Emit warning if a buffer overflow is detected at compile time. */
11421 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
11429 case BUILT_IN_STRCPY_CHK
:
11430 case BUILT_IN_STPCPY_CHK
:
11431 /* For __strcat_chk the warning will be emitted only if overflowing
11432 by at least strlen (dest) + 1 bytes. */
11433 case BUILT_IN_STRCAT_CHK
:
11434 len
= CALL_EXPR_ARG (exp
, 1);
11435 size
= CALL_EXPR_ARG (exp
, 2);
11438 case BUILT_IN_STRNCAT_CHK
:
11439 case BUILT_IN_STRNCPY_CHK
:
11440 len
= CALL_EXPR_ARG (exp
, 2);
11441 size
= CALL_EXPR_ARG (exp
, 3);
11443 case BUILT_IN_SNPRINTF_CHK
:
11444 case BUILT_IN_VSNPRINTF_CHK
:
11445 len
= CALL_EXPR_ARG (exp
, 1);
11446 size
= CALL_EXPR_ARG (exp
, 3);
11449 gcc_unreachable ();
11455 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11460 len
= c_strlen (len
, 1);
11461 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11464 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
11466 tree src
= CALL_EXPR_ARG (exp
, 1);
11467 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11469 src
= c_strlen (src
, 1);
11470 if (! src
|| ! host_integerp (src
, 1))
11472 locus
= EXPR_LOCATION (exp
);
11473 warning (0, "%Hcall to %D might overflow destination buffer",
11474 &locus
, get_callee_fndecl (exp
));
11477 else if (tree_int_cst_lt (src
, size
))
11480 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
11483 locus
= EXPR_LOCATION (exp
);
11484 warning (0, "%Hcall to %D will always overflow destination buffer",
11485 &locus
, get_callee_fndecl (exp
));
11488 /* Emit warning if a buffer overflow is detected at compile time
11489 in __sprintf_chk/__vsprintf_chk calls. */
11492 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
11494 tree dest
, size
, len
, fmt
, flag
;
11495 const char *fmt_str
;
11496 int nargs
= call_expr_nargs (exp
);
11498 /* Verify the required arguments in the original call. */
11502 dest
= CALL_EXPR_ARG (exp
, 0);
11503 flag
= CALL_EXPR_ARG (exp
, 1);
11504 size
= CALL_EXPR_ARG (exp
, 2);
11505 fmt
= CALL_EXPR_ARG (exp
, 3);
11507 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11510 /* Check whether the format is a literal string constant. */
11511 fmt_str
= c_getstr (fmt
);
11512 if (fmt_str
== NULL
)
11515 if (!init_target_chars ())
11518 /* If the format doesn't contain % args or %%, we know its size. */
11519 if (strchr (fmt_str
, target_percent
) == 0)
11520 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11521 /* If the format is "%s" and first ... argument is a string literal,
11523 else if (fcode
== BUILT_IN_SPRINTF_CHK
11524 && strcmp (fmt_str
, target_percent_s
) == 0)
11530 arg
= CALL_EXPR_ARG (exp
, 4);
11531 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
11534 len
= c_strlen (arg
, 1);
11535 if (!len
|| ! host_integerp (len
, 1))
11541 if (! tree_int_cst_lt (len
, size
))
11543 location_t locus
= EXPR_LOCATION (exp
);
11544 warning (0, "%Hcall to %D will always overflow destination buffer",
11545 &locus
, get_callee_fndecl (exp
));
11549 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11553 fold_builtin_object_size (tree ptr
, tree ost
)
11555 tree ret
= NULL_TREE
;
11556 int object_size_type
;
11558 if (!validate_arg (ptr
, POINTER_TYPE
)
11559 || !validate_arg (ost
, INTEGER_TYPE
))
11564 if (TREE_CODE (ost
) != INTEGER_CST
11565 || tree_int_cst_sgn (ost
) < 0
11566 || compare_tree_int (ost
, 3) > 0)
11569 object_size_type
= tree_low_cst (ost
, 0);
11571 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11572 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11573 and (size_t) 0 for types 2 and 3. */
11574 if (TREE_SIDE_EFFECTS (ptr
))
11575 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
11577 if (TREE_CODE (ptr
) == ADDR_EXPR
)
11578 ret
= build_int_cstu (size_type_node
,
11579 compute_builtin_object_size (ptr
, object_size_type
));
11581 else if (TREE_CODE (ptr
) == SSA_NAME
)
11583 unsigned HOST_WIDE_INT bytes
;
11585 /* If object size is not known yet, delay folding until
11586 later. Maybe subsequent passes will help determining
11588 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
11589 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2
11591 ret
= build_int_cstu (size_type_node
, bytes
);
11596 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (ret
);
11597 HOST_WIDE_INT high
= TREE_INT_CST_HIGH (ret
);
11598 if (fit_double_type (low
, high
, &low
, &high
, TREE_TYPE (ret
)))
11605 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11606 DEST, SRC, LEN, and SIZE are the arguments to the call.
11607 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11608 code of the builtin. If MAXLEN is not NULL, it is maximum length
11609 passed as third argument. */
11612 fold_builtin_memory_chk (tree fndecl
,
11613 tree dest
, tree src
, tree len
, tree size
,
11614 tree maxlen
, bool ignore
,
11615 enum built_in_function fcode
)
11619 if (!validate_arg (dest
, POINTER_TYPE
)
11620 || !validate_arg (src
,
11621 (fcode
== BUILT_IN_MEMSET_CHK
11622 ? INTEGER_TYPE
: POINTER_TYPE
))
11623 || !validate_arg (len
, INTEGER_TYPE
)
11624 || !validate_arg (size
, INTEGER_TYPE
))
11627 /* If SRC and DEST are the same (and not volatile), return DEST
11628 (resp. DEST+LEN for __mempcpy_chk). */
11629 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
11631 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11632 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
11635 tree temp
= fold_convert (TREE_TYPE (dest
), len
);
11636 temp
= fold_build2 (PLUS_EXPR
, TREE_TYPE (dest
), dest
, temp
);
11637 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
11641 if (! host_integerp (size
, 1))
11644 if (! integer_all_onesp (size
))
11646 if (! host_integerp (len
, 1))
11648 /* If LEN is not constant, try MAXLEN too.
11649 For MAXLEN only allow optimizing into non-_ocs function
11650 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11651 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11653 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
11655 /* (void) __mempcpy_chk () can be optimized into
11656 (void) __memcpy_chk (). */
11657 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11661 return build_call_expr (fn
, 4, dest
, src
, len
, size
);
11669 if (tree_int_cst_lt (size
, maxlen
))
11674 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11675 mem{cpy,pcpy,move,set} is available. */
11678 case BUILT_IN_MEMCPY_CHK
:
11679 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11681 case BUILT_IN_MEMPCPY_CHK
:
11682 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11684 case BUILT_IN_MEMMOVE_CHK
:
11685 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11687 case BUILT_IN_MEMSET_CHK
:
11688 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11697 return build_call_expr (fn
, 3, dest
, src
, len
);
11700 /* Fold a call to the __st[rp]cpy_chk builtin.
11701 DEST, SRC, and SIZE are the arguments to the call.
11702 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11703 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11704 strings passed as second argument. */
11707 fold_builtin_stxcpy_chk (tree fndecl
, tree dest
, tree src
, tree size
,
11708 tree maxlen
, bool ignore
,
11709 enum built_in_function fcode
)
11713 if (!validate_arg (dest
, POINTER_TYPE
)
11714 || !validate_arg (src
, POINTER_TYPE
)
11715 || !validate_arg (size
, INTEGER_TYPE
))
11718 /* If SRC and DEST are the same (and not volatile), return DEST. */
11719 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
11720 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
11722 if (! host_integerp (size
, 1))
11725 if (! integer_all_onesp (size
))
11727 len
= c_strlen (src
, 1);
11728 if (! len
|| ! host_integerp (len
, 1))
11730 /* If LEN is not constant, try MAXLEN too.
11731 For MAXLEN only allow optimizing into non-_ocs function
11732 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11733 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11735 if (fcode
== BUILT_IN_STPCPY_CHK
)
11740 /* If return value of __stpcpy_chk is ignored,
11741 optimize into __strcpy_chk. */
11742 fn
= built_in_decls
[BUILT_IN_STRCPY_CHK
];
11746 return build_call_expr (fn
, 3, dest
, src
, size
);
11749 if (! len
|| TREE_SIDE_EFFECTS (len
))
11752 /* If c_strlen returned something, but not a constant,
11753 transform __strcpy_chk into __memcpy_chk. */
11754 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11758 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
11759 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
11760 build_call_expr (fn
, 4,
11761 dest
, src
, len
, size
));
11767 if (! tree_int_cst_lt (maxlen
, size
))
11771 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11772 fn
= built_in_decls
[fcode
== BUILT_IN_STPCPY_CHK
11773 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
];
11777 return build_call_expr (fn
, 2, dest
, src
);
11780 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11781 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11782 length passed as third argument. */
11785 fold_builtin_strncpy_chk (tree dest
, tree src
, tree len
, tree size
,
11790 if (!validate_arg (dest
, POINTER_TYPE
)
11791 || !validate_arg (src
, POINTER_TYPE
)
11792 || !validate_arg (len
, INTEGER_TYPE
)
11793 || !validate_arg (size
, INTEGER_TYPE
))
11796 if (! host_integerp (size
, 1))
11799 if (! integer_all_onesp (size
))
11801 if (! host_integerp (len
, 1))
11803 /* If LEN is not constant, try MAXLEN too.
11804 For MAXLEN only allow optimizing into non-_ocs function
11805 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11806 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11812 if (tree_int_cst_lt (size
, maxlen
))
11816 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11817 fn
= built_in_decls
[BUILT_IN_STRNCPY
];
11821 return build_call_expr (fn
, 3, dest
, src
, len
);
11824 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11825 are the arguments to the call. */
11828 fold_builtin_strcat_chk (tree fndecl
, tree dest
, tree src
, tree size
)
11833 if (!validate_arg (dest
, POINTER_TYPE
)
11834 || !validate_arg (src
, POINTER_TYPE
)
11835 || !validate_arg (size
, INTEGER_TYPE
))
11838 p
= c_getstr (src
);
11839 /* If the SRC parameter is "", return DEST. */
11840 if (p
&& *p
== '\0')
11841 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
11843 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
11846 /* If __builtin_strcat_chk is used, assume strcat is available. */
11847 fn
= built_in_decls
[BUILT_IN_STRCAT
];
11851 return build_call_expr (fn
, 2, dest
, src
);
11854 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11858 fold_builtin_strncat_chk (tree fndecl
,
11859 tree dest
, tree src
, tree len
, tree size
)
11864 if (!validate_arg (dest
, POINTER_TYPE
)
11865 || !validate_arg (src
, POINTER_TYPE
)
11866 || !validate_arg (size
, INTEGER_TYPE
)
11867 || !validate_arg (size
, INTEGER_TYPE
))
11870 p
= c_getstr (src
);
11871 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11872 if (p
&& *p
== '\0')
11873 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
11874 else if (integer_zerop (len
))
11875 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
11877 if (! host_integerp (size
, 1))
11880 if (! integer_all_onesp (size
))
11882 tree src_len
= c_strlen (src
, 1);
11884 && host_integerp (src_len
, 1)
11885 && host_integerp (len
, 1)
11886 && ! tree_int_cst_lt (len
, src_len
))
11888 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11889 fn
= built_in_decls
[BUILT_IN_STRCAT_CHK
];
11893 return build_call_expr (fn
, 3, dest
, src
, size
);
11898 /* If __builtin_strncat_chk is used, assume strncat is available. */
11899 fn
= built_in_decls
[BUILT_IN_STRNCAT
];
11903 return build_call_expr (fn
, 3, dest
, src
, len
);
11906 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
11907 a normal call should be emitted rather than expanding the function
11908 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
11911 fold_builtin_sprintf_chk (tree exp
, enum built_in_function fcode
)
11913 tree dest
, size
, len
, fn
, fmt
, flag
;
11914 const char *fmt_str
;
11915 int nargs
= call_expr_nargs (exp
);
11917 /* Verify the required arguments in the original call. */
11920 dest
= CALL_EXPR_ARG (exp
, 0);
11921 if (!validate_arg (dest
, POINTER_TYPE
))
11923 flag
= CALL_EXPR_ARG (exp
, 1);
11924 if (!validate_arg (flag
, INTEGER_TYPE
))
11926 size
= CALL_EXPR_ARG (exp
, 2);
11927 if (!validate_arg (size
, INTEGER_TYPE
))
11929 fmt
= CALL_EXPR_ARG (exp
, 3);
11930 if (!validate_arg (fmt
, POINTER_TYPE
))
11933 if (! host_integerp (size
, 1))
11938 if (!init_target_chars ())
11941 /* Check whether the format is a literal string constant. */
11942 fmt_str
= c_getstr (fmt
);
11943 if (fmt_str
!= NULL
)
11945 /* If the format doesn't contain % args or %%, we know the size. */
11946 if (strchr (fmt_str
, target_percent
) == 0)
11948 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
11949 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11951 /* If the format is "%s" and first ... argument is a string literal,
11952 we know the size too. */
11953 else if (fcode
== BUILT_IN_SPRINTF_CHK
11954 && strcmp (fmt_str
, target_percent_s
) == 0)
11960 arg
= CALL_EXPR_ARG (exp
, 4);
11961 if (validate_arg (arg
, POINTER_TYPE
))
11963 len
= c_strlen (arg
, 1);
11964 if (! len
|| ! host_integerp (len
, 1))
11971 if (! integer_all_onesp (size
))
11973 if (! len
|| ! tree_int_cst_lt (len
, size
))
11977 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
11978 or if format doesn't contain % chars or is "%s". */
11979 if (! integer_zerop (flag
))
11981 if (fmt_str
== NULL
)
11983 if (strchr (fmt_str
, target_percent
) != NULL
11984 && strcmp (fmt_str
, target_percent_s
))
11988 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
11989 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
11990 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
11994 return rewrite_call_expr (exp
, 4, fn
, 2, dest
, fmt
);
11997 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
11998 a normal call should be emitted rather than expanding the function
11999 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12000 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12001 passed as second argument. */
12004 fold_builtin_snprintf_chk (tree exp
, tree maxlen
,
12005 enum built_in_function fcode
)
12007 tree dest
, size
, len
, fn
, fmt
, flag
;
12008 const char *fmt_str
;
12010 /* Verify the required arguments in the original call. */
12011 if (call_expr_nargs (exp
) < 5)
12013 dest
= CALL_EXPR_ARG (exp
, 0);
12014 if (!validate_arg (dest
, POINTER_TYPE
))
12016 len
= CALL_EXPR_ARG (exp
, 1);
12017 if (!validate_arg (len
, INTEGER_TYPE
))
12019 flag
= CALL_EXPR_ARG (exp
, 2);
12020 if (!validate_arg (flag
, INTEGER_TYPE
))
12022 size
= CALL_EXPR_ARG (exp
, 3);
12023 if (!validate_arg (size
, INTEGER_TYPE
))
12025 fmt
= CALL_EXPR_ARG (exp
, 4);
12026 if (!validate_arg (fmt
, POINTER_TYPE
))
12029 if (! host_integerp (size
, 1))
12032 if (! integer_all_onesp (size
))
12034 if (! host_integerp (len
, 1))
12036 /* If LEN is not constant, try MAXLEN too.
12037 For MAXLEN only allow optimizing into non-_ocs function
12038 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12039 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12045 if (tree_int_cst_lt (size
, maxlen
))
12049 if (!init_target_chars ())
12052 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12053 or if format doesn't contain % chars or is "%s". */
12054 if (! integer_zerop (flag
))
12056 fmt_str
= c_getstr (fmt
);
12057 if (fmt_str
== NULL
)
12059 if (strchr (fmt_str
, target_percent
) != NULL
12060 && strcmp (fmt_str
, target_percent_s
))
12064 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12066 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
12067 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
12071 return rewrite_call_expr (exp
, 5, fn
, 3, dest
, len
, fmt
);
12074 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12075 FMT and ARG are the arguments to the call; we don't fold cases with
12076 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12078 Return NULL_TREE if no simplification was possible, otherwise return the
12079 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12080 code of the function to be simplified. */
12083 fold_builtin_printf (tree fndecl
, tree fmt
, tree arg
, bool ignore
,
12084 enum built_in_function fcode
)
12086 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
12087 const char *fmt_str
= NULL
;
12089 /* If the return value is used, don't do the transformation. */
12093 /* Verify the required arguments in the original call. */
12094 if (!validate_arg (fmt
, POINTER_TYPE
))
12097 /* Check whether the format is a literal string constant. */
12098 fmt_str
= c_getstr (fmt
);
12099 if (fmt_str
== NULL
)
12102 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
12104 /* If we're using an unlocked function, assume the other
12105 unlocked functions exist explicitly. */
12106 fn_putchar
= built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
];
12107 fn_puts
= built_in_decls
[BUILT_IN_PUTS_UNLOCKED
];
12111 fn_putchar
= implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
12112 fn_puts
= implicit_built_in_decls
[BUILT_IN_PUTS
];
12115 if (!init_target_chars ())
12118 if (strcmp (fmt_str
, target_percent_s
) == 0
12119 || strchr (fmt_str
, target_percent
) == NULL
)
12123 if (strcmp (fmt_str
, target_percent_s
) == 0)
12125 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12128 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12131 str
= c_getstr (arg
);
12137 /* The format specifier doesn't contain any '%' characters. */
12138 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
12144 /* If the string was "", printf does nothing. */
12145 if (str
[0] == '\0')
12146 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12148 /* If the string has length of 1, call putchar. */
12149 if (str
[1] == '\0')
12151 /* Given printf("c"), (where c is any one character,)
12152 convert "c"[0] to an int and pass that to the replacement
12154 newarg
= build_int_cst (NULL_TREE
, str
[0]);
12156 call
= build_call_expr (fn_putchar
, 1, newarg
);
12160 /* If the string was "string\n", call puts("string"). */
12161 size_t len
= strlen (str
);
12162 if ((unsigned char)str
[len
- 1] == target_newline
)
12164 /* Create a NUL-terminated string that's one char shorter
12165 than the original, stripping off the trailing '\n'. */
12166 char *newstr
= alloca (len
);
12167 memcpy (newstr
, str
, len
- 1);
12168 newstr
[len
- 1] = 0;
12170 newarg
= build_string_literal (len
, newstr
);
12172 call
= build_call_expr (fn_puts
, 1, newarg
);
12175 /* We'd like to arrange to call fputs(string,stdout) here,
12176 but we need stdout and don't have a way to get it yet. */
12181 /* The other optimizations can be done only on the non-va_list variants. */
12182 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12185 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12186 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
12188 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12191 call
= build_call_expr (fn_puts
, 1, arg
);
12194 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12195 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12197 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12200 call
= build_call_expr (fn_putchar
, 1, arg
);
12206 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12209 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12210 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12211 more than 3 arguments, and ARG may be null in the 2-argument case.
12213 Return NULL_TREE if no simplification was possible, otherwise return the
12214 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12215 code of the function to be simplified. */
12218 fold_builtin_fprintf (tree fndecl
, tree fp
, tree fmt
, tree arg
, bool ignore
,
12219 enum built_in_function fcode
)
12221 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
12222 const char *fmt_str
= NULL
;
12224 /* If the return value is used, don't do the transformation. */
12228 /* Verify the required arguments in the original call. */
12229 if (!validate_arg (fp
, POINTER_TYPE
))
12231 if (!validate_arg (fmt
, POINTER_TYPE
))
12234 /* Check whether the format is a literal string constant. */
12235 fmt_str
= c_getstr (fmt
);
12236 if (fmt_str
== NULL
)
12239 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
12241 /* If we're using an unlocked function, assume the other
12242 unlocked functions exist explicitly. */
12243 fn_fputc
= built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
];
12244 fn_fputs
= built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
];
12248 fn_fputc
= implicit_built_in_decls
[BUILT_IN_FPUTC
];
12249 fn_fputs
= implicit_built_in_decls
[BUILT_IN_FPUTS
];
12252 if (!init_target_chars ())
12255 /* If the format doesn't contain % args or %%, use strcpy. */
12256 if (strchr (fmt_str
, target_percent
) == NULL
)
12258 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
12262 /* If the format specifier was "", fprintf does nothing. */
12263 if (fmt_str
[0] == '\0')
12265 /* If FP has side-effects, just wait until gimplification is
12267 if (TREE_SIDE_EFFECTS (fp
))
12270 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12273 /* When "string" doesn't contain %, replace all cases of
12274 fprintf (fp, string) with fputs (string, fp). The fputs
12275 builtin will take care of special cases like length == 1. */
12277 call
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
12280 /* The other optimizations can be done only on the non-va_list variants. */
12281 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
12284 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12285 else if (strcmp (fmt_str
, target_percent_s
) == 0)
12287 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12290 call
= build_call_expr (fn_fputs
, 2, arg
, fp
);
12293 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12294 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12296 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12299 call
= build_call_expr (fn_fputc
, 2, arg
, fp
);
12304 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12307 /* Initialize format string characters in the target charset. */
12310 init_target_chars (void)
12315 target_newline
= lang_hooks
.to_target_charset ('\n');
12316 target_percent
= lang_hooks
.to_target_charset ('%');
12317 target_c
= lang_hooks
.to_target_charset ('c');
12318 target_s
= lang_hooks
.to_target_charset ('s');
12319 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
12323 target_percent_c
[0] = target_percent
;
12324 target_percent_c
[1] = target_c
;
12325 target_percent_c
[2] = '\0';
12327 target_percent_s
[0] = target_percent
;
12328 target_percent_s
[1] = target_s
;
12329 target_percent_s
[2] = '\0';
12331 target_percent_s_newline
[0] = target_percent
;
12332 target_percent_s_newline
[1] = target_s
;
12333 target_percent_s_newline
[2] = target_newline
;
12334 target_percent_s_newline
[3] = '\0';
12341 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12342 and no overflow/underflow occurred. INEXACT is true if M was not
12343 exactly calculated. TYPE is the tree type for the result. This
12344 function assumes that you cleared the MPFR flags and then
12345 calculated M to see if anything subsequently set a flag prior to
12346 entering this function. Return NULL_TREE if any checks fail. */
12349 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
12351 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12352 overflow/underflow occurred. If -frounding-math, proceed iff the
12353 result of calling FUNC was exact. */
12354 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12355 && (!flag_rounding_math
|| !inexact
))
12357 REAL_VALUE_TYPE rr
;
12359 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
12360 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12361 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12362 but the mpft_t is not, then we underflowed in the
12364 if (!real_isnan (&rr
) && !real_isinf (&rr
)
12365 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
12367 REAL_VALUE_TYPE rmode
;
12369 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
12370 /* Proceed iff the specified mode can hold the value. */
12371 if (real_identical (&rmode
, &rr
))
12372 return build_real (type
, rmode
);
12378 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12379 FUNC on it and return the resulting value as a tree with type TYPE.
12380 If MIN and/or MAX are not NULL, then the supplied ARG must be
12381 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12382 acceptable values, otherwise they are not. The mpfr precision is
12383 set to the precision of TYPE. We assume that function FUNC returns
12384 zero if the result could be calculated exactly within the requested
12388 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
12389 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
12392 tree result
= NULL_TREE
;
12396 /* To proceed, MPFR must exactly represent the target floating point
12397 format, which only happens when the target base equals two. */
12398 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12399 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
12401 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
12403 if (!real_isnan (ra
) && !real_isinf (ra
)
12404 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
12405 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
12407 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12411 mpfr_init2 (m
, prec
);
12412 mpfr_from_real (m
, ra
, GMP_RNDN
);
12413 mpfr_clear_flags ();
12414 inexact
= func (m
, m
, GMP_RNDN
);
12415 result
= do_mpfr_ckconv (m
, type
, inexact
);
12423 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12424 FUNC on it and return the resulting value as a tree with type TYPE.
12425 The mpfr precision is set to the precision of TYPE. We assume that
12426 function FUNC returns zero if the result could be calculated
12427 exactly within the requested precision. */
12430 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
12431 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
12433 tree result
= NULL_TREE
;
12438 /* To proceed, MPFR must exactly represent the target floating point
12439 format, which only happens when the target base equals two. */
12440 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12441 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
12442 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
12444 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
12445 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
12447 if (!real_isnan (ra1
) && !real_isinf (ra1
)
12448 && !real_isnan (ra2
) && !real_isinf (ra2
))
12450 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12454 mpfr_inits2 (prec
, m1
, m2
, NULL
);
12455 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12456 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
12457 mpfr_clear_flags ();
12458 inexact
= func (m1
, m1
, m2
, GMP_RNDN
);
12459 result
= do_mpfr_ckconv (m1
, type
, inexact
);
12460 mpfr_clears (m1
, m2
, NULL
);
12467 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12468 FUNC on it and return the resulting value as a tree with type TYPE.
12469 The mpfr precision is set to the precision of TYPE. We assume that
12470 function FUNC returns zero if the result could be calculated
12471 exactly within the requested precision. */
12474 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
12475 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
12477 tree result
= NULL_TREE
;
12483 /* To proceed, MPFR must exactly represent the target floating point
12484 format, which only happens when the target base equals two. */
12485 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12486 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
12487 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
12488 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
12490 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
12491 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
12492 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
12494 if (!real_isnan (ra1
) && !real_isinf (ra1
)
12495 && !real_isnan (ra2
) && !real_isinf (ra2
)
12496 && !real_isnan (ra3
) && !real_isinf (ra3
))
12498 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12502 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
12503 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12504 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
12505 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
12506 mpfr_clear_flags ();
12507 inexact
= func (m1
, m1
, m2
, m3
, GMP_RNDN
);
12508 result
= do_mpfr_ckconv (m1
, type
, inexact
);
12509 mpfr_clears (m1
, m2
, m3
, NULL
);
12516 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12517 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12518 If ARG_SINP and ARG_COSP are NULL then the result is returned
12519 as a complex value.
12520 The type is taken from the type of ARG and is used for setting the
12521 precision of the calculation and results. */
12524 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
12526 tree
const type
= TREE_TYPE (arg
);
12527 tree result
= NULL_TREE
;
12531 /* To proceed, MPFR must exactly represent the target floating point
12532 format, which only happens when the target base equals two. */
12533 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12534 && TREE_CODE (arg
) == REAL_CST
12535 && !TREE_OVERFLOW (arg
))
12537 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
12539 if (!real_isnan (ra
) && !real_isinf (ra
))
12541 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12542 tree result_s
, result_c
;
12546 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
12547 mpfr_from_real (m
, ra
, GMP_RNDN
);
12548 mpfr_clear_flags ();
12549 inexact
= mpfr_sin_cos (ms
, mc
, m
, GMP_RNDN
);
12550 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
12551 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
12552 mpfr_clears (m
, ms
, mc
, NULL
);
12553 if (result_s
&& result_c
)
12555 /* If we are to return in a complex value do so. */
12556 if (!arg_sinp
&& !arg_cosp
)
12557 return build_complex (build_complex_type (type
),
12558 result_c
, result_s
);
12560 /* Dereference the sin/cos pointer arguments. */
12561 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
12562 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
12563 /* Proceed if valid pointer type were passed in. */
12564 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
12565 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
12567 /* Set the values. */
12568 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
12570 TREE_SIDE_EFFECTS (result_s
) = 1;
12571 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
12573 TREE_SIDE_EFFECTS (result_c
) = 1;
12574 /* Combine the assignments into a compound expr. */
12575 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
12576 result_s
, result_c
));
12584 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12585 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12586 two-argument mpfr order N Bessel function FUNC on them and return
12587 the resulting value as a tree with type TYPE. The mpfr precision
12588 is set to the precision of TYPE. We assume that function FUNC
12589 returns zero if the result could be calculated exactly within the
12590 requested precision. */
12592 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
12593 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
12594 const REAL_VALUE_TYPE
*min
, bool inclusive
)
12596 tree result
= NULL_TREE
;
12601 /* To proceed, MPFR must exactly represent the target floating point
12602 format, which only happens when the target base equals two. */
12603 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12604 && host_integerp (arg1
, 0)
12605 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
12607 const HOST_WIDE_INT n
= tree_low_cst(arg1
, 0);
12608 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
12611 && !real_isnan (ra
) && !real_isinf (ra
)
12612 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
12614 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12618 mpfr_init2 (m
, prec
);
12619 mpfr_from_real (m
, ra
, GMP_RNDN
);
12620 mpfr_clear_flags ();
12621 inexact
= func (m
, n
, m
, GMP_RNDN
);
12622 result
= do_mpfr_ckconv (m
, type
, inexact
);
12630 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12631 the pointer *(ARG_QUO) and return the result. The type is taken
12632 from the type of ARG0 and is used for setting the precision of the
12633 calculation and results. */
12636 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
12638 tree
const type
= TREE_TYPE (arg0
);
12639 tree result
= NULL_TREE
;
12644 /* To proceed, MPFR must exactly represent the target floating point
12645 format, which only happens when the target base equals two. */
12646 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12647 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
12648 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
12650 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
12651 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
12653 if (!real_isnan (ra0
) && !real_isinf (ra0
)
12654 && !real_isnan (ra1
) && !real_isinf (ra1
))
12656 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12661 mpfr_inits2 (prec
, m0
, m1
, NULL
);
12662 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
12663 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12664 mpfr_clear_flags ();
12665 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, GMP_RNDN
);
12666 /* Remquo is independent of the rounding mode, so pass
12667 inexact=0 to do_mpfr_ckconv(). */
12668 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
12669 mpfr_clears (m0
, m1
, NULL
);
12672 /* MPFR calculates quo in the host's long so it may
12673 return more bits in quo than the target int can hold
12674 if sizeof(host long) > sizeof(target int). This can
12675 happen even for native compilers in LP64 mode. In
12676 these cases, modulo the quo value with the largest
12677 number that the target int can hold while leaving one
12678 bit for the sign. */
12679 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
12680 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
12682 /* Dereference the quo pointer argument. */
12683 arg_quo
= build_fold_indirect_ref (arg_quo
);
12684 /* Proceed iff a valid pointer type was passed in. */
12685 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
12687 /* Set the value. */
12688 tree result_quo
= fold_build2 (MODIFY_EXPR
,
12689 TREE_TYPE (arg_quo
), arg_quo
,
12690 build_int_cst (NULL
, integer_quo
));
12691 TREE_SIDE_EFFECTS (result_quo
) = 1;
12692 /* Combine the quo assignment with the rem. */
12693 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
12694 result_quo
, result_rem
));
12702 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12703 resulting value as a tree with type TYPE. The mpfr precision is
12704 set to the precision of TYPE. We assume that this mpfr function
12705 returns zero if the result could be calculated exactly within the
12706 requested precision. In addition, the integer pointer represented
12707 by ARG_SG will be dereferenced and set to the appropriate signgam
12711 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
12713 tree result
= NULL_TREE
;
12717 /* To proceed, MPFR must exactly represent the target floating point
12718 format, which only happens when the target base equals two. Also
12719 verify ARG is a constant and that ARG_SG is an int pointer. */
12720 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12721 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
12722 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
12723 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
12725 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
12727 /* In addition to NaN and Inf, the argument cannot be zero or a
12728 negative integer. */
12729 if (!real_isnan (ra
) && !real_isinf (ra
)
12730 && ra
->cl
!= rvc_zero
12731 && !(real_isneg(ra
) && real_isinteger(ra
, TYPE_MODE (type
))))
12733 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12738 mpfr_init2 (m
, prec
);
12739 mpfr_from_real (m
, ra
, GMP_RNDN
);
12740 mpfr_clear_flags ();
12741 inexact
= mpfr_lgamma (m
, &sg
, m
, GMP_RNDN
);
12742 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
12748 /* Dereference the arg_sg pointer argument. */
12749 arg_sg
= build_fold_indirect_ref (arg_sg
);
12750 /* Assign the signgam value into *arg_sg. */
12751 result_sg
= fold_build2 (MODIFY_EXPR
,
12752 TREE_TYPE (arg_sg
), arg_sg
,
12753 build_int_cst (NULL
, sg
));
12754 TREE_SIDE_EFFECTS (result_sg
) = 1;
12755 /* Combine the signgam assignment with the lgamma result. */
12756 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
12757 result_sg
, result_lg
));