/* gcc/builtins.c — expand builtin functions (mirrored from thirdparty/gcc.git).  */
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tree-gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
53
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
57
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Printable name for each builtin enumerator, produced by stringizing the
   first argument of every DEF_BUILTIN entry in builtins.def.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
76
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
187 enum tree_code);
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
195
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
204
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
218
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
239 #endif
240
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
244
245 static bool called_as_built_in (tree node)
246 {
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
249 return true;
250 if (strncmp (name, "__sync_", 7) == 0)
251 return true;
252 return false;
253 }
254
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
259
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
262
int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  /* Only pointer-valued expressions carry alignment information here.  */
  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the declared alignment of the pointed-to type, capped
     at the caller's limit.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Walk down the expression, tightening ALIGN where the structure of
     the expression proves a stricter alignment.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  /* Look through pointer conversions; a non-pointer operand ends
	     the walk with what we have so far.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant byte offset is a multiple
	     of it, then keep walking the pointer operand.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  inner = max_align;
	  if (handled_component_p (exp))
	    {
	      HOST_WIDE_INT bitsize, bitpos;
	      tree offset;
	      enum machine_mode mode;
	      int unsignedp, volatilep;

	      /* Decompose the component reference into base + bit position
		 + variable offset, and bound INNER by the lowest set bit
		 of each known piece.  */
	      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					 &mode, &unsignedp, &volatilep, true);
	      if (bitpos)
		inner = MIN (inner, (unsigned) (bitpos & -bitpos));
	      if (offset && TREE_CODE (offset) == PLUS_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 1))
		{
		  /* Any overflow in calculating offset_bits won't change
		     the alignment.  */
		  unsigned offset_bits
		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		       * BITS_PER_UNIT);

		  if (offset_bits)
		    inner = MIN (inner, (offset_bits & -offset_bits));
		  offset = TREE_OPERAND (offset, 0);
		}
	      if (offset && TREE_CODE (offset) == MULT_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 1))
		{
		  /* Any overflow in calculating offset_factor won't change
		     the alignment.  */
		  unsigned offset_factor
		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		       * BITS_PER_UNIT);

		  if (offset_factor)
		    inner = MIN (inner, (offset_factor & -offset_factor));
		}
	      else if (offset)
		/* A variable offset of unknown form only guarantees byte
		   alignment.  */
		inner = MIN (inner, BITS_PER_UNIT);
	    }
	  /* Combine INNER with what the base object itself guarantees.  */
	  if (DECL_P (exp))
	    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
	  else if (CONSTANT_CLASS_P (exp))
	    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
	  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
		   || TREE_CODE (exp) == INDIRECT_REF)
	    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
	  else
	    align = MIN (align, inner);
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
367
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
371
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
378
379 The value returned is of type `ssizetype'.
380
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
383
tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  /* For a conditional whose guard we may ignore (ONLY_VALUE, or no side
     effects), the length is known only if both arms agree on it.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* (e1, e2) has the length of e2, provided e1's effects may be dropped.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the last index of the character array; PTR its contents.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning (0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
466
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
469
470 static const char *
471 c_getstr (tree src)
472 {
473 tree offset_node;
474
475 src = string_constant (src, &offset_node);
476 if (src == 0)
477 return 0;
478
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
483 return 0;
484
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
486 }
487
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
490
static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* Accumulate the constant as two host words: c[0] low, c[1] high.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as the current byte and as a "still inside the string"
     flag: once a NUL is copied, CH stays 0 and all later bytes are 0.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map source byte I to target byte position J, honoring word
	 endianness first and then byte-within-word endianness when the
	 two differ for multi-word modes.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      /* Convert the byte position to a bit position.  */
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
520
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
523 P. */
524
525 static int
526 target_char_cast (tree cst, char *p)
527 {
528 unsigned HOST_WIDE_INT val, hostval;
529
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
532 return 1;
533
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
537
538 hostval = val;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
541
542 if (val != hostval)
543 return 1;
544
545 *p = hostval;
546 return 0;
547 }
548
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
552
553 static tree
554 builtin_save_expr (tree exp)
555 {
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
559 return exp;
560
561 return save_expr (exp);
562 }
563
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
567
static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* TEM starts as the frame address of the current frame; the loop below
     then follows the dynamic chain COUNT times.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.
     By default it lives one word past the frame address.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
647
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
650
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
654
655 void
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
657 {
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 rtx stack_save;
660 rtx mem;
661
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
664
665 buf_addr = convert_memory_address (Pmode, buf_addr);
666
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
668
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
672
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
676
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
679
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
682
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
688
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 #endif
694
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
698
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
701 }
702
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
705
void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Emit any target-specific receiver code; the nested #ifdef chain
     falls through to the empty statement when neither pattern exists.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
769
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
774
static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  /* BUF_ADDR addresses the buffer laid out by expand_builtin_setjmp_setup:
     slot 0 = frame pointer, slot 1 = receiver label, slots 2.. = stack
     save area.  */
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  /* Remember where the jump sequence starts so the scan below can stop.  */
  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  /* Clobber memory and the frame pointer so nothing is kept live
	     across the jump in registers or scheduled past it.  */
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					      REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
860
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  Returns const0_rtx on success,
   NULL_RTX if the argument list is malformed (caller then emits a
   normal call).  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* The builtin takes exactly two pointer arguments: the label and the
     save-area buffer.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* The save area holds the frame pointer in word 0 and the stack
     pointer one Pmode word later (see __builtin_setjmp's layout).  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Clobber all memory and the old frame so nothing is cached
	 across the jump.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       gen_rtx_SCRATCH (VOIDmode))));

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       hard_frame_pointer_rtx)));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  Stop at an intervening CALL: then the jump was
     emitted by the call expansion, not by us.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
					      const0_rtx, REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
948
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

  /* Determine the mode of the stack save slot.  The STACK_SAVEAREA_MODE
     check intentionally comes second so it overrides the
     save_stack_nonlocal operand mode when both are defined.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The stack pointer lives two Pmode words into the buffer, after the
     frame pointer and the label address.  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
982
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  EXP is the CALL_EXPR; the builtin returns nothing, so this
   emits insns only.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  /* At least one pointer argument is required.  */
  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.
     On a non-constant, diagnose and substitute zero so expansion can
     continue.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* Force the address into a form (and mode) the prefetch pattern's
	 operand predicate accepts.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	   (op0,
	    insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1064
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  The returned MEM has BLKmode, alias set 0 and no
   size, since string operations may touch multiple objects.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  /* OFFSET/LENGTH of -1 mean "unknown" throughout.  */
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip wrappers to reach the innermost COMPONENT_REF.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through nested COMPONENT_REFs, dropping each
	     one from MEM_EXPR unless the whole access provably fits in
	     the field.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (! DECL_BIT_FIELD (field));
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      if (length >= 0
		  && TYPE_SIZE_UNIT (TREE_TYPE (inner))
		  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
		{
		  HOST_WIDE_INT size
		    = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
		  /* If we can prove the memory starting at XEXP (mem, 0)
		     and ending at XEXP (mem, 0) + LENGTH will fit into
		     this field, we can keep that COMPONENT_REF in MEM_EXPR.  */
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      /* Translate the offset into the enclosing record's frame
	         for the next iteration; give up if not constant.  */
	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      /* Stringops may alias anything and may span multiple elements,
	 so clear the alias set and the size.  */
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1174 \f
1175 /* Built-in functions to perform an untyped call and return. */
1176
/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  Initialized lazily by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1196
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
1199
1200 static int
1201 apply_args_size (void)
1202 {
1203 static int size = -1;
1204 int align;
1205 unsigned int regno;
1206 enum machine_mode mode;
1207
1208 /* The values computed by this function never change. */
1209 if (size < 0)
1210 {
1211 /* The first value is the incoming arg-pointer. */
1212 size = GET_MODE_SIZE (Pmode);
1213
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1217 size += GET_MODE_SIZE (Pmode);
1218
1219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1220 if (FUNCTION_ARG_REGNO_P (regno))
1221 {
1222 mode = reg_raw_mode[regno];
1223
1224 gcc_assert (mode != VOIDmode);
1225
1226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1227 if (size % align != 0)
1228 size = CEIL (size, align) * align;
1229 apply_args_reg_offset[regno] = size;
1230 size += GET_MODE_SIZE (mode);
1231 apply_args_mode[regno] = mode;
1232 }
1233 else
1234 {
1235 apply_args_mode[regno] = VOIDmode;
1236 apply_args_reg_offset[regno] = 0;
1237 }
1238 }
1239 return size;
1240 }
1241
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
1244
1245 static int
1246 apply_result_size (void)
1247 {
1248 static int size = -1;
1249 int align, regno;
1250 enum machine_mode mode;
1251
1252 /* The values computed by this function never change. */
1253 if (size < 0)
1254 {
1255 size = 0;
1256
1257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1258 if (FUNCTION_VALUE_REGNO_P (regno))
1259 {
1260 mode = reg_raw_mode[regno];
1261
1262 gcc_assert (mode != VOIDmode);
1263
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 size += GET_MODE_SIZE (mode);
1268 apply_result_mode[regno] = mode;
1269 }
1270 else
1271 apply_result_mode[regno] = VOIDmode;
1272
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size = APPLY_RESULT_SIZE;
1277 #endif
1278 }
1279 return size;
1280 }
1281
1282 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1283 /* Create a vector describing the result block RESULT. If SAVEP is true,
1284 the result block is used to save the values; otherwise it is used to
1285 restore the values. */
1286
1287 static rtx
1288 result_vector (int savep, rtx result)
1289 {
1290 int regno, size, align, nelts;
1291 enum machine_mode mode;
1292 rtx reg, mem;
1293 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1294
1295 size = nelts = 0;
1296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1297 if ((mode = apply_result_mode[regno]) != VOIDmode)
1298 {
1299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1300 if (size % align != 0)
1301 size = CEIL (size, align) * align;
1302 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1303 mem = adjust_address (result, mode, size);
1304 savevec[nelts++] = (savep
1305 ? gen_rtx_SET (VOIDmode, mem, reg)
1306 : gen_rtx_SET (VOIDmode, reg, mem));
1307 size += GET_MODE_SIZE (mode);
1308 }
1309 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1310 }
1311 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1312
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a pseudo
   holding the address of the saved-register block.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     Offsets here must mirror the layout chosen by apply_args_size.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (virtual_incoming_args_rtx);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1373
1374 /* __builtin_apply_args returns block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
1379 saved. */
1380
1381 static rtx
1382 expand_builtin_apply_args (void)
1383 {
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value != 0)
1387 return apply_args_value;
1388 {
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1392 rtx temp;
1393 rtx seq;
1394
1395 start_sequence ();
1396 temp = expand_builtin_apply_args_1 ();
1397 seq = get_insns ();
1398 end_sequence ();
1399
1400 apply_args_value = temp;
1401
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1405 function. */
1406 push_topmost_sequence ();
1407 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
1409 return temp;
1410 }
1411 }
1412
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee's address, ARGUMENTS the block produced by
   __builtin_apply_args, and ARGSIZE the number of bytes of arguments
   to copy.  Returns the address of the saved-result block.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  Calling apply_args_size here makes
     sure apply_args_mode is initialized before the loop below reads it.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1568
/* Perform an untyped return.  RESULT is the address of a block saved by
   __builtin_apply; restore the return registers from it and return from
   the current function.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Calling apply_result_size initializes apply_result_mode, read
     below.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.
     Offsets must mirror the layout chosen by apply_result_size.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect the USE insns in a side sequence so they can all be
	   emitted together just before the return.  */
	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1618
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1620
1621 static enum type_class
1622 type_to_class (tree type)
1623 {
1624 switch (TREE_CODE (type))
1625 {
1626 case VOID_TYPE: return void_type_class;
1627 case INTEGER_TYPE: return integer_type_class;
1628 case ENUMERAL_TYPE: return enumeral_type_class;
1629 case BOOLEAN_TYPE: return boolean_type_class;
1630 case POINTER_TYPE: return pointer_type_class;
1631 case REFERENCE_TYPE: return reference_type_class;
1632 case OFFSET_TYPE: return offset_type_class;
1633 case REAL_TYPE: return real_type_class;
1634 case COMPLEX_TYPE: return complex_type_class;
1635 case FUNCTION_TYPE: return function_type_class;
1636 case METHOD_TYPE: return method_type_class;
1637 case RECORD_TYPE: return record_type_class;
1638 case UNION_TYPE:
1639 case QUAL_UNION_TYPE: return union_type_class;
1640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1641 ? string_type_class : array_type_class);
1642 case LANG_TYPE: return lang_type_class;
1643 default: return no_type_class;
1644 }
1645 }
1646
1647 /* Expand a call EXP to __builtin_classify_type. */
1648
1649 static rtx
1650 expand_builtin_classify_type (tree exp)
1651 {
1652 if (call_expr_nargs (exp))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1654 return GEN_INT (no_type_class);
1655 }
1656
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It expands to three
   case labels plus assignments to the locals fcode/fcodef/fcodel of the
   enclosing switch, which must therefore be in scope.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1670
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expands to the three cases for the double/float/
     long double variants of FN and sets fcode/fcodef/fcodel to them.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
	return NULL_TREE;
    }

  /* Pick the variant matching TYPE's main variant.  Other float types
     (e.g. __float128) have no match and yield NULL_TREE.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1782
1783 /* Like mathfn_built_in_1(), but always use the implicit array. */
1784
1785 tree
1786 mathfn_built_in (tree type, enum built_in_function fn)
1787 {
1788 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1789 }
1790
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A value compares unequal to
     itself only when it is NaN.  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			   0, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1831
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns, before_call;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab for this built-in, and record whether the in-line
     expansion must still keep errno correct.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* The errno check can be skipped when the argument is provably
	 non-negative.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  before_call = get_last_insn ();

  target = expand_call (exp, target, target == const0_rtx);

  /* If this is a sqrt operation and we don't care about errno, try to
     attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
     This allows the semantics of the libcall to be visible to the RTL
     optimizers.  */
  if (builtin_optab == sqrt_optab && !errno_set)
    {
      /* Search backwards through the insns emitted by expand_call looking
	 for the instruction with the REG_RETVAL note.  */
      rtx last = get_last_insn ();
      while (last != before_call)
	{
	  if (find_reg_note (last, REG_RETVAL, NULL))
	    {
	      rtx note = find_reg_note (last, REG_EQUAL, NULL);
	      /* Check that the REG_EQUAL note is an EXPR_LIST with
		 two elements, i.e. symbol_ref(sqrt) and the operand.  */
	      if (note
		  && GET_CODE (note) == EXPR_LIST
		  && GET_CODE (XEXP (note, 0)) == EXPR_LIST
		  && XEXP (XEXP (note, 0), 1) != NULL_RTX
		  && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
		{
		  rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
		  /* Check operand is a register with expected mode.  */
		  if (operand
		      && REG_P (operand)
		      && GET_MODE (operand) == mode)
		    {
		      /* Replace the REG_EQUAL note with a SQRT rtx.  */
		      rtx equiv = gen_rtx_SQRT (mode, operand);
		      set_unique_reg_note (last, REG_EQUAL, equiv);
		    }
		}
	      break;
	    }
	  last = PREV_INSN (last);
	}
    }

  return target;
}
1992
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* The scalbn/scalbln/ldexp family takes an integer second argument;
     all other built-ins handled here take two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* The optab is only usable when the float format has radix 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list: the arguments may be expanded
     a second time if we fall back to the library call below.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2098
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Both sin and cos are first attempted via the combined sincos optab.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* The sincos insn produces both values at once; pass TARGET
	     in the output slot we want (the second output for sin, the
	     first for cos) and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2202
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab = 0;
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return NULL_RTX;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    icode = optab_handler (builtin_optab, mode)->insn_code;

  /* Before working hard, check whether the instruction is available.  */
  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  Note the result
	 mode comes from the call's type, not the argument's.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1 (ABS_EXPR, type, arg),
				  build_real (type, r));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1 (ABS_EXPR, type, arg),
				  build_real (type, r));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* RMAX is the largest finite value, RMIN (0x1p<emin-1>) the
	   smallest normalized value for this mode.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Stabilize fabs(x); it feeds both comparisons.  */
	arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    default:
      break;
    }

  /* No optab and no generic lowering: emit the normal library call.  */
  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2345
2346 /* Expand a call to the builtin sincos math function.
2347 Return NULL_RTX if a normal call should be emitted rather than expanding the
2348 function in-line. EXP is the expression that is a call to the builtin
2349 function. */
2350
2351 static rtx
2352 expand_builtin_sincos (tree exp)
2353 {
2354 rtx op0, op1, op2, target1, target2;
2355 enum machine_mode mode;
2356 tree arg, sinp, cosp;
2357 int result;
2358
2359 if (!validate_arglist (exp, REAL_TYPE,
2360 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2361 return NULL_RTX;
2362
2363 arg = CALL_EXPR_ARG (exp, 0);
2364 sinp = CALL_EXPR_ARG (exp, 1);
2365 cosp = CALL_EXPR_ARG (exp, 2);
2366
2367 /* Make a suitable register to place result in. */
2368 mode = TYPE_MODE (TREE_TYPE (arg));
2369
2370 /* Check if sincos insn is available, otherwise emit the call. */
2371 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2372 return NULL_RTX;
2373
2374 target1 = gen_reg_rtx (mode);
2375 target2 = gen_reg_rtx (mode);
2376
2377 op0 = expand_normal (arg);
2378 op1 = expand_normal (build_fold_indirect_ref (sinp));
2379 op2 = expand_normal (build_fold_indirect_ref (cosp));
2380
2381 /* Compute into target1 and target2.
2382 Set TARGET to wherever the result comes back. */
2383 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2384 gcc_assert (result);
2385
2386 /* Move target1 and target2 to the memory locations indicated
2387 by op1 and op2. */
2388 emit_move_insn (op1, target1);
2389 emit_move_insn (op2, target2);
2390
2391 return const0_rtx;
2392 }
2393
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      /* OP1 receives the sine and OP2 the cosine; the final COMPLEX_EXPR
	 below pairs them as op2 + op1*i.  */
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Allocate two stack temporaries and pass their addresses as the
	 sin/cos output pointers of sincos().  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(arg) is lowered as cexp(0 + arg*i).  */
      narg = fold_build2 (COMPLEX_EXPR, ctype,
			  build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2502
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  SUBTARGET may
   be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Pick the library name matching the precision of the original
	 built-in.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Expand the floor/ceil call with the stabilized argument.  */
  exp = build_call_expr (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2631
2632 /* Expand a call to one of the builtin math functions doing integer
2633 conversion (lrint).
2634 Return 0 if a normal call should be emitted rather than expanding the
2635 function in-line. EXP is the expression that is a call to the builtin
2636 function; if convenient, the result should be placed in TARGET.
2637 SUBTARGET may be used as the target for computing one of EXP's operands. */
2638
2639 static rtx
2640 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2641 {
2642 convert_optab builtin_optab;
2643 rtx op0, insns;
2644 tree fndecl = get_callee_fndecl (exp);
2645 tree arg;
2646 enum machine_mode mode;
2647
2648 /* There's no easy way to detect the case we need to set EDOM. */
2649 if (flag_errno_math)
2650 return NULL_RTX;
2651
2652 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2653 gcc_unreachable ();
2654
2655 arg = CALL_EXPR_ARG (exp, 0);
2656
2657 switch (DECL_FUNCTION_CODE (fndecl))
2658 {
2659 CASE_FLT_FN (BUILT_IN_LRINT):
2660 CASE_FLT_FN (BUILT_IN_LLRINT):
2661 builtin_optab = lrint_optab; break;
2662 CASE_FLT_FN (BUILT_IN_LROUND):
2663 CASE_FLT_FN (BUILT_IN_LLROUND):
2664 builtin_optab = lround_optab; break;
2665 default:
2666 gcc_unreachable ();
2667 }
2668
2669 /* Make a suitable register to place result in. */
2670 mode = TYPE_MODE (TREE_TYPE (exp));
2671
2672 target = gen_reg_rtx (mode);
2673
2674 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2675 need to expand the argument again. This way, we will not perform
2676 side-effects more the once. */
2677 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2678
2679 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2680
2681 start_sequence ();
2682
2683 if (expand_sfix_optab (target, op0, builtin_optab))
2684 {
2685 /* Output the entire sequence. */
2686 insns = get_insns ();
2687 end_sequence ();
2688 emit_insn (insns);
2689 return target;
2690 }
2691
2692 /* If we were unable to expand via the builtin, stop the sequence
2693 (without outputting the insns) and call to the library function
2694 with the stabilized argument list. */
2695 end_sequence ();
2696
2697 target = expand_call (exp, target, target == const0_rtx);
2698
2699 return target;
2700 }
2701
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.
   Targets may pre-define POWI_MAX_MULTS to override this limit.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3
2730
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states than an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  For example, powi_table[15] == 9, so
   pow(x,15) is best computed as pow(x,9)*pow(x,6).  An optimal
   power tree for the first 100 integers is given in Knuth's
   "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2773
2774
2775 /* Return the number of multiplications required to calculate
2776 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2777 subroutine of powi_cost. CACHE is an array indicating
2778 which exponents have already been calculated. */
2779
2780 static int
2781 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2782 {
2783 /* If we've already calculated this exponent, then this evaluation
2784 doesn't require any additional multiplications. */
2785 if (cache[n])
2786 return 0;
2787
2788 cache[n] = true;
2789 return powi_lookup_cost (n - powi_table[n], cache)
2790 + powi_lookup_cost (powi_table[n], cache) + 1;
2791 }
2792
/* Return the number of multiplications required to calculate
   powi(x,n) for an arbitrary x, given the exponent N.  This
   function needs to be kept in sync with expand_powi below.  */

static int
powi_cost (HOST_WIDE_INT n)
{
  bool cache[POWI_TABLE_SIZE];
  unsigned HOST_WIDE_INT digit;
  unsigned HOST_WIDE_INT val;
  int result;

  /* x**0 needs no multiplications at all.  */
  if (n == 0)
    return 0;

  /* Ignore the reciprocal when calculating the cost.
     NOTE(review): -n is undefined when N is the most negative
     HOST_WIDE_INT; presumably callers never pass that — confirm.  */
  val = (n < 0) ? -n : n;

  /* Initialize the exponent cache.  Exponent 1 is the operand
     itself, so it is marked as already available.  */
  memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
  cache[1] = true;

  result = 0;

  /* Reduce VAL below POWI_TABLE_SIZE using the window method:
     an odd value peels off the low POWI_WINDOW_SIZE bits as a
     "digit" (charged as one table lookup plus POWI_WINDOW_SIZE + 1
     multiplies), an even value is halved for a single multiply.  */
  while (val >= POWI_TABLE_SIZE)
    {
      if (val & 1)
	{
	  digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
	  result += powi_lookup_cost (digit, cache)
		    + POWI_WINDOW_SIZE + 1;
	  val >>= POWI_WINDOW_SIZE;
	}
      else
	{
	  val >>= 1;
	  result++;
	}
    }

  /* Add the table-driven cost of the remaining small exponent.  */
  return result + powi_lookup_cost (val, cache);
}
2835
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  /* Small exponents are decomposed via the "optimal power tree"
     and memoized in CACHE so shared subexpressions are emitted
     only once.  */
  if (n < POWI_TABLE_SIZE)
    {
      if (cache[n])
	return cache[n];

      /* Publish the register before recursing so any shared
	 reference to this exponent reuses it.  */
      target = gen_reg_rtx (mode);
      cache[n] = target;

      /* x**n = x**(n-j) * x**j with j = powi_table[n].  */
      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Odd exponent: split off the low POWI_WINDOW_SIZE bits,
	 i.e. x**n = x**(n-digit) * x**digit.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Even exponent: x**n = (x**(n/2))**2, a single squaring.  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  /* Emit the multiply and make sure the value ends up in TARGET,
     which is what the cache entry (if any) points at.  */
  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2877
2878 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2879 floating point operand in mode MODE, and N is the exponent. This
2880 function needs to be kept in sync with powi_cost above. */
2881
2882 static rtx
2883 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2884 {
2885 unsigned HOST_WIDE_INT val;
2886 rtx cache[POWI_TABLE_SIZE];
2887 rtx result;
2888
2889 if (n == 0)
2890 return CONST1_RTX (mode);
2891
2892 val = (n < 0) ? -n : n;
2893
2894 memset (cache, 0, sizeof (cache));
2895 cache[1] = x;
2896
2897 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2898
2899 /* If the original exponent was negative, reciprocate the result. */
2900 if (n < 0)
2901 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2902 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2903
2904 return result;
2905 }
2906
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);  /* The base.  */
  arg1 = CALL_EXPR_ARG (exp, 1);  /* The exponent.  */

  /* Non-constant (or overflowed) exponents get the generic
     expansion path.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  Exponents -1, 0, 1 and 2 are always
     expanded; larger ones only under -funsafe-math-optimizations when
     not optimizing for size and the multiply count is acceptable.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && !optimize_size
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* Save the base: the expansions below may reference it twice
     (once inside the sqrt/cbrt call and once for the powi factor).  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* N = 2*c; integral N means the exponent is N/2.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && !optimize_size
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1, narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      /* Multiply sqrt(x) by x**|n/2|, then reciprocate if
		 the exponent was negative.  */
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* N = round(3*c); then verify that N/3 converts back to
	 exactly C, i.e. the exponent really is a third of N.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((!optimize_size
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1,narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* Square cbrt(x) when |n| mod 3 == 2.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      /* Multiply by x**|n/3| and reciprocate for negative N.  */
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3041
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);  /* The base.  */
  arg1 = CALL_EXPR_ARG (exp, 1);  /* The integer exponent.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The TREE_INT_CST_HIGH test ensures the constant's value is
	 fully represented by the low HOST_WIDE_INT in N.  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (! optimize_size
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Convert both operands to the modes the libcall expects.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3104
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Try TARGET_MODE first, then successively wider modes.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  Reuse
	 TARGET only if it is a pseudo register of the right mode.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the character to search
	 for -- the NUL terminator.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 pattern that consumes it.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3213
3214 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3215 caller should emit a normal call, otherwise try to get the result
3216 in TARGET, if convenient (and in mode MODE if that's convenient). */
3217
3218 static rtx
3219 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3220 {
3221 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3222 {
3223 tree type = TREE_TYPE (exp);
3224 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3225 CALL_EXPR_ARG (exp, 1), type);
3226 if (result)
3227 return expand_expr (result, target, mode, EXPAND_NORMAL);
3228 }
3229 return NULL_RTX;
3230 }
3231
3232 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3233 caller should emit a normal call, otherwise try to get the result
3234 in TARGET, if convenient (and in mode MODE if that's convenient). */
3235
3236 static rtx
3237 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3238 {
3239 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3240 {
3241 tree type = TREE_TYPE (exp);
3242 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3243 CALL_EXPR_ARG (exp, 1), type);
3244 if (result)
3245 return expand_expr (result, target, mode, EXPAND_NORMAL);
3246
3247 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3248 }
3249 return NULL_RTX;
3250 }
3251
3252 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3253 caller should emit a normal call, otherwise try to get the result
3254 in TARGET, if convenient (and in mode MODE if that's convenient). */
3255
3256 static rtx
3257 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3258 {
3259 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3260 {
3261 tree type = TREE_TYPE (exp);
3262 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3263 CALL_EXPR_ARG (exp, 1), type);
3264 if (result)
3265 return expand_expr (result, target, mode, EXPAND_NORMAL);
3266 }
3267 return NULL_RTX;
3268 }
3269
3270 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3271 caller should emit a normal call, otherwise try to get the result
3272 in TARGET, if convenient (and in mode MODE if that's convenient). */
3273
3274 static rtx
3275 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3276 {
3277 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3278 {
3279 tree type = TREE_TYPE (exp);
3280 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3281 CALL_EXPR_ARG (exp, 1), type);
3282 if (result)
3283 return expand_expr (result, target, mode, EXPAND_NORMAL);
3284 }
3285 return NULL_RTX;
3286 }
3287
3288 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3289 bytes from constant string DATA + OFFSET and return it as target
3290 constant. */
3291
3292 static rtx
3293 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3294 enum machine_mode mode)
3295 {
3296 const char *str = (const char *) data;
3297
3298 gcc_assert (offset >= 0
3299 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3300 <= strlen (str) + 1));
3301
3302 return c_readstr (str + offset, mode);
3303 }
3304
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len,
					    TREE_TYPE (TREE_TYPE (fndecl)),
					    false, /*endp=*/0);
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If the folder simplified the call, expand any side-effect
	 statements first and then the final value.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pick up profile-derived hints about this block operation.  */
      stringop_block_profile (exp, &expected_align, &expected_size);
      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns DEST; materialize it if the block move
	 did not provide it.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3396
3397 /* Expand a call EXP to the mempcpy builtin.
3398 Return NULL_RTX if we failed; the caller should emit a normal call,
3399 otherwise try to get the result in TARGET, if convenient (and in
3400 mode MODE if that's convenient). If ENDP is 0 return the
3401 destination pointer, if ENDP is 1 return the end pointer ala
3402 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3403 stpcpy. */
3404
3405 static rtx
3406 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3407 {
3408 if (!validate_arglist (exp,
3409 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3410 return NULL_RTX;
3411 else
3412 {
3413 tree dest = CALL_EXPR_ARG (exp, 0);
3414 tree src = CALL_EXPR_ARG (exp, 1);
3415 tree len = CALL_EXPR_ARG (exp, 2);
3416 return expand_builtin_mempcpy_args (dest, src, len,
3417 TREE_TYPE (exp),
3418 target, mode, /*endp=*/ 1);
3419 }
3420 }
3421
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   TYPE is the return type of the call.  The other arguments and return value
   are the same as for expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];

      if (!fn)
	return NULL_RTX;

      return expand_expr (build_call_expr (fn, 3, dest, src, len),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);

      /* If the folder simplified the call, expand any side-effect
	 statements first and then the final value.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align,
				      false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise try an open-coded move by pieces for the constant
	 length.  */
      if (GET_CODE (len_rtx) == CONST_INT
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3513
3514 /* Expand expression EXP, which is a call to the memmove builtin. Return
3515 NULL_RTX if we failed; the caller should emit a normal call. */
3516
3517 static rtx
3518 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3519 {
3520 if (!validate_arglist (exp,
3521 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3522 return NULL_RTX;
3523 else
3524 {
3525 tree dest = CALL_EXPR_ARG (exp, 0);
3526 tree src = CALL_EXPR_ARG (exp, 1);
3527 tree len = CALL_EXPR_ARG (exp, 2);
3528 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3529 target, mode, ignore);
3530 }
3531 }
3532
3533 /* Helper function to do the actual work for expand_builtin_memmove. The
3534 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3535 so that this can also be called without constructing an actual CALL_EXPR.
3536 TYPE is the return type of the call. The other arguments and return value
3537 are the same as for expand_builtin_memmove. */
3538
3539 static rtx
3540 expand_builtin_memmove_args (tree dest, tree src, tree len,
3541 tree type, rtx target, enum machine_mode mode,
3542 int ignore)
3543 {
3544 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3545
3546 if (result)
3547 {
3548 STRIP_TYPE_NOPS (result);
3549 while (TREE_CODE (result) == COMPOUND_EXPR)
3550 {
3551 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3552 EXPAND_NORMAL);
3553 result = TREE_OPERAND (result, 1);
3554 }
3555 return expand_expr (result, target, mode, EXPAND_NORMAL);
3556 }
3557
3558 /* Otherwise, call the normal function. */
3559 return NULL_RTX;
3560 }
3561
3562 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3563 NULL_RTX if we failed the caller should emit a normal call. */
3564
3565 static rtx
3566 expand_builtin_bcopy (tree exp, int ignore)
3567 {
3568 tree type = TREE_TYPE (exp);
3569 tree src, dest, size;
3570
3571 if (!validate_arglist (exp,
3572 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3573 return NULL_RTX;
3574
3575 src = CALL_EXPR_ARG (exp, 0);
3576 dest = CALL_EXPR_ARG (exp, 1);
3577 size = CALL_EXPR_ARG (exp, 2);
3578
3579 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3580 This is done this way so that if it isn't expanded inline, we fall
3581 back to calling bcopy instead of memmove. */
3582 return expand_builtin_memmove_args (dest, src,
3583 fold_convert (sizetype, size),
3584 type, const0_rtx, VOIDmode,
3585 ignore);
3586 }
3587
3588 #ifndef HAVE_movstr
3589 # define HAVE_movstr 0
3590 # define CODE_FOR_movstr CODE_FOR_nothing
3591 #endif
3592
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* The caller wants the destination pointer: keep it in TARGET
	 and let the pattern write the end address to a scratch reg.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* The caller wants the end pointer: reuse TARGET for it when
	 possible, otherwise use a fresh register.  */
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  /* Honor the mode the pattern declares for its output operand.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3654
3655 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3656 NULL_RTX if we failed the caller should emit a normal call, otherwise
3657 try to get the result in TARGET, if convenient (and in mode MODE if that's
3658 convenient). */
3659
3660 static rtx
3661 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3662 {
3663 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3664 {
3665 tree dest = CALL_EXPR_ARG (exp, 0);
3666 tree src = CALL_EXPR_ARG (exp, 1);
3667 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3668 }
3669 return NULL_RTX;
3670 }
3671
3672 /* Helper function to do the actual work for expand_builtin_strcpy. The
3673 arguments to the builtin_strcpy call DEST and SRC are broken out
3674 so that this can also be called without constructing an actual CALL_EXPR.
3675 The other arguments and return value are the same as for
3676 expand_builtin_strcpy. */
3677
3678 static rtx
3679 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3680 rtx target, enum machine_mode mode)
3681 {
3682 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3683 if (result)
3684 return expand_expr (result, target, mode, EXPAND_NORMAL);
3685 return expand_movstr (dest, src, target, /*endp=*/0);
3686
3687 }
3688
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      if (!fn)
	return NULL_RTX;

      return expand_expr (build_call_expr (fn, 2, dst, src),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Copy LEN + 1 bytes (including the NUL) with mempcpy
	 semantics; endp == 2 asks for the end pointer minus one,
	 which is what stpcpy returns.  */
      lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (GET_CODE (len_rtx) == CONST_INT)
	    {
	      /* Fall back to strcpy and compute the result as the
		 destination pointer plus the known length.  */
	      ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
						dst, src, target, mode);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: the movstr instruction pattern.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3767
3768 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3769 bytes from constant string DATA + OFFSET and return it as target
3770 constant. */
3771
3772 rtx
3773 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3774 enum machine_mode mode)
3775 {
3776 const char *str = (const char *) data;
3777
3778 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3779 return const0_rtx;
3780
3781 return c_readstr (str + offset, mode);
3782 }
3783
3784 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3785 NULL_RTX if we failed the caller should emit a normal call. */
3786
static rtx
expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);
      tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);

      if (result)
	{
	  /* The folded form may be a COMPOUND_EXPR chain whose left
	     operands exist only for their side effects; expand and
	     discard those, keeping the rightmost value.  */
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* We must be passed a constant len and src parameter. */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* Include the terminating NUL in the source length.  */
      slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       (void *) p, dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   (void *) p, dest_align, false, 0);
	  /* Return the destination pointer, as strncpy does.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3845
3846 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3847 bytes from constant string DATA + OFFSET and return it as target
3848 constant. */
3849
3850 rtx
3851 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3852 enum machine_mode mode)
3853 {
3854 const char *c = (const char *) data;
3855 char *p = alloca (GET_MODE_SIZE (mode));
3856
3857 memset (p, *c, GET_MODE_SIZE (mode));
3858
3859 return c_readstr (p, mode);
3860 }
3861
3862 /* Callback routine for store_by_pieces. Return the RTL of a register
3863 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3864 char value given in the RTL register data. For example, if mode is
3865 4 bytes wide, return the RTL for 0x01010101*data. */
3866
3867 static rtx
3868 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3869 enum machine_mode mode)
3870 {
3871 rtx target, coeff;
3872 size_t size;
3873 char *p;
3874
3875 size = GET_MODE_SIZE (mode);
3876 if (size == 1)
3877 return (rtx) data;
3878
3879 p = alloca (size);
3880 memset (p, 1, size);
3881 coeff = c_readstr (p, mode);
3882
3883 target = convert_to_mode (mode, (rtx) data, 1);
3884 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3885 return force_reg (mode, target);
3886 }
3887
3888 /* Expand expression EXP, which is a call to the memset builtin. Return
3889 NULL_RTX if we failed the caller should emit a normal call, otherwise
3890 try to get the result in TARGET, if convenient (and in mode MODE if that's
3891 convenient). */
3892
3893 static rtx
3894 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3895 {
3896 if (!validate_arglist (exp,
3897 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3898 return NULL_RTX;
3899 else
3900 {
3901 tree dest = CALL_EXPR_ARG (exp, 0);
3902 tree val = CALL_EXPR_ARG (exp, 1);
3903 tree len = CALL_EXPR_ARG (exp, 2);
3904 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3905 }
3906 }
3907
3908 /* Helper function to do the actual work for expand_builtin_memset. The
3909 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3910 so that this can also be called without constructing an actual CALL_EXPR.
3911 The other arguments and return value are the same as for
3912 expand_builtin_memset. */
3913
static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line. */
  if (dest_align == 0)
    return NULL_RTX;

  /* Consult value profiling for a likely size/alignment of this block
     operation, but never let it lower the alignment we already know.  */
  stringop_block_profile (orig_exp, &expected_align, &expected_size);
  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST. */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects. */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  The saved trees are
     reused by the do_libcall fallback below, so the operands are not
     evaluated twice.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Non-constant fill value: replicate it at runtime.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      /* Return the destination pointer, as memset does.  */
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant fill value; reduce it to a host char, punting to a
     library call if it doesn't fit.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Non-zero fill byte: store by pieces or via the setmem pattern.
     (The zero case falls through to clear_storage below.)  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Fill value is zero: use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Rebuild a call to the original builtin (memset or bzero, per the
     decl we were expanding) using the stabilized arguments.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_expr (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_expr (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4033
4034 /* Expand expression EXP, which is a call to the bzero builtin. Return
4035 NULL_RTX if we failed the caller should emit a normal call. */
4036
4037 static rtx
4038 expand_builtin_bzero (tree exp)
4039 {
4040 tree dest, size;
4041
4042 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4043 return NULL_RTX;
4044
4045 dest = CALL_EXPR_ARG (exp, 0);
4046 size = CALL_EXPR_ARG (exp, 1);
4047
4048 /* New argument list transforming bzero(ptr x, int y) to
4049 memset(ptr x, int 0, size_t y). This is done this way
4050 so that if it isn't expanded inline, we fallback to
4051 calling bzero instead of memset. */
4052
4053 return expand_builtin_memset_args (dest, integer_zero_node,
4054 fold_convert (sizetype, size),
4055 const0_rtx, VOIDmode, exp);
4056 }
4057
4058 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4059 caller should emit a normal call, otherwise try to get the result
4060 in TARGET, if convenient (and in mode MODE if that's convenient). */
4061
4062 static rtx
4063 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4064 {
4065 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4066 INTEGER_TYPE, VOID_TYPE))
4067 {
4068 tree type = TREE_TYPE (exp);
4069 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4070 CALL_EXPR_ARG (exp, 1),
4071 CALL_EXPR_ARG (exp, 2), type);
4072 if (result)
4073 return expand_expr (result, target, mode, EXPAND_NORMAL);
4074 }
4075 return NULL_RTX;
4076 }
4077
4078 /* Expand expression EXP, which is a call to the memcmp built-in function.
4079 Return NULL_RTX if we failed and the
4080 caller should emit a normal call, otherwise try to get the result in
4081 TARGET, if convenient (and in mode MODE, if that's convenient). */
4082
static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try folding the call away entirely (e.g. both
	 arguments are constant strings).  */
      tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1),
					 CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer the cmpmem pattern; fall back to cmpstrn, which has the
       same interface for a known-length comparison.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function. */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);

    /* Set MEM_SIZE as appropriate.  */
    if (GET_CODE (arg3_rtx) == CONST_INT)
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The pattern may decline at expansion time; then emit a plain
       library call to memcmp instead.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4188
4189 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4190 if we failed the caller should emit a normal call, otherwise try to get
4191 the result in TARGET, if convenient. */
4192
static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try folding the call away entirely.  */
      tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Include the terminating NULs: comparing that many bytes is
	     equivalent to the full strcmp.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 2, arg1, arg2);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4337
4338 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4339 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4340 the result in TARGET, if convenient. */
4341
static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try folding the call away entirely.  */
      tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
					  CALL_EXPR_ARG (exp, 1),
					  CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Include the terminating NUL in the known lengths.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
			 fold_convert (TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 3, arg1, arg2, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4464
4465 /* Expand expression EXP, which is a call to the strcat builtin.
4466 Return NULL_RTX if we failed the caller should emit a normal call,
4467 otherwise try to get the result in TARGET, if convenient. */
4468
static rtx
expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dst = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return expand_expr (dst, target, mode, EXPAND_NORMAL);

      if (!optimize_size)
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newsrc, newdst,
	    strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  rtx insns;

	  /* Stabilize the argument list.  */
	  newsrc = builtin_save_expr (src);
	  dst = builtin_save_expr (dst);

	  /* Collect everything emitted below in a sequence so we can
	     discard it all if the strcpy expansion punts.  */
	  start_sequence ();

	  /* Create strlen (dst).  */
	  newdst = build_call_expr (strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
	    {
	      /* strcpy could not be expanded inline; throw away the
		 partial sequence and let the caller emit a libcall.  */
	      end_sequence (); /* Stop sequence.  */
	      return NULL_RTX;
	    }

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);

	  /* strcat returns its first argument.  */
	  return expand_expr (dst, target, mode, EXPAND_NORMAL);
	}

      return NULL_RTX;
    }
}
4521
4522 /* Expand expression EXP, which is a call to the strncat builtin.
4523 Return NULL_RTX if we failed the caller should emit a normal call,
4524 otherwise try to get the result in TARGET, if convenient. */
4525
4526 static rtx
4527 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4528 {
4529 if (validate_arglist (exp,
4530 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4531 {
4532 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4533 CALL_EXPR_ARG (exp, 1),
4534 CALL_EXPR_ARG (exp, 2));
4535 if (result)
4536 return expand_expr (result, target, mode, EXPAND_NORMAL);
4537 }
4538 return NULL_RTX;
4539 }
4540
4541 /* Expand expression EXP, which is a call to the strspn builtin.
4542 Return NULL_RTX if we failed the caller should emit a normal call,
4543 otherwise try to get the result in TARGET, if convenient. */
4544
4545 static rtx
4546 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4547 {
4548 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4549 {
4550 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4551 CALL_EXPR_ARG (exp, 1));
4552 if (result)
4553 return expand_expr (result, target, mode, EXPAND_NORMAL);
4554 }
4555 return NULL_RTX;
4556 }
4557
4558 /* Expand expression EXP, which is a call to the strcspn builtin.
4559 Return NULL_RTX if we failed the caller should emit a normal call,
4560 otherwise try to get the result in TARGET, if convenient. */
4561
4562 static rtx
4563 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4564 {
4565 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4566 {
4567 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4568 CALL_EXPR_ARG (exp, 1));
4569 if (result)
4570 return expand_expr (result, target, mode, EXPAND_NORMAL);
4571 }
4572 return NULL_RTX;
4573 }
4574
4575 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4576 if that's convenient. */
4577
rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  /* Cache for subsequent calls (see check at the top).  */
  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4611
4612 /* __builtin_args_info (N) returns word N of the arg space info
4613 for the current function. The number and meanings of words
4614 is controlled by the definition of CUMULATIVE_ARGS. */
4615
4616 static rtx
4617 expand_builtin_args_info (tree exp)
4618 {
4619 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4620 int *word_ptr = (int *) &crtl->args.info;
4621
4622 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4623
4624 if (call_expr_nargs (exp) != 0)
4625 {
4626 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4627 error ("argument of %<__builtin_args_info%> must be constant");
4628 else
4629 {
4630 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4631
4632 if (wordnum < 0 || wordnum >= nwords)
4633 error ("argument of %<__builtin_args_info%> out of range");
4634 else
4635 return GEN_INT (word_ptr[wordnum]);
4636 }
4637 }
4638 else
4639 error ("missing argument in %<__builtin_args_info%>");
4640
4641 return const0_rtx;
4642 }
4643
4644 /* Expand a call to __builtin_next_arg. */
4645
4646 static rtx
4647 expand_builtin_next_arg (void)
4648 {
4649 /* Checking arguments is already done in fold_builtin_next_arg
4650 that must be called before this function. */
4651 return expand_binop (ptr_mode, add_optab,
4652 crtl->args.internal_arg_pointer,
4653 crtl->args.arg_offset_rtx,
4654 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4655 }
4656
4657 /* Make it easier for the backends by protecting the valist argument
4658 from multiple evaluations. */
4659
static tree
stabilize_va_list (tree valist, int needs_lvalue)
{
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 TREE_TYPE (va_list_type_node), but it's possible we've
	 actually been given an array (an actual va_list_type_node).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	  valist = build_fold_addr_expr_with_type (valist, p1);
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (va_list_type_node);
	  valist = fold_build1 (ADDR_EXPR, pt, valist);
	  /* Mark the ADDR_EXPR so the save_expr below fires and the
	     address is computed only once.  */
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Hand back an lvalue by dereferencing the (now stable)
	 address.  */
      valist = build_fold_indirect_ref (valist);
    }

  return valist;
}
4699
4700 /* The "standard" definition of va_list is void*. */
4701
tree
std_build_builtin_va_list (void)
{
  /* The "standard" ABI treats va_list as a plain void pointer.  */
  return ptr_type_node;
}
4707
4708 /* The "standard" implementation of va_start: just assign `nextarg' to
4709 the variable. */
4710
4711 void
4712 std_expand_builtin_va_start (tree valist, rtx nextarg)
4713 {
4714 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4715 convert_move (va_r, nextarg, 0);
4716 }
4717
4718 /* Expand EXP, a call to __builtin_va_start. */
4719
4720 static rtx
4721 expand_builtin_va_start (tree exp)
4722 {
4723 rtx nextarg;
4724 tree valist;
4725
4726 if (call_expr_nargs (exp) < 2)
4727 {
4728 error ("too few arguments to function %<va_start%>");
4729 return const0_rtx;
4730 }
4731
4732 if (fold_builtin_next_arg (exp, true))
4733 return const0_rtx;
4734
4735 nextarg = expand_builtin_next_arg ();
4736 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4737
4738 if (targetm.expand_builtin_va_start)
4739 targetm.expand_builtin_va_start (valist, nextarg);
4740 else
4741 std_expand_builtin_va_start (valist, nextarg);
4742
4743 return const0_rtx;
4744 }
4745
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.

   VALIST is the va_list expression (already stabilized by the caller),
   TYPE is the type being fetched, and PRE_P/POST_P collect gimple
   statements to be executed before/after the access.  Returns a tree
   expression yielding the fetched value.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference arrive as a pointer; fetch the pointer
     here and strip the extra indirection at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = valist_tmp + (boundary - 1) ...  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      /* ... & -boundary, i.e. round the pointer up to the boundary.  */
      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when deferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For pass-by-reference arguments, ADDR points to the pointer; strip
     one level of indirection before the final dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4833
4834 /* Build an indirect-ref expression over the given TREE, which represents a
4835 piece of a va_arg() expansion. */
4836 tree
4837 build_va_arg_indirect_ref (tree addr)
4838 {
4839 addr = build_fold_indirect_ref (addr);
4840
4841 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4842 mf_mark (addr);
4843
4844 return addr;
4845 }
4846
4847 /* Return a dummy expression of type TYPE in order to keep going after an
4848 error. */
4849
4850 static tree
4851 dummy_object (tree type)
4852 {
4853 tree t = build_int_cst (build_pointer_type (type), 0);
4854 return build1 (INDIRECT_REF, type, t);
4855 }
4856
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR: its operand is the va_list expression and
   its type is the type being extracted.  PRE_P/POST_P collect side-effect
   statements.  On success *EXPR_P is replaced by the target-specific
   expansion (or a dummy object after a diagnostic).  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
{
  tree promoted_type, want_va_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;

  /* Verify that valist is of the proper type.  */
  want_va_type = va_list_type_node;
  have_va_type = TREE_TYPE (valist);

  if (have_va_type == error_mark_node)
    return GS_ERROR;

  if (TREE_CODE (want_va_type) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE
	  || POINTER_TYPE_P (have_va_type))
	{
	  want_va_type = TREE_TYPE (want_va_type);
	  have_va_type = TREE_TYPE (have_va_type);
	}
    }

  if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
    {
      error ("first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      /* Emit the supplementary hint only once per compilation.  */
      static bool gave_help;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warning (0, "%qT is promoted to %qT when passed through %<...%>",
	       type, promoted_type);
      if (! gave_help)
	{
	  gave_help = true;
	  inform ("(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      inform ("if this code is reached, the program will abort");
      t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
      append_to_statement_list (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (va_list_type_node), but it's possible we've
	     actually been given an array (an actual va_list_type_node).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	      valist = build_fold_addr_expr_with_type (valist, p1);
	    }
	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME:Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4954
4955 /* Expand EXP, a call to __builtin_va_end. */
4956
4957 static rtx
4958 expand_builtin_va_end (tree exp)
4959 {
4960 tree valist = CALL_EXPR_ARG (exp, 0);
4961
4962 /* Evaluate for side effects, if needed. I hate macros that don't
4963 do that. */
4964 if (TREE_SIDE_EFFECTS (valist))
4965 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4966
4967 return const0_rtx;
4968 }
4969
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  Always returns const0_rtx.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be usable as an lvalue; the source need not.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the whole object as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
			  VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5018
5019 /* Expand a call to one of the builtin functions __builtin_frame_address or
5020 __builtin_return_address. */
5021
5022 static rtx
5023 expand_builtin_frame_address (tree fndecl, tree exp)
5024 {
5025 /* The argument must be a nonnegative integer constant.
5026 It counts the number of frames to scan up the stack.
5027 The value is the return address saved in that frame. */
5028 if (call_expr_nargs (exp) == 0)
5029 /* Warning about missing arg was already issued. */
5030 return const0_rtx;
5031 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5032 {
5033 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5034 error ("invalid argument to %<__builtin_frame_address%>");
5035 else
5036 error ("invalid argument to %<__builtin_return_address%>");
5037 return const0_rtx;
5038 }
5039 else
5040 {
5041 rtx tem
5042 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5043 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5044
5045 /* Some ports cannot access arbitrary stack frames. */
5046 if (tem == NULL)
5047 {
5048 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5049 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5050 else
5051 warning (0, "unsupported argument to %<__builtin_return_address%>");
5052 return const0_rtx;
5053 }
5054
5055 /* For __builtin_frame_address, return what we've got. */
5056 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5057 return tem;
5058
5059 if (!REG_P (tem)
5060 && ! CONSTANT_P (tem))
5061 tem = copy_to_mode_reg (Pmode, tem);
5062 return tem;
5063 }
5064 }
5065
5066 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5067 we failed and the caller should emit a normal call, otherwise try to get
5068 the result in TARGET, if convenient. */
5069
5070 static rtx
5071 expand_builtin_alloca (tree exp, rtx target)
5072 {
5073 rtx op0;
5074 rtx result;
5075
5076 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5077 should always expand to function calls. These can be intercepted
5078 in libmudflap. */
5079 if (flag_mudflap)
5080 return NULL_RTX;
5081
5082 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5083 return NULL_RTX;
5084
5085 /* Compute the argument. */
5086 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5087
5088 /* Allocate the desired space. */
5089 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5090 result = convert_memory_address (ptr_mode, result);
5091
5092 return result;
5093 }
5094
5095 /* Expand a call to a bswap builtin with argument ARG0. MODE
5096 is the mode to expand with. */
5097
5098 static rtx
5099 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5100 {
5101 enum machine_mode mode;
5102 tree arg;
5103 rtx op0;
5104
5105 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5106 return NULL_RTX;
5107
5108 arg = CALL_EXPR_ARG (exp, 0);
5109 mode = TYPE_MODE (TREE_TYPE (arg));
5110 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5111
5112 target = expand_unop (mode, bswap_optab, op0, target, 1);
5113
5114 gcc_assert (target);
5115
5116 return convert_to_mode (mode, target, 0);
5117 }
5118
5119 /* Expand a call to a unary builtin in EXP.
5120 Return NULL_RTX if a normal call should be emitted rather than expanding the
5121 function in-line. If convenient, the result should be placed in TARGET.
5122 SUBTARGET may be used as the target for computing one of EXP's operands. */
5123
5124 static rtx
5125 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5126 rtx subtarget, optab op_optab)
5127 {
5128 rtx op0;
5129
5130 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5131 return NULL_RTX;
5132
5133 /* Compute the argument. */
5134 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5135 VOIDmode, EXPAND_NORMAL);
5136 /* Compute op, into TARGET if possible.
5137 Set TARGET to wherever the result comes back. */
5138 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5139 op_optab, op0, target, 1);
5140 gcc_assert (target);
5141
5142 return convert_to_mode (target_mode, target, 0);
5143 }
5144
5145 /* If the string passed to fputs is a constant and is one character
5146 long, we attempt to transform this call into __builtin_fputc(). */
5147
5148 static rtx
5149 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5150 {
5151 /* Verify the arguments in the original call. */
5152 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5153 {
5154 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5155 CALL_EXPR_ARG (exp, 1),
5156 (target == const0_rtx),
5157 unlocked, NULL_TREE);
5158 if (result)
5159 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5160 }
5161 return NULL_RTX;
5162 }
5163
5164 /* Expand a call to __builtin_expect. We just return our argument
5165 as the builtin_expect semantic should've been already executed by
5166 tree branch prediction pass. */
5167
5168 static rtx
5169 expand_builtin_expect (tree exp, rtx target)
5170 {
5171 tree arg, c;
5172
5173 if (call_expr_nargs (exp) < 2)
5174 return const0_rtx;
5175 arg = CALL_EXPR_ARG (exp, 0);
5176 c = CALL_EXPR_ARG (exp, 1);
5177
5178 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5179 /* When guessing was done, the hints should be already stripped away. */
5180 gcc_assert (!flag_guess_branch_prob
5181 || optimize == 0 || errorcount || sorrycount);
5182 return target;
5183 }
5184
/* Emit code that terminates execution abruptly: use the target's "trap"
   instruction when one is available, otherwise emit a libcall to abort.
   A barrier follows because control never resumes past this point.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
  emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5196
5197 /* Expand EXP, a call to fabs, fabsf or fabsl.
5198 Return NULL_RTX if a normal call should be emitted rather than expanding
5199 the function inline. If convenient, the result should be placed
5200 in TARGET. SUBTARGET may be used as the target for computing
5201 the operand. */
5202
5203 static rtx
5204 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5205 {
5206 enum machine_mode mode;
5207 tree arg;
5208 rtx op0;
5209
5210 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5211 return NULL_RTX;
5212
5213 arg = CALL_EXPR_ARG (exp, 0);
5214 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5215 mode = TYPE_MODE (TREE_TYPE (arg));
5216 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5217 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5218 }
5219
5220 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5221 Return NULL is a normal call should be emitted rather than expanding the
5222 function inline. If convenient, the result should be placed in TARGET.
5223 SUBTARGET may be used as the target for computing the operand. */
5224
5225 static rtx
5226 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5227 {
5228 rtx op0, op1;
5229 tree arg;
5230
5231 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5232 return NULL_RTX;
5233
5234 arg = CALL_EXPR_ARG (exp, 0);
5235 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5236
5237 arg = CALL_EXPR_ARG (exp, 1);
5238 op1 = expand_normal (arg);
5239
5240 return expand_copysign (op0, op1, target);
5241 }
5242
5243 /* Create a new constant string literal and return a char* pointer to it.
5244 The STRING_CST value is the LEN characters at STR. */
5245 tree
5246 build_string_literal (int len, const char *str)
5247 {
5248 tree t, elem, index, type;
5249
5250 t = build_string (len, str);
5251 elem = build_type_variant (char_type_node, 1, 0);
5252 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5253 type = build_array_type (elem, index);
5254 TREE_TYPE (t) = type;
5255 TREE_CONSTANT (t) = 1;
5256 TREE_READONLY (t) = 1;
5257 TREE_STATIC (t) = 1;
5258
5259 type = build_pointer_type (type);
5260 t = build1 (ADDR_EXPR, type, t);
5261
5262 type = build_pointer_type (elem);
5263 t = build1 (NOP_EXPR, type, t);
5264 return t;
5265 }
5266
/* Expand EXP, a call to printf or printf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a printf_unlocked
   call.  */
static rtx
expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
		       bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTCHAR];
  tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs == 0)
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  /* Target charset conversion must be available to compare format
     characters meaningfully.  */
  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if ((nargs != 2)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
	return NULL_RTX;
      if (fn_puts)
	fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
    }
  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 2)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
	return NULL_RTX;
      if (fn_putchar)
	fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 1)
	return NULL_RTX;

      /* If the format specifier was "", printf does nothing.  */
      if (fmt_str[0] == '\0')
	return const0_rtx;
      /* If the format specifier has length of 1, call putchar.  */
      if (fmt_str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  arg = build_int_cst (NULL_TREE, fmt_str[0]);
	  if (fn_putchar)
	    fn = build_call_expr (fn_putchar, 1, arg);
	}
      else
	{
	  /* If the format specifier was "string\n", call puts("string").  */
	  size_t len = strlen (fmt_str);
	  if ((unsigned char)fmt_str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = alloca (len);
	      memcpy (newstr, fmt_str, len - 1);
	      newstr[len - 1] = 0;
	      arg = build_string_literal (len, newstr);
	      if (fn_puts)
		fn = build_call_expr (fn_puts, 1, arg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_RTX;
	}
    }

  /* FN is still NULL when the matching replacement builtin was not
     available; emit the normal printf call in that case.  */
  if (!fn)
    return NULL_RTX;
  /* Propagate whether the original call was in tail position.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5374
/* Expand EXP, a call to fprintf or fprintf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a fprintf_unlocked
   call.  */
static rtx
expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
			bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, fp, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call: a FILE*
     followed by the format string.  */
  if (nargs < 2)
    return NULL_RTX;
  fp = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fp)))
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 1);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s", call __builtin_fputs(arg,fp).  */
  if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if ((nargs != 3)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, arg, fp);
    }
  /* If the format specifier was "%c", call __builtin_fputc(arg,fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 3)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputc)
	fn = build_call_expr (fn_fputc, 2, arg, fp);
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 2)
	return NULL_RTX;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* Evaluate and ignore FILE* argument for side-effects.  */
	  expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf(stream,string) with fputs(string,stream).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  /* FN is still NULL when the needed replacement builtin was not
     available; emit the normal fprintf call in that case.  */
  if (!fn)
    return NULL_RTX;
  /* Propagate whether the original call was in tail position.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5467
5468 /* Expand a call EXP to sprintf. Return NULL_RTX if
5469 a normal call should be emitted rather than expanding the function
5470 inline. If convenient, the result should be placed in TARGET with
5471 mode MODE. */
5472
5473 static rtx
5474 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5475 {
5476 tree dest, fmt;
5477 const char *fmt_str;
5478 int nargs = call_expr_nargs (exp);
5479
5480 /* Verify the required arguments in the original call. */
5481 if (nargs < 2)
5482 return NULL_RTX;
5483 dest = CALL_EXPR_ARG (exp, 0);
5484 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5485 return NULL_RTX;
5486 fmt = CALL_EXPR_ARG (exp, 0);
5487 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5488 return NULL_RTX;
5489
5490 /* Check whether the format is a literal string constant. */
5491 fmt_str = c_getstr (fmt);
5492 if (fmt_str == NULL)
5493 return NULL_RTX;
5494
5495 if (!init_target_chars ())
5496 return NULL_RTX;
5497
5498 /* If the format doesn't contain % args or %%, use strcpy. */
5499 if (strchr (fmt_str, target_percent) == 0)
5500 {
5501 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5502 tree exp;
5503
5504 if ((nargs > 2) || ! fn)
5505 return NULL_RTX;
5506 expand_expr (build_call_expr (fn, 2, dest, fmt),
5507 const0_rtx, VOIDmode, EXPAND_NORMAL);
5508 if (target == const0_rtx)
5509 return const0_rtx;
5510 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5511 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5512 }
5513 /* If the format is "%s", use strcpy if the result isn't used. */
5514 else if (strcmp (fmt_str, target_percent_s) == 0)
5515 {
5516 tree fn, arg, len;
5517 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5518
5519 if (! fn)
5520 return NULL_RTX;
5521 if (nargs != 3)
5522 return NULL_RTX;
5523 arg = CALL_EXPR_ARG (exp, 2);
5524 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5525 return NULL_RTX;
5526
5527 if (target != const0_rtx)
5528 {
5529 len = c_strlen (arg, 1);
5530 if (! len || TREE_CODE (len) != INTEGER_CST)
5531 return NULL_RTX;
5532 }
5533 else
5534 len = NULL_TREE;
5535
5536 expand_expr (build_call_expr (fn, 2, dest, arg),
5537 const0_rtx, VOIDmode, EXPAND_NORMAL);
5538
5539 if (target == const0_rtx)
5540 return const0_rtx;
5541 return expand_expr (len, target, mode, EXPAND_NORMAL);
5542 }
5543
5544 return NULL_RTX;
5545 }
5546
5547 /* Expand a call to either the entry or exit function profiler. */
5548
5549 static rtx
5550 expand_builtin_profile_func (bool exitp)
5551 {
5552 rtx this, which;
5553
5554 this = DECL_RTL (current_function_decl);
5555 gcc_assert (MEM_P (this));
5556 this = XEXP (this, 0);
5557
5558 if (exitp)
5559 which = profile_function_exit_libfunc;
5560 else
5561 which = profile_function_entry_libfunc;
5562
5563 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5564 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5565 0),
5566 Pmode);
5567
5568 return const0_rtx;
5569 }
5570
/* Expand a call to __builtin___clear_cache.  When the target provides a
   "clear_cache" insn we emit it directly; otherwise we either fall back
   to a call to libgcc's __clear_cache() or drop the call entirely,
   depending on whether CLEAR_INSN_CACHE is defined.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Force each operand into a form the insn's predicates accept.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5623
5624 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5625
5626 static rtx
5627 round_trampoline_addr (rtx tramp)
5628 {
5629 rtx temp, addend, mask;
5630
5631 /* If we don't need too much alignment, we'll have been guaranteed
5632 proper alignment by get_trampoline_type. */
5633 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5634 return tramp;
5635
5636 /* Round address up to desired boundary. */
5637 temp = gen_reg_rtx (Pmode);
5638 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5639 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5640
5641 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5642 temp, 0, OPTAB_LIB_WIDEN);
5643 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5644 temp, 0, OPTAB_LIB_WIDEN);
5645
5646 return tramp;
5647 }
5648
/* Expand a call to __builtin_init_trampoline: copy the trampoline
   template (if the target has one) into the block at the trampoline
   address and fill in the target function and static chain.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx r_tramp, r_func, r_chain;
#ifdef TRAMPOLINE_TEMPLATE
  rtx blktramp;
#endif

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  r_tramp = round_trampoline_addr (r_tramp);
#ifdef TRAMPOLINE_TEMPLATE
  /* Copy the canned template into place before patching it.  */
  blktramp = gen_rtx_MEM (BLKmode, r_tramp);
  set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
  emit_block_move (blktramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
#endif
  /* Record that this translation unit created a trampoline.  */
  trampolines_created = 1;
  INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);

  return const0_rtx;
}
5683
/* Expand a call to __builtin_adjust_trampoline: align the trampoline
   address and let the target post-process it if it defines
   TRAMPOLINE_ADJUST_ADDRESS.  Returns the adjusted address.  */

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
#ifdef TRAMPOLINE_ADJUST_ADDRESS
  TRAMPOLINE_ADJUST_ADDRESS (tramp);
#endif

  return tramp;
}
5700
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* FMODE is the mode of the floating point argument, RMODE the mode
     of the integer result.  */
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      emit_unop_insn (icode, target, temp, UNKNOWN);
      return target;
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0". */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
			 build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in a single word: reinterpret it as an integer
	 of the same size, if such an integer mode exists.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: extract only the word that contains the
	 sign bit, and adjust BITPOS to be relative to that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the mask 1 << bitpos as a two-half constant, since
	 BITPOS may exceed the width of a HOST_WIDE_INT.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (imode != rmode)
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5819
5820 /* Expand fork or exec calls. TARGET is the desired target of the
5821 call. EXP is the call. FN is the
5822 identificator of the actual function. IGNORE is nonzero if the
5823 value is to be ignored. */
5824
5825 static rtx
5826 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5827 {
5828 tree id, decl;
5829 tree call;
5830
5831 /* If we are not profiling, just call the function. */
5832 if (!profile_arc_flag)
5833 return NULL_RTX;
5834
5835 /* Otherwise call the wrapper. This should be equivalent for the rest of
5836 compiler, so the code does not diverge, and the wrapper may run the
5837 code necessary for keeping the profiling sane. */
5838
5839 switch (DECL_FUNCTION_CODE (fn))
5840 {
5841 case BUILT_IN_FORK:
5842 id = get_identifier ("__gcov_fork");
5843 break;
5844
5845 case BUILT_IN_EXECL:
5846 id = get_identifier ("__gcov_execl");
5847 break;
5848
5849 case BUILT_IN_EXECV:
5850 id = get_identifier ("__gcov_execv");
5851 break;
5852
5853 case BUILT_IN_EXECLP:
5854 id = get_identifier ("__gcov_execlp");
5855 break;
5856
5857 case BUILT_IN_EXECLE:
5858 id = get_identifier ("__gcov_execle");
5859 break;
5860
5861 case BUILT_IN_EXECVP:
5862 id = get_identifier ("__gcov_execvp");
5863 break;
5864
5865 case BUILT_IN_EXECVE:
5866 id = get_identifier ("__gcov_execve");
5867 break;
5868
5869 default:
5870 gcc_unreachable ();
5871 }
5872
5873 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5874 DECL_EXTERNAL (decl) = 1;
5875 TREE_PUBLIC (decl) = 1;
5876 DECL_ARTIFICIAL (decl) = 1;
5877 TREE_NOTHROW (decl) = 1;
5878 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5879 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5880 call = rewrite_call_expr (exp, 0, decl, 0);
5881 return expand_call (call, target, ignore);
5882 }
5883
5884
5885 \f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
5901
5902 /* Expand the memory expression LOC and return the appropriate memory operand
5903 for the builtin_sync operations. */
5904
5905 static rtx
5906 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5907 {
5908 rtx addr, mem;
5909
5910 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5911
5912 /* Note that we explicitly do not want any alias information for this
5913 memory, so that we kill all other live memories. Otherwise we don't
5914 satisfy the full barrier semantics of the intrinsic. */
5915 mem = validize_mem (gen_rtx_MEM (mode, addr));
5916
5917 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5918 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5919 MEM_VOLATILE_P (mem) = 1;
5920
5921 return mem;
5922 }
5923
5924 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5925 EXP is the CALL_EXPR. CODE is the rtx code
5926 that corresponds to the arithmetic or logical operation from the name;
5927 an exception here is that NOT actually means NAND. TARGET is an optional
5928 place for us to store the results; AFTER is true if this is the
5929 fetch_and_xxx form. IGNORE is true if we don't actually care about
5930 the result of the operation at all. */
5931
5932 static rtx
5933 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5934 enum rtx_code code, bool after,
5935 rtx target, bool ignore)
5936 {
5937 rtx val, mem;
5938 enum machine_mode old_mode;
5939
5940 /* Expand the operands. */
5941 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5942
5943 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5944 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5945 of CONST_INTs, where we know the old_mode only from the call argument. */
5946 old_mode = GET_MODE (val);
5947 if (old_mode == VOIDmode)
5948 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5949 val = convert_modes (mode, old_mode, val, 1);
5950
5951 if (ignore)
5952 return expand_sync_operation (mem, val, code);
5953 else
5954 return expand_sync_fetch_operation (mem, val, code, after, target);
5955 }
5956
5957 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5958 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5959 true if this is the boolean form. TARGET is a place for us to store the
5960 results; this is NOT optional if IS_BOOL is true. */
5961
5962 static rtx
5963 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5964 bool is_bool, rtx target)
5965 {
5966 rtx old_val, new_val, mem;
5967 enum machine_mode old_mode;
5968
5969 /* Expand the operands. */
5970 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5971
5972
5973 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5974 mode, EXPAND_NORMAL);
5975 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5976 of CONST_INTs, where we know the old_mode only from the call argument. */
5977 old_mode = GET_MODE (old_val);
5978 if (old_mode == VOIDmode)
5979 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5980 old_val = convert_modes (mode, old_mode, old_val, 1);
5981
5982 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5983 mode, EXPAND_NORMAL);
5984 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5985 of CONST_INTs, where we know the old_mode only from the call argument. */
5986 old_mode = GET_MODE (new_val);
5987 if (old_mode == VOIDmode)
5988 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5989 new_val = convert_modes (mode, old_mode, new_val, 1);
5990
5991 if (is_bool)
5992 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5993 else
5994 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5995 }
5996
5997 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5998 general form is actually an atomic exchange, and some targets only
5999 support a reduced form with the second argument being a constant 1.
6000 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6001 the results. */
6002
6003 static rtx
6004 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6005 rtx target)
6006 {
6007 rtx val, mem;
6008 enum machine_mode old_mode;
6009
6010 /* Expand the operands. */
6011 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6012 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6013 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6014 of CONST_INTs, where we know the old_mode only from the call argument. */
6015 old_mode = GET_MODE (val);
6016 if (old_mode == VOIDmode)
6017 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6018 val = convert_modes (mode, old_mode, val, 1);
6019
6020 return expand_sync_lock_test_and_set (mem, val, target);
6021 }
6022
/* Expand the __sync_synchronize intrinsic: emit a full memory
   barrier.  */

static void
expand_builtin_synchronize (void)
{
  tree x;

#ifdef HAVE_memory_barrier
  /* Prefer the target's dedicated memory barrier instruction when
     one is provided in the md file.  */
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
	      tree_cons (NULL, build_string (6, "memory"), NULL));
  ASM_VOLATILE_P (x) = 1;
  expand_asm_expr (x);
}
6045
6046 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6047
6048 static void
6049 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6050 {
6051 enum insn_code icode;
6052 rtx mem, insn;
6053 rtx val = const0_rtx;
6054
6055 /* Expand the operands. */
6056 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6057
6058 /* If there is an explicit operation in the md file, use it. */
6059 icode = sync_lock_release[mode];
6060 if (icode != CODE_FOR_nothing)
6061 {
6062 if (!insn_data[icode].operand[1].predicate (val, mode))
6063 val = force_reg (mode, val);
6064
6065 insn = GEN_FCN (icode) (mem, val);
6066 if (insn)
6067 {
6068 emit_insn (insn);
6069 return;
6070 }
6071 }
6072
6073 /* Otherwise we can implement this operation by emitting a barrier
6074 followed by a store of zero. */
6075 expand_builtin_synchronize ();
6076 emit_move_insn (mem, val);
6077 }
6078 \f
6079 /* Expand an expression EXP that calls a built-in function,
6080 with result going to TARGET if that's convenient
6081 (and in mode MODE if that's convenient).
6082 SUBTARGET may be used as the target for computing one of EXP's operands.
6083 IGNORE is nonzero if the value is to be ignored. */
6084
6085 rtx
6086 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6087 int ignore)
6088 {
6089 tree fndecl = get_callee_fndecl (exp);
6090 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6091 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6092
6093 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6094 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6095
6096 /* When not optimizing, generate calls to library functions for a certain
6097 set of builtins. */
6098 if (!optimize
6099 && !called_as_built_in (fndecl)
6100 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6101 && fcode != BUILT_IN_ALLOCA)
6102 return expand_call (exp, target, ignore);
6103
6104 /* The built-in function expanders test for target == const0_rtx
6105 to determine whether the function's result will be ignored. */
6106 if (ignore)
6107 target = const0_rtx;
6108
6109 /* If the result of a pure or const built-in function is ignored, and
6110 none of its arguments are volatile, we can avoid expanding the
6111 built-in call and just evaluate the arguments for side-effects. */
6112 if (target == const0_rtx
6113 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6114 {
6115 bool volatilep = false;
6116 tree arg;
6117 call_expr_arg_iterator iter;
6118
6119 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6120 if (TREE_THIS_VOLATILE (arg))
6121 {
6122 volatilep = true;
6123 break;
6124 }
6125
6126 if (! volatilep)
6127 {
6128 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6129 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6130 return const0_rtx;
6131 }
6132 }
6133
6134 switch (fcode)
6135 {
6136 CASE_FLT_FN (BUILT_IN_FABS):
6137 target = expand_builtin_fabs (exp, target, subtarget);
6138 if (target)
6139 return target;
6140 break;
6141
6142 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6143 target = expand_builtin_copysign (exp, target, subtarget);
6144 if (target)
6145 return target;
6146 break;
6147
6148 /* Just do a normal library call if we were unable to fold
6149 the values. */
6150 CASE_FLT_FN (BUILT_IN_CABS):
6151 break;
6152
6153 CASE_FLT_FN (BUILT_IN_EXP):
6154 CASE_FLT_FN (BUILT_IN_EXP10):
6155 CASE_FLT_FN (BUILT_IN_POW10):
6156 CASE_FLT_FN (BUILT_IN_EXP2):
6157 CASE_FLT_FN (BUILT_IN_EXPM1):
6158 CASE_FLT_FN (BUILT_IN_LOGB):
6159 CASE_FLT_FN (BUILT_IN_LOG):
6160 CASE_FLT_FN (BUILT_IN_LOG10):
6161 CASE_FLT_FN (BUILT_IN_LOG2):
6162 CASE_FLT_FN (BUILT_IN_LOG1P):
6163 CASE_FLT_FN (BUILT_IN_TAN):
6164 CASE_FLT_FN (BUILT_IN_ASIN):
6165 CASE_FLT_FN (BUILT_IN_ACOS):
6166 CASE_FLT_FN (BUILT_IN_ATAN):
6167 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6168 because of possible accuracy problems. */
6169 if (! flag_unsafe_math_optimizations)
6170 break;
6171 CASE_FLT_FN (BUILT_IN_SQRT):
6172 CASE_FLT_FN (BUILT_IN_FLOOR):
6173 CASE_FLT_FN (BUILT_IN_CEIL):
6174 CASE_FLT_FN (BUILT_IN_TRUNC):
6175 CASE_FLT_FN (BUILT_IN_ROUND):
6176 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6177 CASE_FLT_FN (BUILT_IN_RINT):
6178 target = expand_builtin_mathfn (exp, target, subtarget);
6179 if (target)
6180 return target;
6181 break;
6182
6183 CASE_FLT_FN (BUILT_IN_ILOGB):
6184 if (! flag_unsafe_math_optimizations)
6185 break;
6186 CASE_FLT_FN (BUILT_IN_ISINF):
6187 CASE_FLT_FN (BUILT_IN_FINITE):
6188 case BUILT_IN_ISFINITE:
6189 case BUILT_IN_ISNORMAL:
6190 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6191 if (target)
6192 return target;
6193 break;
6194
6195 CASE_FLT_FN (BUILT_IN_LCEIL):
6196 CASE_FLT_FN (BUILT_IN_LLCEIL):
6197 CASE_FLT_FN (BUILT_IN_LFLOOR):
6198 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6199 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6200 if (target)
6201 return target;
6202 break;
6203
6204 CASE_FLT_FN (BUILT_IN_LRINT):
6205 CASE_FLT_FN (BUILT_IN_LLRINT):
6206 CASE_FLT_FN (BUILT_IN_LROUND):
6207 CASE_FLT_FN (BUILT_IN_LLROUND):
6208 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6209 if (target)
6210 return target;
6211 break;
6212
6213 CASE_FLT_FN (BUILT_IN_POW):
6214 target = expand_builtin_pow (exp, target, subtarget);
6215 if (target)
6216 return target;
6217 break;
6218
6219 CASE_FLT_FN (BUILT_IN_POWI):
6220 target = expand_builtin_powi (exp, target, subtarget);
6221 if (target)
6222 return target;
6223 break;
6224
6225 CASE_FLT_FN (BUILT_IN_ATAN2):
6226 CASE_FLT_FN (BUILT_IN_LDEXP):
6227 CASE_FLT_FN (BUILT_IN_SCALB):
6228 CASE_FLT_FN (BUILT_IN_SCALBN):
6229 CASE_FLT_FN (BUILT_IN_SCALBLN):
6230 if (! flag_unsafe_math_optimizations)
6231 break;
6232
6233 CASE_FLT_FN (BUILT_IN_FMOD):
6234 CASE_FLT_FN (BUILT_IN_REMAINDER):
6235 CASE_FLT_FN (BUILT_IN_DREM):
6236 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6237 if (target)
6238 return target;
6239 break;
6240
6241 CASE_FLT_FN (BUILT_IN_CEXPI):
6242 target = expand_builtin_cexpi (exp, target, subtarget);
6243 gcc_assert (target);
6244 return target;
6245
6246 CASE_FLT_FN (BUILT_IN_SIN):
6247 CASE_FLT_FN (BUILT_IN_COS):
6248 if (! flag_unsafe_math_optimizations)
6249 break;
6250 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6251 if (target)
6252 return target;
6253 break;
6254
6255 CASE_FLT_FN (BUILT_IN_SINCOS):
6256 if (! flag_unsafe_math_optimizations)
6257 break;
6258 target = expand_builtin_sincos (exp);
6259 if (target)
6260 return target;
6261 break;
6262
6263 case BUILT_IN_APPLY_ARGS:
6264 return expand_builtin_apply_args ();
6265
6266 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6267 FUNCTION with a copy of the parameters described by
6268 ARGUMENTS, and ARGSIZE. It returns a block of memory
6269 allocated on the stack into which is stored all the registers
6270 that might possibly be used for returning the result of a
6271 function. ARGUMENTS is the value returned by
6272 __builtin_apply_args. ARGSIZE is the number of bytes of
6273 arguments that must be copied. ??? How should this value be
6274 computed? We'll also need a safe worst case value for varargs
6275 functions. */
6276 case BUILT_IN_APPLY:
6277 if (!validate_arglist (exp, POINTER_TYPE,
6278 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6279 && !validate_arglist (exp, REFERENCE_TYPE,
6280 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6281 return const0_rtx;
6282 else
6283 {
6284 rtx ops[3];
6285
6286 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6287 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6288 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6289
6290 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6291 }
6292
6293 /* __builtin_return (RESULT) causes the function to return the
6294 value described by RESULT. RESULT is address of the block of
6295 memory returned by __builtin_apply. */
6296 case BUILT_IN_RETURN:
6297 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6298 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6299 return const0_rtx;
6300
6301 case BUILT_IN_SAVEREGS:
6302 return expand_builtin_saveregs ();
6303
6304 case BUILT_IN_ARGS_INFO:
6305 return expand_builtin_args_info (exp);
6306
6307 case BUILT_IN_VA_ARG_PACK:
6308 /* All valid uses of __builtin_va_arg_pack () are removed during
6309 inlining. */
6310 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6311 return const0_rtx;
6312
6313 case BUILT_IN_VA_ARG_PACK_LEN:
6314 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6315 inlining. */
6316 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6317 return const0_rtx;
6318
6319 /* Return the address of the first anonymous stack arg. */
6320 case BUILT_IN_NEXT_ARG:
6321 if (fold_builtin_next_arg (exp, false))
6322 return const0_rtx;
6323 return expand_builtin_next_arg ();
6324
6325 case BUILT_IN_CLEAR_CACHE:
6326 target = expand_builtin___clear_cache (exp);
6327 if (target)
6328 return target;
6329 break;
6330
6331 case BUILT_IN_CLASSIFY_TYPE:
6332 return expand_builtin_classify_type (exp);
6333
6334 case BUILT_IN_CONSTANT_P:
6335 return const0_rtx;
6336
6337 case BUILT_IN_FRAME_ADDRESS:
6338 case BUILT_IN_RETURN_ADDRESS:
6339 return expand_builtin_frame_address (fndecl, exp);
6340
6341 /* Returns the address of the area where the structure is returned.
6342 0 otherwise. */
6343 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6344 if (call_expr_nargs (exp) != 0
6345 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6346 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6347 return const0_rtx;
6348 else
6349 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6350
6351 case BUILT_IN_ALLOCA:
6352 target = expand_builtin_alloca (exp, target);
6353 if (target)
6354 return target;
6355 break;
6356
6357 case BUILT_IN_STACK_SAVE:
6358 return expand_stack_save ();
6359
6360 case BUILT_IN_STACK_RESTORE:
6361 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6362 return const0_rtx;
6363
6364 case BUILT_IN_BSWAP32:
6365 case BUILT_IN_BSWAP64:
6366 target = expand_builtin_bswap (exp, target, subtarget);
6367
6368 if (target)
6369 return target;
6370 break;
6371
6372 CASE_INT_FN (BUILT_IN_FFS):
6373 case BUILT_IN_FFSIMAX:
6374 target = expand_builtin_unop (target_mode, exp, target,
6375 subtarget, ffs_optab);
6376 if (target)
6377 return target;
6378 break;
6379
6380 CASE_INT_FN (BUILT_IN_CLZ):
6381 case BUILT_IN_CLZIMAX:
6382 target = expand_builtin_unop (target_mode, exp, target,
6383 subtarget, clz_optab);
6384 if (target)
6385 return target;
6386 break;
6387
6388 CASE_INT_FN (BUILT_IN_CTZ):
6389 case BUILT_IN_CTZIMAX:
6390 target = expand_builtin_unop (target_mode, exp, target,
6391 subtarget, ctz_optab);
6392 if (target)
6393 return target;
6394 break;
6395
6396 CASE_INT_FN (BUILT_IN_POPCOUNT):
6397 case BUILT_IN_POPCOUNTIMAX:
6398 target = expand_builtin_unop (target_mode, exp, target,
6399 subtarget, popcount_optab);
6400 if (target)
6401 return target;
6402 break;
6403
6404 CASE_INT_FN (BUILT_IN_PARITY):
6405 case BUILT_IN_PARITYIMAX:
6406 target = expand_builtin_unop (target_mode, exp, target,
6407 subtarget, parity_optab);
6408 if (target)
6409 return target;
6410 break;
6411
6412 case BUILT_IN_STRLEN:
6413 target = expand_builtin_strlen (exp, target, target_mode);
6414 if (target)
6415 return target;
6416 break;
6417
6418 case BUILT_IN_STRCPY:
6419 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6420 if (target)
6421 return target;
6422 break;
6423
6424 case BUILT_IN_STRNCPY:
6425 target = expand_builtin_strncpy (exp, target, mode);
6426 if (target)
6427 return target;
6428 break;
6429
6430 case BUILT_IN_STPCPY:
6431 target = expand_builtin_stpcpy (exp, target, mode);
6432 if (target)
6433 return target;
6434 break;
6435
6436 case BUILT_IN_STRCAT:
6437 target = expand_builtin_strcat (fndecl, exp, target, mode);
6438 if (target)
6439 return target;
6440 break;
6441
6442 case BUILT_IN_STRNCAT:
6443 target = expand_builtin_strncat (exp, target, mode);
6444 if (target)
6445 return target;
6446 break;
6447
6448 case BUILT_IN_STRSPN:
6449 target = expand_builtin_strspn (exp, target, mode);
6450 if (target)
6451 return target;
6452 break;
6453
6454 case BUILT_IN_STRCSPN:
6455 target = expand_builtin_strcspn (exp, target, mode);
6456 if (target)
6457 return target;
6458 break;
6459
6460 case BUILT_IN_STRSTR:
6461 target = expand_builtin_strstr (exp, target, mode);
6462 if (target)
6463 return target;
6464 break;
6465
6466 case BUILT_IN_STRPBRK:
6467 target = expand_builtin_strpbrk (exp, target, mode);
6468 if (target)
6469 return target;
6470 break;
6471
6472 case BUILT_IN_INDEX:
6473 case BUILT_IN_STRCHR:
6474 target = expand_builtin_strchr (exp, target, mode);
6475 if (target)
6476 return target;
6477 break;
6478
6479 case BUILT_IN_RINDEX:
6480 case BUILT_IN_STRRCHR:
6481 target = expand_builtin_strrchr (exp, target, mode);
6482 if (target)
6483 return target;
6484 break;
6485
6486 case BUILT_IN_MEMCPY:
6487 target = expand_builtin_memcpy (exp, target, mode);
6488 if (target)
6489 return target;
6490 break;
6491
6492 case BUILT_IN_MEMPCPY:
6493 target = expand_builtin_mempcpy (exp, target, mode);
6494 if (target)
6495 return target;
6496 break;
6497
6498 case BUILT_IN_MEMMOVE:
6499 target = expand_builtin_memmove (exp, target, mode, ignore);
6500 if (target)
6501 return target;
6502 break;
6503
6504 case BUILT_IN_BCOPY:
6505 target = expand_builtin_bcopy (exp, ignore);
6506 if (target)
6507 return target;
6508 break;
6509
6510 case BUILT_IN_MEMSET:
6511 target = expand_builtin_memset (exp, target, mode);
6512 if (target)
6513 return target;
6514 break;
6515
6516 case BUILT_IN_BZERO:
6517 target = expand_builtin_bzero (exp);
6518 if (target)
6519 return target;
6520 break;
6521
6522 case BUILT_IN_STRCMP:
6523 target = expand_builtin_strcmp (exp, target, mode);
6524 if (target)
6525 return target;
6526 break;
6527
6528 case BUILT_IN_STRNCMP:
6529 target = expand_builtin_strncmp (exp, target, mode);
6530 if (target)
6531 return target;
6532 break;
6533
6534 case BUILT_IN_MEMCHR:
6535 target = expand_builtin_memchr (exp, target, mode);
6536 if (target)
6537 return target;
6538 break;
6539
6540 case BUILT_IN_BCMP:
6541 case BUILT_IN_MEMCMP:
6542 target = expand_builtin_memcmp (exp, target, mode);
6543 if (target)
6544 return target;
6545 break;
6546
6547 case BUILT_IN_SETJMP:
6548 /* This should have been lowered to the builtins below. */
6549 gcc_unreachable ();
6550
6551 case BUILT_IN_SETJMP_SETUP:
6552 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6553 and the receiver label. */
6554 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6555 {
6556 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6557 VOIDmode, EXPAND_NORMAL);
6558 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6559 rtx label_r = label_rtx (label);
6560
6561 /* This is copied from the handling of non-local gotos. */
6562 expand_builtin_setjmp_setup (buf_addr, label_r);
6563 nonlocal_goto_handler_labels
6564 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6565 nonlocal_goto_handler_labels);
6566 /* ??? Do not let expand_label treat us as such since we would
6567 not want to be both on the list of non-local labels and on
6568 the list of forced labels. */
6569 FORCED_LABEL (label) = 0;
6570 return const0_rtx;
6571 }
6572 break;
6573
6574 case BUILT_IN_SETJMP_DISPATCHER:
6575 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6576 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6577 {
6578 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6579 rtx label_r = label_rtx (label);
6580
6581 /* Remove the dispatcher label from the list of non-local labels
6582 since the receiver labels have been added to it above. */
6583 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6584 return const0_rtx;
6585 }
6586 break;
6587
6588 case BUILT_IN_SETJMP_RECEIVER:
6589 /* __builtin_setjmp_receiver is passed the receiver label. */
6590 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6591 {
6592 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6593 rtx label_r = label_rtx (label);
6594
6595 expand_builtin_setjmp_receiver (label_r);
6596 return const0_rtx;
6597 }
6598 break;
6599
6600 /* __builtin_longjmp is passed a pointer to an array of five words.
6601 It's similar to the C library longjmp function but works with
6602 __builtin_setjmp above. */
6603 case BUILT_IN_LONGJMP:
6604 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6605 {
6606 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6607 VOIDmode, EXPAND_NORMAL);
6608 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6609
6610 if (value != const1_rtx)
6611 {
6612 error ("%<__builtin_longjmp%> second argument must be 1");
6613 return const0_rtx;
6614 }
6615
6616 expand_builtin_longjmp (buf_addr, value);
6617 return const0_rtx;
6618 }
6619 break;
6620
6621 case BUILT_IN_NONLOCAL_GOTO:
6622 target = expand_builtin_nonlocal_goto (exp);
6623 if (target)
6624 return target;
6625 break;
6626
6627 /* This updates the setjmp buffer that is its argument with the value
6628 of the current stack pointer. */
6629 case BUILT_IN_UPDATE_SETJMP_BUF:
6630 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6631 {
6632 rtx buf_addr
6633 = expand_normal (CALL_EXPR_ARG (exp, 0));
6634
6635 expand_builtin_update_setjmp_buf (buf_addr);
6636 return const0_rtx;
6637 }
6638 break;
6639
6640 case BUILT_IN_TRAP:
6641 expand_builtin_trap ();
6642 return const0_rtx;
6643
6644 case BUILT_IN_PRINTF:
6645 target = expand_builtin_printf (exp, target, mode, false);
6646 if (target)
6647 return target;
6648 break;
6649
6650 case BUILT_IN_PRINTF_UNLOCKED:
6651 target = expand_builtin_printf (exp, target, mode, true);
6652 if (target)
6653 return target;
6654 break;
6655
6656 case BUILT_IN_FPUTS:
6657 target = expand_builtin_fputs (exp, target, false);
6658 if (target)
6659 return target;
6660 break;
6661 case BUILT_IN_FPUTS_UNLOCKED:
6662 target = expand_builtin_fputs (exp, target, true);
6663 if (target)
6664 return target;
6665 break;
6666
6667 case BUILT_IN_FPRINTF:
6668 target = expand_builtin_fprintf (exp, target, mode, false);
6669 if (target)
6670 return target;
6671 break;
6672
6673 case BUILT_IN_FPRINTF_UNLOCKED:
6674 target = expand_builtin_fprintf (exp, target, mode, true);
6675 if (target)
6676 return target;
6677 break;
6678
6679 case BUILT_IN_SPRINTF:
6680 target = expand_builtin_sprintf (exp, target, mode);
6681 if (target)
6682 return target;
6683 break;
6684
6685 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6686 case BUILT_IN_SIGNBITD32:
6687 case BUILT_IN_SIGNBITD64:
6688 case BUILT_IN_SIGNBITD128:
6689 target = expand_builtin_signbit (exp, target);
6690 if (target)
6691 return target;
6692 break;
6693
6694 /* Various hooks for the DWARF 2 __throw routine. */
6695 case BUILT_IN_UNWIND_INIT:
6696 expand_builtin_unwind_init ();
6697 return const0_rtx;
6698 case BUILT_IN_DWARF_CFA:
6699 return virtual_cfa_rtx;
6700 #ifdef DWARF2_UNWIND_INFO
6701 case BUILT_IN_DWARF_SP_COLUMN:
6702 return expand_builtin_dwarf_sp_column ();
6703 case BUILT_IN_INIT_DWARF_REG_SIZES:
6704 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6705 return const0_rtx;
6706 #endif
6707 case BUILT_IN_FROB_RETURN_ADDR:
6708 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6709 case BUILT_IN_EXTRACT_RETURN_ADDR:
6710 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6711 case BUILT_IN_EH_RETURN:
6712 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6713 CALL_EXPR_ARG (exp, 1));
6714 return const0_rtx;
6715 #ifdef EH_RETURN_DATA_REGNO
6716 case BUILT_IN_EH_RETURN_DATA_REGNO:
6717 return expand_builtin_eh_return_data_regno (exp);
6718 #endif
6719 case BUILT_IN_EXTEND_POINTER:
6720 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6721
6722 case BUILT_IN_VA_START:
6723 return expand_builtin_va_start (exp);
6724 case BUILT_IN_VA_END:
6725 return expand_builtin_va_end (exp);
6726 case BUILT_IN_VA_COPY:
6727 return expand_builtin_va_copy (exp);
6728 case BUILT_IN_EXPECT:
6729 return expand_builtin_expect (exp, target);
6730 case BUILT_IN_PREFETCH:
6731 expand_builtin_prefetch (exp);
6732 return const0_rtx;
6733
6734 case BUILT_IN_PROFILE_FUNC_ENTER:
6735 return expand_builtin_profile_func (false);
6736 case BUILT_IN_PROFILE_FUNC_EXIT:
6737 return expand_builtin_profile_func (true);
6738
6739 case BUILT_IN_INIT_TRAMPOLINE:
6740 return expand_builtin_init_trampoline (exp);
6741 case BUILT_IN_ADJUST_TRAMPOLINE:
6742 return expand_builtin_adjust_trampoline (exp);
6743
6744 case BUILT_IN_FORK:
6745 case BUILT_IN_EXECL:
6746 case BUILT_IN_EXECV:
6747 case BUILT_IN_EXECLP:
6748 case BUILT_IN_EXECLE:
6749 case BUILT_IN_EXECVP:
6750 case BUILT_IN_EXECVE:
6751 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6752 if (target)
6753 return target;
6754 break;
6755
6756 case BUILT_IN_FETCH_AND_ADD_1:
6757 case BUILT_IN_FETCH_AND_ADD_2:
6758 case BUILT_IN_FETCH_AND_ADD_4:
6759 case BUILT_IN_FETCH_AND_ADD_8:
6760 case BUILT_IN_FETCH_AND_ADD_16:
6761 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6762 target = expand_builtin_sync_operation (mode, exp, PLUS,
6763 false, target, ignore);
6764 if (target)
6765 return target;
6766 break;
6767
6768 case BUILT_IN_FETCH_AND_SUB_1:
6769 case BUILT_IN_FETCH_AND_SUB_2:
6770 case BUILT_IN_FETCH_AND_SUB_4:
6771 case BUILT_IN_FETCH_AND_SUB_8:
6772 case BUILT_IN_FETCH_AND_SUB_16:
6773 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6774 target = expand_builtin_sync_operation (mode, exp, MINUS,
6775 false, target, ignore);
6776 if (target)
6777 return target;
6778 break;
6779
6780 case BUILT_IN_FETCH_AND_OR_1:
6781 case BUILT_IN_FETCH_AND_OR_2:
6782 case BUILT_IN_FETCH_AND_OR_4:
6783 case BUILT_IN_FETCH_AND_OR_8:
6784 case BUILT_IN_FETCH_AND_OR_16:
6785 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6786 target = expand_builtin_sync_operation (mode, exp, IOR,
6787 false, target, ignore);
6788 if (target)
6789 return target;
6790 break;
6791
6792 case BUILT_IN_FETCH_AND_AND_1:
6793 case BUILT_IN_FETCH_AND_AND_2:
6794 case BUILT_IN_FETCH_AND_AND_4:
6795 case BUILT_IN_FETCH_AND_AND_8:
6796 case BUILT_IN_FETCH_AND_AND_16:
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6798 target = expand_builtin_sync_operation (mode, exp, AND,
6799 false, target, ignore);
6800 if (target)
6801 return target;
6802 break;
6803
6804 case BUILT_IN_FETCH_AND_XOR_1:
6805 case BUILT_IN_FETCH_AND_XOR_2:
6806 case BUILT_IN_FETCH_AND_XOR_4:
6807 case BUILT_IN_FETCH_AND_XOR_8:
6808 case BUILT_IN_FETCH_AND_XOR_16:
6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6810 target = expand_builtin_sync_operation (mode, exp, XOR,
6811 false, target, ignore);
6812 if (target)
6813 return target;
6814 break;
6815
6816 case BUILT_IN_FETCH_AND_NAND_1:
6817 case BUILT_IN_FETCH_AND_NAND_2:
6818 case BUILT_IN_FETCH_AND_NAND_4:
6819 case BUILT_IN_FETCH_AND_NAND_8:
6820 case BUILT_IN_FETCH_AND_NAND_16:
6821 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6822 target = expand_builtin_sync_operation (mode, exp, NOT,
6823 false, target, ignore);
6824 if (target)
6825 return target;
6826 break;
6827
6828 case BUILT_IN_ADD_AND_FETCH_1:
6829 case BUILT_IN_ADD_AND_FETCH_2:
6830 case BUILT_IN_ADD_AND_FETCH_4:
6831 case BUILT_IN_ADD_AND_FETCH_8:
6832 case BUILT_IN_ADD_AND_FETCH_16:
6833 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6834 target = expand_builtin_sync_operation (mode, exp, PLUS,
6835 true, target, ignore);
6836 if (target)
6837 return target;
6838 break;
6839
6840 case BUILT_IN_SUB_AND_FETCH_1:
6841 case BUILT_IN_SUB_AND_FETCH_2:
6842 case BUILT_IN_SUB_AND_FETCH_4:
6843 case BUILT_IN_SUB_AND_FETCH_8:
6844 case BUILT_IN_SUB_AND_FETCH_16:
6845 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6846 target = expand_builtin_sync_operation (mode, exp, MINUS,
6847 true, target, ignore);
6848 if (target)
6849 return target;
6850 break;
6851
6852 case BUILT_IN_OR_AND_FETCH_1:
6853 case BUILT_IN_OR_AND_FETCH_2:
6854 case BUILT_IN_OR_AND_FETCH_4:
6855 case BUILT_IN_OR_AND_FETCH_8:
6856 case BUILT_IN_OR_AND_FETCH_16:
6857 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6858 target = expand_builtin_sync_operation (mode, exp, IOR,
6859 true, target, ignore);
6860 if (target)
6861 return target;
6862 break;
6863
6864 case BUILT_IN_AND_AND_FETCH_1:
6865 case BUILT_IN_AND_AND_FETCH_2:
6866 case BUILT_IN_AND_AND_FETCH_4:
6867 case BUILT_IN_AND_AND_FETCH_8:
6868 case BUILT_IN_AND_AND_FETCH_16:
6869 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6870 target = expand_builtin_sync_operation (mode, exp, AND,
6871 true, target, ignore);
6872 if (target)
6873 return target;
6874 break;
6875
6876 case BUILT_IN_XOR_AND_FETCH_1:
6877 case BUILT_IN_XOR_AND_FETCH_2:
6878 case BUILT_IN_XOR_AND_FETCH_4:
6879 case BUILT_IN_XOR_AND_FETCH_8:
6880 case BUILT_IN_XOR_AND_FETCH_16:
6881 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6882 target = expand_builtin_sync_operation (mode, exp, XOR,
6883 true, target, ignore);
6884 if (target)
6885 return target;
6886 break;
6887
6888 case BUILT_IN_NAND_AND_FETCH_1:
6889 case BUILT_IN_NAND_AND_FETCH_2:
6890 case BUILT_IN_NAND_AND_FETCH_4:
6891 case BUILT_IN_NAND_AND_FETCH_8:
6892 case BUILT_IN_NAND_AND_FETCH_16:
6893 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6894 target = expand_builtin_sync_operation (mode, exp, NOT,
6895 true, target, ignore);
6896 if (target)
6897 return target;
6898 break;
6899
6900 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6901 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6902 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6903 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6904 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6905 if (mode == VOIDmode)
6906 mode = TYPE_MODE (boolean_type_node);
6907 if (!target || !register_operand (target, mode))
6908 target = gen_reg_rtx (mode);
6909
6910 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6911 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6912 if (target)
6913 return target;
6914 break;
6915
6916 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6917 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6918 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6919 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6920 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6921 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6922 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6923 if (target)
6924 return target;
6925 break;
6926
6927 case BUILT_IN_LOCK_TEST_AND_SET_1:
6928 case BUILT_IN_LOCK_TEST_AND_SET_2:
6929 case BUILT_IN_LOCK_TEST_AND_SET_4:
6930 case BUILT_IN_LOCK_TEST_AND_SET_8:
6931 case BUILT_IN_LOCK_TEST_AND_SET_16:
6932 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6933 target = expand_builtin_lock_test_and_set (mode, exp, target);
6934 if (target)
6935 return target;
6936 break;
6937
6938 case BUILT_IN_LOCK_RELEASE_1:
6939 case BUILT_IN_LOCK_RELEASE_2:
6940 case BUILT_IN_LOCK_RELEASE_4:
6941 case BUILT_IN_LOCK_RELEASE_8:
6942 case BUILT_IN_LOCK_RELEASE_16:
6943 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6944 expand_builtin_lock_release (mode, exp);
6945 return const0_rtx;
6946
6947 case BUILT_IN_SYNCHRONIZE:
6948 expand_builtin_synchronize ();
6949 return const0_rtx;
6950
6951 case BUILT_IN_OBJECT_SIZE:
6952 return expand_builtin_object_size (exp);
6953
6954 case BUILT_IN_MEMCPY_CHK:
6955 case BUILT_IN_MEMPCPY_CHK:
6956 case BUILT_IN_MEMMOVE_CHK:
6957 case BUILT_IN_MEMSET_CHK:
6958 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6959 if (target)
6960 return target;
6961 break;
6962
6963 case BUILT_IN_STRCPY_CHK:
6964 case BUILT_IN_STPCPY_CHK:
6965 case BUILT_IN_STRNCPY_CHK:
6966 case BUILT_IN_STRCAT_CHK:
6967 case BUILT_IN_STRNCAT_CHK:
6968 case BUILT_IN_SNPRINTF_CHK:
6969 case BUILT_IN_VSNPRINTF_CHK:
6970 maybe_emit_chk_warning (exp, fcode);
6971 break;
6972
6973 case BUILT_IN_SPRINTF_CHK:
6974 case BUILT_IN_VSPRINTF_CHK:
6975 maybe_emit_sprintf_chk_warning (exp, fcode);
6976 break;
6977
6978 default: /* just do library call, if unknown builtin */
6979 break;
6980 }
6981
6982 /* The switch statement above can drop through to cause the function
6983 to be called normally. */
6984 return expand_call (exp, target, ignore);
6985 }
6986
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  /* Only direct calls (through an ADDR_EXPR of the callee) can be
     recognized.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* The callee must be a normal or front-end built-in; machine-specific
     (BUILT_IN_MD) builtins are excluded.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual call arguments in
     parallel, requiring each argument's type class to match the
     corresponding parameter's type class.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Excess actual arguments disqualify the call.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      /* Require matching type classes; exact type identity is not
	 needed, only that scalar float matches scalar float, complex
	 float matches complex float, and so on.  */
      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
7058
7059 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7060 evaluate to a constant. */
7061
7062 static tree
7063 fold_builtin_constant_p (tree arg)
7064 {
7065 /* We return 1 for a numeric type that's known to be a constant
7066 value at compile-time or for an aggregate type that's a
7067 literal constant. */
7068 STRIP_NOPS (arg);
7069
7070 /* If we know this is a constant, emit the constant of one. */
7071 if (CONSTANT_CLASS_P (arg)
7072 || (TREE_CODE (arg) == CONSTRUCTOR
7073 && TREE_CONSTANT (arg)))
7074 return integer_one_node;
7075 if (TREE_CODE (arg) == ADDR_EXPR)
7076 {
7077 tree op = TREE_OPERAND (arg, 0);
7078 if (TREE_CODE (op) == STRING_CST
7079 || (TREE_CODE (op) == ARRAY_REF
7080 && integer_zerop (TREE_OPERAND (op, 1))
7081 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7082 return integer_one_node;
7083 }
7084
7085 /* If this expression has side effects, show we don't know it to be a
7086 constant. Likewise if it's a pointer or aggregate type since in
7087 those case we only want literals, since those are only optimized
7088 when generating RTL, not later.
7089 And finally, if we are compiling an initializer, not code, we
7090 need to return a definite result now; there's not going to be any
7091 more optimization done. */
7092 if (TREE_SIDE_EFFECTS (arg)
7093 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7094 || POINTER_TYPE_P (TREE_TYPE (arg))
7095 || cfun == 0
7096 || folding_initializer)
7097 return integer_zero_node;
7098
7099 return NULL_TREE;
7100 }
7101
7102 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7103 return it as a truthvalue. */
7104
7105 static tree
7106 build_builtin_expect_predicate (tree pred, tree expected)
7107 {
7108 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7109
7110 fn = built_in_decls[BUILT_IN_EXPECT];
7111 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7112 ret_type = TREE_TYPE (TREE_TYPE (fn));
7113 pred_type = TREE_VALUE (arg_types);
7114 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7115
7116 pred = fold_convert (pred_type, pred);
7117 expected = fold_convert (expected_type, expected);
7118 call_expr = build_call_expr (fn, 2, pred, expected);
7119
7120 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7121 build_int_cst (ret_type, 0));
7122 }
7123
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a thruthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      /* Rewrite __builtin_expect (a && b, c) as
	 __builtin_expect (a, c) && __builtin_expect (b, c) so each
	 operand carries the branch-prediction hint individually.  */
      op0 = build_builtin_expect_predicate (op0, arg1);
      op1 = build_builtin_expect_predicate (op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert (TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip component and array references to find the underlying
	 declaration whose address is taken.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol may be overridden at link time,
	 so it is not a usable compile-time constant.  */
      if (DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
7192
7193 /* Fold a call to __builtin_classify_type with argument ARG. */
7194
7195 static tree
7196 fold_builtin_classify_type (tree arg)
7197 {
7198 if (arg == 0)
7199 return build_int_cst (NULL_TREE, no_type_class);
7200
7201 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7202 }
7203
7204 /* Fold a call to __builtin_strlen with argument ARG. */
7205
7206 static tree
7207 fold_builtin_strlen (tree arg)
7208 {
7209 if (!validate_arg (arg, POINTER_TYPE))
7210 return NULL_TREE;
7211 else
7212 {
7213 tree len = c_strlen (arg, 0);
7214
7215 if (len)
7216 {
7217 /* Convert from the internal "sizetype" type to "size_t". */
7218 if (size_type_node)
7219 len = fold_convert (size_type_node, len);
7220 return len;
7221 }
7222
7223 return NULL_TREE;
7224 }
7225 }
7226
7227 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7228
7229 static tree
7230 fold_builtin_inf (tree type, int warn)
7231 {
7232 REAL_VALUE_TYPE real;
7233
7234 /* __builtin_inff is intended to be usable to define INFINITY on all
7235 targets. If an infinity is not available, INFINITY expands "to a
7236 positive constant of type float that overflows at translation
7237 time", footnote "In this case, using INFINITY will violate the
7238 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7239 Thus we pedwarn to ensure this constraint violation is
7240 diagnosed. */
7241 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7242 pedwarn ("target format does not support infinity");
7243
7244 real_inf (&real);
7245 return build_real (type, real);
7246 }
7247
7248 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7249
7250 static tree
7251 fold_builtin_nan (tree arg, tree type, int quiet)
7252 {
7253 REAL_VALUE_TYPE real;
7254 const char *str;
7255
7256 if (!validate_arg (arg, POINTER_TYPE))
7257 return NULL_TREE;
7258 str = c_getstr (arg);
7259 if (!str)
7260 return NULL_TREE;
7261
7262 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7263 return NULL_TREE;
7264
7265 return build_real (type, real);
7266 }
7267
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* An int-to-float conversion is integer valued by construction.  */
    case FLOAT_EXPR:
      return true;

    /* Absolute value preserves integrality; SAVE_EXPR wraps its
       operand without changing the value.  */
    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* For these the expression's value is that of the second
       operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));

    /* These operations on integer-valued operands yield integer
       values (given the +Inf/-Inf/NaN allowance above).  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* A conditional is integer valued when both arms are.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    /* A real constant: ask the real.c machinery directly.  */
    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* Widening from an integer type, or a conversion of an
	   already integer-valued real, stays integer valued.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* The rounding builtins produce integer values by
	   definition.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax of integer-valued operands is integer valued.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Anything not handled above: assume not integer valued.  */
  return false;
}
7339
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      /* If ARG is a narrower value widened to FTYPE, do the rounding
	 in the narrower type and widen the result back, e.g.
	 floor ((double) f) -> (double) floorf (f), provided the
	 narrower type has a corresponding builtin.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert (ftype,
			     build_call_expr (decl, 1,
					      fold_convert (newtype, arg0)));
    }
  return NULL_TREE;
}
7376
7377 /* FNDECL is assumed to be builtin which can narrow the FP type of
7378 the argument, for instance lround((double)f) -> lroundf (f).
7379 Do the transformation for a call with argument ARG. */
7380
7381 static tree
7382 fold_fixed_mathfn (tree fndecl, tree arg)
7383 {
7384 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7385
7386 if (!validate_arg (arg, REAL_TYPE))
7387 return NULL_TREE;
7388
7389 /* If argument is already integer valued, and we don't need to worry
7390 about setting errno, there's no need to perform rounding. */
7391 if (! flag_errno_math && integer_valued_real_p (arg))
7392 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7393
7394 if (optimize)
7395 {
7396 tree ftype = TREE_TYPE (arg);
7397 tree arg0 = strip_float_extensions (arg);
7398 tree newtype = TREE_TYPE (arg0);
7399 tree decl;
7400
7401 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7402 && (decl = mathfn_built_in (newtype, fcode)))
7403 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7404 }
7405
7406 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7407 sizeof (long long) == sizeof (long). */
7408 if (TYPE_PRECISION (long_long_integer_type_node)
7409 == TYPE_PRECISION (long_integer_type_node))
7410 {
7411 tree newfn = NULL_TREE;
7412 switch (fcode)
7413 {
7414 CASE_FLT_FN (BUILT_IN_LLCEIL):
7415 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7416 break;
7417
7418 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7419 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7420 break;
7421
7422 CASE_FLT_FN (BUILT_IN_LLROUND):
7423 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7424 break;
7425
7426 CASE_FLT_FN (BUILT_IN_LLRINT):
7427 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7428 break;
7429
7430 default:
7431 break;
7432 }
7433
7434 if (newfn)
7435 {
7436 tree newcall = build_call_expr(newfn, 1, arg);
7437 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7438 }
7439 }
7440
7441 return NULL_TREE;
7442 }
7443
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (tree arg, tree type, tree fndecl)
{
  tree res;

  /* The argument must be a complex value with floating-point parts.  */
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1 (ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1 (ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  /* sqrt(2) truncated to the precision of TYPE.  */
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type),
				   *get_real_const (rv_sqrt2));
	  STRIP_NOPS (real);
	  return fold_build2 (MULT_EXPR, type,
			      fold_build1 (ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && !optimize_size)
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      /* Expand cabs (z) inline as sqrt (re*re + im*im) when a sqrt
	 builtin is available for TYPE.  */
      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save ARG so it is evaluated only once.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1 (REALPART_EXPR, type, arg);
	  ipart = fold_build1 (IMAGPART_EXPR, type, arg);

	  /* Each part is used twice in the sum of squares below.  */
	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2 (PLUS_EXPR, type,
				fold_build2 (MULT_EXPR, type,
					     rpart, rpart),
				fold_build2 (MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr (sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7522
7523 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7524 Return NULL_TREE if no simplification can be made. */
7525
7526 static tree
7527 fold_builtin_sqrt (tree arg, tree type)
7528 {
7529
7530 enum built_in_function fcode;
7531 tree res;
7532
7533 if (!validate_arg (arg, REAL_TYPE))
7534 return NULL_TREE;
7535
7536 /* Calculate the result when the argument is a constant. */
7537 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7538 return res;
7539
7540 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7541 fcode = builtin_mathfn_code (arg);
7542 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7543 {
7544 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7545 arg = fold_build2 (MULT_EXPR, type,
7546 CALL_EXPR_ARG (arg, 0),
7547 build_real (type, dconsthalf));
7548 return build_call_expr (expfn, 1, arg);
7549 }
7550
7551 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7552 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7553 {
7554 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7555
7556 if (powfn)
7557 {
7558 tree arg0 = CALL_EXPR_ARG (arg, 0);
7559 tree tree_root;
7560 /* The inner root was either sqrt or cbrt. */
7561 REAL_VALUE_TYPE dconstroot =
7562 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7563
7564 /* Adjust for the outer root. */
7565 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7566 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7567 tree_root = build_real (type, dconstroot);
7568 return build_call_expr (powfn, 2, arg0, tree_root);
7569 }
7570 }
7571
7572 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7573 if (flag_unsafe_math_optimizations
7574 && (fcode == BUILT_IN_POW
7575 || fcode == BUILT_IN_POWF
7576 || fcode == BUILT_IN_POWL))
7577 {
7578 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7579 tree arg0 = CALL_EXPR_ARG (arg, 0);
7580 tree arg1 = CALL_EXPR_ARG (arg, 1);
7581 tree narg1;
7582 if (!tree_expr_nonnegative_p (arg0))
7583 arg0 = build1 (ABS_EXPR, type, arg0);
7584 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7585 build_real (type, dconsthalf));
7586 return build_call_expr (powfn, 2, arg0, narg1);
7587 }
7588
7589 return NULL_TREE;
7590 }
7591
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  /* 1/3 truncated to the precision of TYPE.  */
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
	  arg = fold_build2 (MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, third_trunc));
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);

	      /* Halve 1/3 (decrement the binary exponent) to obtain
		 the combined exponent 1/6.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr (powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* Compute (1/3) * (1/3) = 1/9.  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   get_real_const (rv_third),
				   get_real_const (rv_third));
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr (powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type),
				       *get_real_const (rv_third));
	      tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
					 build_real (type, dconstroot));
	      return build_call_expr (powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7684
7685 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7686 TYPE is the type of the return value. Return NULL_TREE if no
7687 simplification can be made. */
7688
7689 static tree
7690 fold_builtin_cos (tree arg, tree type, tree fndecl)
7691 {
7692 tree res, narg;
7693
7694 if (!validate_arg (arg, REAL_TYPE))
7695 return NULL_TREE;
7696
7697 /* Calculate the result when the argument is a constant. */
7698 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7699 return res;
7700
7701 /* Optimize cos(-x) into cos (x). */
7702 if ((narg = fold_strip_sign_ops (arg)))
7703 return build_call_expr (fndecl, 1, narg);
7704
7705 return NULL_TREE;
7706 }
7707
7708 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7709 Return NULL_TREE if no simplification can be made. */
7710
7711 static tree
7712 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7713 {
7714 if (validate_arg (arg, REAL_TYPE))
7715 {
7716 tree res, narg;
7717
7718 /* Calculate the result when the argument is a constant. */
7719 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7720 return res;
7721
7722 /* Optimize cosh(-x) into cosh (x). */
7723 if ((narg = fold_strip_sign_ops (arg)))
7724 return build_call_expr (fndecl, 1, narg);
7725 }
7726
7727 return NULL_TREE;
7728 }
7729
7730 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7731 Return NULL_TREE if no simplification can be made. */
7732
7733 static tree
7734 fold_builtin_tan (tree arg, tree type)
7735 {
7736 enum built_in_function fcode;
7737 tree res;
7738
7739 if (!validate_arg (arg, REAL_TYPE))
7740 return NULL_TREE;
7741
7742 /* Calculate the result when the argument is a constant. */
7743 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7744 return res;
7745
7746 /* Optimize tan(atan(x)) = x. */
7747 fcode = builtin_mathfn_code (arg);
7748 if (flag_unsafe_math_optimizations
7749 && (fcode == BUILT_IN_ATAN
7750 || fcode == BUILT_IN_ATANF
7751 || fcode == BUILT_IN_ATANL))
7752 return CALL_EXPR_ARG (arg, 0);
7753
7754 return NULL_TREE;
7755 }
7756
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle; ARG1 and ARG2 are pointers through which the two results
   are stored.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Save the cexpi call so it is evaluated only once even though both
     stores below reference it.  */
  call = build_call_expr (fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Store the imaginary part through ARG1 and the real part through
     ARG2.  NOTE(review): assumes cexpi(x) yields cos(x) + i*sin(x),
     i.e. *ARG1 receives the sine and *ARG2 the cosine — confirm
     against the BUILT_IN_CEXPI expansion.  */
  return build2 (COMPOUND_EXPR, type,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7795
/* Fold function call to builtin cexp, cexpf, or cexpl.  ARG0 is the
   complex argument and TYPE the (complex) return type of the call.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;

  if (!validate_arg (arg0, COMPLEX_TYPE))
    return NULL_TREE;

  /* RTYPE is the real type underlying the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + i*y) -> cexpi (y).  */
  if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
      return build_call_expr (ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls so each is evaluated only once even though
	 they are referenced twice in the COMPLEX_EXPR below.  */
      icall = build_call_expr (ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr (rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result is exp(r)*real(cexpi(i)) + i * exp(r)*imag(cexpi(i)).  */
      return fold_build2 (COMPLEX_EXPR, type,
			  fold_build2 (MULT_EXPR, rtype,
				       rcall,
				       fold_build1 (REALPART_EXPR, rtype, icall)),
			  fold_build2 (MULT_EXPR, rtype,
				       rcall,
				       fold_build1 (IMAGPART_EXPR, rtype, icall)));
    }

  return NULL_TREE;
}
7855
7856 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7857 Return NULL_TREE if no simplification can be made. */
7858
7859 static tree
7860 fold_builtin_trunc (tree fndecl, tree arg)
7861 {
7862 if (!validate_arg (arg, REAL_TYPE))
7863 return NULL_TREE;
7864
7865 /* Optimize trunc of constant value. */
7866 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7867 {
7868 REAL_VALUE_TYPE r, x;
7869 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7870
7871 x = TREE_REAL_CST (arg);
7872 real_trunc (&r, TYPE_MODE (type), &x);
7873 return build_real (type, r);
7874 }
7875
7876 return fold_trunc_transparent_mathfn (fndecl, arg);
7877 }
7878
7879 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7880 Return NULL_TREE if no simplification can be made. */
7881
7882 static tree
7883 fold_builtin_floor (tree fndecl, tree arg)
7884 {
7885 if (!validate_arg (arg, REAL_TYPE))
7886 return NULL_TREE;
7887
7888 /* Optimize floor of constant value. */
7889 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7890 {
7891 REAL_VALUE_TYPE x;
7892
7893 x = TREE_REAL_CST (arg);
7894 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7895 {
7896 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7897 REAL_VALUE_TYPE r;
7898
7899 real_floor (&r, TYPE_MODE (type), &x);
7900 return build_real (type, r);
7901 }
7902 }
7903
7904 /* Fold floor (x) where x is nonnegative to trunc (x). */
7905 if (tree_expr_nonnegative_p (arg))
7906 {
7907 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7908 if (truncfn)
7909 return build_call_expr (truncfn, 1, arg);
7910 }
7911
7912 return fold_trunc_transparent_mathfn (fndecl, arg);
7913 }
7914
7915 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7916 Return NULL_TREE if no simplification can be made. */
7917
7918 static tree
7919 fold_builtin_ceil (tree fndecl, tree arg)
7920 {
7921 if (!validate_arg (arg, REAL_TYPE))
7922 return NULL_TREE;
7923
7924 /* Optimize ceil of constant value. */
7925 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7926 {
7927 REAL_VALUE_TYPE x;
7928
7929 x = TREE_REAL_CST (arg);
7930 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7931 {
7932 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7933 REAL_VALUE_TYPE r;
7934
7935 real_ceil (&r, TYPE_MODE (type), &x);
7936 return build_real (type, r);
7937 }
7938 }
7939
7940 return fold_trunc_transparent_mathfn (fndecl, arg);
7941 }
7942
7943 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7944 Return NULL_TREE if no simplification can be made. */
7945
7946 static tree
7947 fold_builtin_round (tree fndecl, tree arg)
7948 {
7949 if (!validate_arg (arg, REAL_TYPE))
7950 return NULL_TREE;
7951
7952 /* Optimize round of constant value. */
7953 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7954 {
7955 REAL_VALUE_TYPE x;
7956
7957 x = TREE_REAL_CST (arg);
7958 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7959 {
7960 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7961 REAL_VALUE_TYPE r;
7962
7963 real_round (&r, TYPE_MODE (type), &x);
7964 return build_real (type, r);
7965 }
7966 }
7967
7968 return fold_trunc_transparent_mathfn (fndecl, arg);
7969 }
7970
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* NaNs and infinities have no integer equivalent; leave those
	 for the library call.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  unsigned HOST_WIDE_INT lo2;
	  HOST_WIDE_INT hi, lo;
	  REAL_VALUE_TYPE r;

	  /* Round X in the floating type according to which builtin
	     this is.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Fold to the converted constant only when the rounded value
	     fits ITYPE (fit_double_type presumably signals overflow
	     with a nonzero return — confirm against its definition).  */
	  REAL_VALUE_TO_INT (&lo, &hi, r);
	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
	    return build_int_cst_wide (itype, lo2, hi);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
			    arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (fndecl, arg);
}
8036
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is held as two HOST_WIDE_INT words, LO and HI.  */
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* One plus the index of the least significant set bit, or
	     zero when the argument is zero.  X & -X isolates the
	     lowest set bit.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo) + 1;
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* Count leading zeros; for a zero argument use the target's
	     defined value if any, else the type width.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* Count trailing zeros; for a zero argument use the target's
	     defined value if any, else the type width.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* Count set bits; X &= X-1 clears the lowest set bit.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* Parity is the low bit of the population count.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
8128
/* Fold function call to builtin_bswap and the long and long long
   variants.  FNDECL is the call's declaration, ARG its argument.
   Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is held as two HOST_WIDE_INT words; the reversed
	 result is assembled into R_LO/R_HI.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Move each byte at bit offset S to the mirrored offset
	       D = WIDTH - S - 8.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		/* Extract the byte from the low or high source word.  */
		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		/* Deposit it into the low or high result word.  */
		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
8187
8188 /* Return true if EXPR is the real constant contained in VALUE. */
8189
8190 static bool
8191 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8192 {
8193 STRIP_NOPS (expr);
8194
8195 return ((TREE_CODE (expr) == REAL_CST
8196 && !TREE_OVERFLOW (expr)
8197 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8198 || (TREE_CODE (expr) == COMPLEX_CST
8199 && real_dconstp (TREE_REALPART (expr), value)
8200 && real_zerop (TREE_IMAGPART (expr))));
8201 }
8202
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  FNDECL is the call's declaration, ARG its argument and
   FUNC the corresponding MPFR logarithm function (mpfr_log,
   mpfr_log2 or mpfr_log10).  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_logarithm (tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Optimize log(e) = 1.0.  We're never passed an exact 'e',
	 instead we'll look for 'e' truncated to MODE.  So only do
	 this if flag_unsafe_math_optimizations is set.  */
      if (flag_unsafe_math_optimizations && func == mpfr_log)
	{
	  const REAL_VALUE_TYPE e_truncated =
	    real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
	  if (real_dconstp (arg, &e_truncated))
	    return build_real (type, dconst1);
	}

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert (type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type,
			      real_value_truncate (TYPE_MODE (type),
						   *get_real_const (rv_e)));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								*get_real_const (rv_third)));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr (fndecl, 1, x);
	      return fold_build2 (MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8307
8308 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8309 NULL_TREE if no simplification can be made. */
8310
8311 static tree
8312 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8313 {
8314 tree res, narg0, narg1;
8315
8316 if (!validate_arg (arg0, REAL_TYPE)
8317 || !validate_arg (arg1, REAL_TYPE))
8318 return NULL_TREE;
8319
8320 /* Calculate the result when the argument is a constant. */
8321 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8322 return res;
8323
8324 /* If either argument to hypot has a negate or abs, strip that off.
8325 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8326 narg0 = fold_strip_sign_ops (arg0);
8327 narg1 = fold_strip_sign_ops (arg1);
8328 if (narg0 || narg1)
8329 {
8330 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8331 narg1 ? narg1 : arg1);
8332 }
8333
8334 /* If either argument is zero, hypot is fabs of the other. */
8335 if (real_zerop (arg0))
8336 return fold_build1 (ABS_EXPR, type, arg1);
8337 else if (real_zerop (arg1))
8338 return fold_build1 (ABS_EXPR, type, arg0);
8339
8340 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8341 if (flag_unsafe_math_optimizations
8342 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8343 {
8344 const REAL_VALUE_TYPE sqrt2_trunc
8345 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8346 return fold_build2 (MULT_EXPR, type,
8347 fold_build1 (ABS_EXPR, type, arg0),
8348 build_real (type, sqrt2_trunc));
8349 }
8350
8351 return NULL_TREE;
8352 }
8353
8354
/* Fold a builtin function call to pow, powf, or powl with arguments
   ARG0 (the base) and ARG1 (the exponent).  FNDECL is the call's
   declaration and TYPE its return type.  Return NULL_TREE if no
   simplification can be made.  */
static tree
fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand (type, build_real (type, dconst1),
				 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2 (RDIV_EXPR, type,
			    build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr (sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type),
				   *get_real_const (rv_third));

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr (cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact compile-time result is acceptable only
		 under unsafe math optimizations.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr (fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2 (MULT_EXPR, type, arg, arg1);
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
				    build_real (type, dconsthalf));
	  return build_call_expr (fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type),
				       *get_real_const (rv_third));
	      tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
					build_real (type, dconstroot));
	      return build_call_expr (fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  tree arg01 = CALL_EXPR_ARG (arg0, 1);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
	  return build_call_expr (fndecl, 2, arg00, narg1);
	}
    }

  return NULL_TREE;
}
8501
8502 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8503 Return NULL_TREE if no simplification can be made. */
8504 static tree
8505 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8506 tree arg0, tree arg1, tree type)
8507 {
8508 if (!validate_arg (arg0, REAL_TYPE)
8509 || !validate_arg (arg1, INTEGER_TYPE))
8510 return NULL_TREE;
8511
8512 /* Optimize pow(1.0,y) = 1.0. */
8513 if (real_onep (arg0))
8514 return omit_one_operand (type, build_real (type, dconst1), arg1);
8515
8516 if (host_integerp (arg1, 0))
8517 {
8518 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8519
8520 /* Evaluate powi at compile-time. */
8521 if (TREE_CODE (arg0) == REAL_CST
8522 && !TREE_OVERFLOW (arg0))
8523 {
8524 REAL_VALUE_TYPE x;
8525 x = TREE_REAL_CST (arg0);
8526 real_powi (&x, TYPE_MODE (type), &x, c);
8527 return build_real (type, x);
8528 }
8529
8530 /* Optimize pow(x,0) = 1.0. */
8531 if (c == 0)
8532 return omit_one_operand (type, build_real (type, dconst1),
8533 arg0);
8534
8535 /* Optimize pow(x,1) = x. */
8536 if (c == 1)
8537 return arg0;
8538
8539 /* Optimize pow(x,-1) = 1.0/x. */
8540 if (c == -1)
8541 return fold_build2 (RDIV_EXPR, type,
8542 build_real (type, dconst1), arg0);
8543 }
8544
8545 return NULL_TREE;
8546 }
8547
8548 /* A subroutine of fold_builtin to fold the various exponent
8549 functions. Return NULL_TREE if no simplification can be made.
8550 FUNC is the corresponding MPFR exponent function. */
8551
8552 static tree
8553 fold_builtin_exponent (tree fndecl, tree arg,
8554 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8555 {
8556 if (validate_arg (arg, REAL_TYPE))
8557 {
8558 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8559 tree res;
8560
8561 /* Calculate the result when the argument is a constant. */
8562 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8563 return res;
8564
8565 /* Optimize expN(logN(x)) = x. */
8566 if (flag_unsafe_math_optimizations)
8567 {
8568 const enum built_in_function fcode = builtin_mathfn_code (arg);
8569
8570 if ((func == mpfr_exp
8571 && (fcode == BUILT_IN_LOG
8572 || fcode == BUILT_IN_LOGF
8573 || fcode == BUILT_IN_LOGL))
8574 || (func == mpfr_exp2
8575 && (fcode == BUILT_IN_LOG2
8576 || fcode == BUILT_IN_LOG2F
8577 || fcode == BUILT_IN_LOG2L))
8578 || (func == mpfr_exp10
8579 && (fcode == BUILT_IN_LOG10
8580 || fcode == BUILT_IN_LOG10F
8581 || fcode == BUILT_IN_LOG10L)))
8582 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8583 }
8584 }
8585
8586 return NULL_TREE;
8587 }
8588
8589 /* Return true if VAR is a VAR_DECL or a component thereof. */
8590
8591 static bool
8592 var_decl_component_p (tree var)
8593 {
8594 tree inner = var;
8595 while (handled_component_p (inner))
8596 inner = TREE_OPERAND (inner, 0);
8597 return SSA_VAR_P (inner);
8598 }
8599
/* Fold function call to builtin memset.  DEST, C and LEN are the
   call's arguments, TYPE its return type and IGNORE true when the
   call's value is unused.  The call folds to a single scalar store
   when DEST is the address of a whole non-volatile scalar variable of
   exactly LEN bytes.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
{
  tree var, ret;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, c);

  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile integral or pointer
     variable (or component) for the single-store replacement.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
      && !POINTER_TYPE_P (TREE_TYPE (var)))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store must cover the variable exactly and DEST must be
     sufficiently aligned for it.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < (int) length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  /* Replicate the fill byte C across every byte of CVAL.  */
  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      /* Shift in two steps so the count never equals the width of
	 HOST_WIDE_INT (a shift by the full width is undefined).  */
      cval |= (cval << 31) << 1;
    }

  ret = build_int_cst_type (TREE_TYPE (var), cval);
  ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
  if (ignore)
    return ret;

  /* The call's value is DEST; sequence the store before it.  */
  return omit_one_operand (type, dest, ret);
}
8670
/* Fold function call to builtin bzero.  DEST and SIZE are the call's
   arguments and IGNORE is true when the call's value is unused.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bzero (tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  if (!ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fallback to
     calling bzero instead of memset.  */

  return fold_builtin_memset (dest, integer_zero_node,
			      fold_convert (sizetype, size),
			      void_type_node, ignore);
}
8693
/* Fold function call to builtin mem{{,p}cpy,move}.  DEST, SRC and LEN
   are the call's arguments, TYPE its return type and IGNORE true when
   the call's value is unused.  Return NULL_TREE if no simplification
   can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      if (endp == 3)
	{
	  int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (dest_align && src_align
	      && (readonly_data_expr (src)
		  || (host_integerp (len, 1)
		      && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
			  tree_low_cst (len, 1)))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr (fn, 3, dest, src, len);
	    }
	  return NULL_TREE;
	}

      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
	 This logic lose for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* The pointed-to types must be known, have constant size, and
	 that size must equal LEN exactly — the copy then moves one
	 whole object.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (!srctype || !desttype
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
	  || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	  || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	return NULL_TREE;

      /* Both pointers must be at least as aligned as their pointed-to
	 types require.  */
      if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
	  < (int) TYPE_ALIGN (desttype)
	  || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
	      < (int) TYPE_ALIGN (srctype)))
	return NULL_TREE;

      /* DEST is reused below to build the return value; make sure it
	 is evaluated only once.  */
      if (!ignore)
	dest = builtin_save_expr (dest);

      srcvar = build_fold_indirect_ref (src);
      if (TREE_THIS_VOLATILE (srcvar))
	return NULL_TREE;
      if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
	return NULL_TREE;
      /* With memcpy, it is possible to bypass aliasing rules, so without
	 this check i. e. execute/20060930-2.c would be misoptimized, because
	 it use conflicting alias set to hold argument for the memcpy call.
	 This check is probably unnecesary with -fno-strict-aliasing.
	 Similarly for destvar.  See also PR29286.  */
      if (!var_decl_component_p (srcvar)
	  /* Accept: memcpy (*char_var, "test", 1); that simplify
	     to char_var='t';  */
	  || is_gimple_min_invariant (srcvar)
	  || readonly_data_expr (src))
	return NULL_TREE;

      destvar = build_fold_indirect_ref (dest);
      if (TREE_THIS_VOLATILE (destvar))
	return NULL_TREE;
      if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
	return NULL_TREE;
      if (!var_decl_component_p (destvar))
	return NULL_TREE;

      /* Build the replacement assignment, converting the source value
	 to the destination type when the types differ.  */
      if (srctype == desttype
	  || (gimple_in_ssa_p (cfun)
	      && useless_type_conversion_p (desttype, srctype)))
	expr = srcvar;
      else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
		|| POINTER_TYPE_P (TREE_TYPE (srcvar)))
	       && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
		   || POINTER_TYPE_P (TREE_TYPE (destvar))))
	expr = fold_convert (TREE_TYPE (destvar), srcvar);
      else
	expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
    }

  if (ignore)
    return expr;

  if (endp == 0 || endp == 3)
    return omit_one_operand (type, dest, expr);

  if (expr == len)
    expr = NULL_TREE;

  /* For stpcpy-style semantics the result points at the last byte
     copied rather than one past it.  */
  if (endp == 2)
    len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
		       ssize_int (1));

  dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert (type, dest);
  if (expr)
    dest = omit_one_operand (type, dest, expr);
  return dest;
}
8833
8834 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8835 If LEN is not NULL, it represents the length of the string to be
8836 copied. Return NULL_TREE if no simplification can be made. */
8837
8838 tree
8839 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8840 {
8841 tree fn;
8842
8843 if (!validate_arg (dest, POINTER_TYPE)
8844 || !validate_arg (src, POINTER_TYPE))
8845 return NULL_TREE;
8846
8847 /* If SRC and DEST are the same (and not volatile), return DEST. */
8848 if (operand_equal_p (src, dest, 0))
8849 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8850
8851 if (optimize_size)
8852 return NULL_TREE;
8853
8854 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8855 if (!fn)
8856 return NULL_TREE;
8857
8858 if (!len)
8859 {
8860 len = c_strlen (src, 1);
8861 if (! len || TREE_SIDE_EFFECTS (len))
8862 return NULL_TREE;
8863 }
8864
8865 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8866 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8867 build_call_expr (fn, 3, dest, src, len));
8868 }
8869
8870 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8871 If SLEN is not NULL, it represents the length of the source string.
8872 Return NULL_TREE if no simplification can be made. */
8873
8874 tree
8875 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8876 {
8877 tree fn;
8878
8879 if (!validate_arg (dest, POINTER_TYPE)
8880 || !validate_arg (src, POINTER_TYPE)
8881 || !validate_arg (len, INTEGER_TYPE))
8882 return NULL_TREE;
8883
8884 /* If the LEN parameter is zero, return DEST. */
8885 if (integer_zerop (len))
8886 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8887
8888 /* We can't compare slen with len as constants below if len is not a
8889 constant. */
8890 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8891 return NULL_TREE;
8892
8893 if (!slen)
8894 slen = c_strlen (src, 1);
8895
8896 /* Now, we must be passed a constant src ptr parameter. */
8897 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8898 return NULL_TREE;
8899
8900 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8901
8902 /* We do not support simplification of this case, though we do
8903 support it when expanding trees into RTL. */
8904 /* FIXME: generate a call to __builtin_memset. */
8905 if (tree_int_cst_lt (slen, len))
8906 return NULL_TREE;
8907
8908 /* OK transform into builtin memcpy. */
8909 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8910 if (!fn)
8911 return NULL_TREE;
8912 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8913 build_call_expr (fn, 3, dest, src, len));
8914 }
8915
8916 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8917 arguments to the call, and TYPE is its return type.
8918 Return NULL_TREE if no simplification can be made. */
8919
8920 static tree
8921 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8922 {
8923 if (!validate_arg (arg1, POINTER_TYPE)
8924 || !validate_arg (arg2, INTEGER_TYPE)
8925 || !validate_arg (len, INTEGER_TYPE))
8926 return NULL_TREE;
8927 else
8928 {
8929 const char *p1;
8930
8931 if (TREE_CODE (arg2) != INTEGER_CST
8932 || !host_integerp (len, 1))
8933 return NULL_TREE;
8934
8935 p1 = c_getstr (arg1);
8936 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8937 {
8938 char c;
8939 const char *r;
8940 tree tem;
8941
8942 if (target_char_cast (arg2, &c))
8943 return NULL_TREE;
8944
8945 r = memchr (p1, c, tree_low_cst (len, 1));
8946
8947 if (r == NULL)
8948 return build_int_cst (TREE_TYPE (arg1), 0);
8949
8950 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8951 size_int (r - p1));
8952 return fold_convert (type, tem);
8953 }
8954 return NULL_TREE;
8955 }
8956 }
8957
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   LEN is the number of bytes to compare.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_memcmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  Both pointer
     arguments are kept for their side effects.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  /* Non-NULL only when the argument is a constant string.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      /* Only the sign of the host memcmp result is meaningful;
	 normalize it to -1/0/1.  */
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* Both bytes are read as const unsigned char.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9020
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   The folded result is an int: negative, zero or positive as ARG1
   compares to ARG2.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_strcmp (tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  /* Non-NULL only when the argument is a constant string.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Two constant strings: evaluate at compile time, normalizing the
     host strcmp result to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      /* The byte is read as const unsigned char.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert (integer_type_node,
			   build1 (INDIRECT_REF, cst_uchar_node,
				   fold_convert (cst_uchar_ptr_node,
						 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
9080
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   The folded result is an int: negative, zero or positive as the first
   LEN bytes of ARG1 compare to ARG2.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_strncmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  Both pointer
     arguments are kept for their side effects.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  /* Non-NULL only when the argument is a constant string.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Two constant strings and a constant length: evaluate at compile
     time, normalizing the host strncmp result to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      /* The byte is read as const unsigned char.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert (integer_type_node,
			   build1 (INDIRECT_REF, cst_uchar_node,
				   fold_convert (cst_uchar_ptr_node,
						 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9171
9172 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9173 ARG. Return NULL_TREE if no simplification can be made. */
9174
9175 static tree
9176 fold_builtin_signbit (tree arg, tree type)
9177 {
9178 tree temp;
9179
9180 if (!validate_arg (arg, REAL_TYPE))
9181 return NULL_TREE;
9182
9183 /* If ARG is a compile-time constant, determine the result. */
9184 if (TREE_CODE (arg) == REAL_CST
9185 && !TREE_OVERFLOW (arg))
9186 {
9187 REAL_VALUE_TYPE c;
9188
9189 c = TREE_REAL_CST (arg);
9190 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9191 return fold_convert (type, temp);
9192 }
9193
9194 /* If ARG is non-negative, the result is always zero. */
9195 if (tree_expr_nonnegative_p (arg))
9196 return omit_one_operand (type, integer_zero_node, arg);
9197
9198 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9199 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9200 return fold_build2 (LT_EXPR, type, arg,
9201 build_real (TREE_TYPE (arg), dconst0));
9202
9203 return NULL_TREE;
9204 }
9205
9206 /* Fold function call to builtin copysign, copysignf or copysignl with
9207 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9208 be made. */
9209
9210 static tree
9211 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9212 {
9213 tree tem;
9214
9215 if (!validate_arg (arg1, REAL_TYPE)
9216 || !validate_arg (arg2, REAL_TYPE))
9217 return NULL_TREE;
9218
9219 /* copysign(X,X) is X. */
9220 if (operand_equal_p (arg1, arg2, 0))
9221 return fold_convert (type, arg1);
9222
9223 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9224 if (TREE_CODE (arg1) == REAL_CST
9225 && TREE_CODE (arg2) == REAL_CST
9226 && !TREE_OVERFLOW (arg1)
9227 && !TREE_OVERFLOW (arg2))
9228 {
9229 REAL_VALUE_TYPE c1, c2;
9230
9231 c1 = TREE_REAL_CST (arg1);
9232 c2 = TREE_REAL_CST (arg2);
9233 /* c1.sign := c2.sign. */
9234 real_copysign (&c1, &c2);
9235 return build_real (type, c1);
9236 }
9237
9238 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9239 Remember to evaluate Y for side-effects. */
9240 if (tree_expr_nonnegative_p (arg2))
9241 return omit_one_operand (type,
9242 fold_build1 (ABS_EXPR, type, arg1),
9243 arg2);
9244
9245 /* Strip sign changing operations for the first argument. */
9246 tem = fold_strip_sign_ops (arg1);
9247 if (tem)
9248 return build_call_expr (fndecl, 2, tem, arg2);
9249
9250 return NULL_TREE;
9251 }
9252
9253 /* Fold a call to builtin isascii with argument ARG. */
9254
9255 static tree
9256 fold_builtin_isascii (tree arg)
9257 {
9258 if (!validate_arg (arg, INTEGER_TYPE))
9259 return NULL_TREE;
9260 else
9261 {
9262 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9263 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9264 build_int_cst (NULL_TREE,
9265 ~ (unsigned HOST_WIDE_INT) 0x7f));
9266 return fold_build2 (EQ_EXPR, integer_type_node,
9267 arg, integer_zero_node);
9268 }
9269 }
9270
9271 /* Fold a call to builtin toascii with argument ARG. */
9272
9273 static tree
9274 fold_builtin_toascii (tree arg)
9275 {
9276 if (!validate_arg (arg, INTEGER_TYPE))
9277 return NULL_TREE;
9278
9279 /* Transform toascii(c) -> (c & 0x7f). */
9280 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9281 build_int_cst (NULL_TREE, 0x7f));
9282 }
9283
9284 /* Fold a call to builtin isdigit with argument ARG. */
9285
9286 static tree
9287 fold_builtin_isdigit (tree arg)
9288 {
9289 if (!validate_arg (arg, INTEGER_TYPE))
9290 return NULL_TREE;
9291 else
9292 {
9293 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9294 /* According to the C standard, isdigit is unaffected by locale.
9295 However, it definitely is affected by the target character set. */
9296 unsigned HOST_WIDE_INT target_digit0
9297 = lang_hooks.to_target_charset ('0');
9298
9299 if (target_digit0 == 0)
9300 return NULL_TREE;
9301
9302 arg = fold_convert (unsigned_type_node, arg);
9303 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9304 build_int_cst (unsigned_type_node, target_digit0));
9305 return fold_build2 (LE_EXPR, integer_type_node, arg,
9306 build_int_cst (unsigned_type_node, 9));
9307 }
9308 }
9309
9310 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9311
9312 static tree
9313 fold_builtin_fabs (tree arg, tree type)
9314 {
9315 if (!validate_arg (arg, REAL_TYPE))
9316 return NULL_TREE;
9317
9318 arg = fold_convert (type, arg);
9319 if (TREE_CODE (arg) == REAL_CST)
9320 return fold_abs_const (arg, type);
9321 return fold_build1 (ABS_EXPR, type, arg);
9322 }
9323
9324 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9325
9326 static tree
9327 fold_builtin_abs (tree arg, tree type)
9328 {
9329 if (!validate_arg (arg, INTEGER_TYPE))
9330 return NULL_TREE;
9331
9332 arg = fold_convert (type, arg);
9333 if (TREE_CODE (arg) == INTEGER_CST)
9334 return fold_abs_const (arg, type);
9335 return fold_build1 (ABS_EXPR, type, arg);
9336 }
9337
/* Fold a call to builtin fmin or fmax.  ARG0 and ARG1 are the
   arguments, TYPE the return type, and MAX selects fmax over fmin.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand (type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand (type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand (type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert (type, arg0),
			    fold_convert (type, arg1));
    }
  return NULL_TREE;
}
9381
9382 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9383
9384 static tree
9385 fold_builtin_carg (tree arg, tree type)
9386 {
9387 if (validate_arg (arg, COMPLEX_TYPE))
9388 {
9389 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9390
9391 if (atan2_fn)
9392 {
9393 tree new_arg = builtin_save_expr (arg);
9394 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9395 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9396 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9397 }
9398 }
9399
9400 return NULL_TREE;
9401 }
9402
/* Fold a call to builtin logb/ilogb.  ARG is the argument and RETTYPE
   the builtin's return type (real for logb, integer for ilogb).
   Only compile-time constants are folded; return NULL_TREE
   otherwise.  */

static tree
fold_builtin_logb (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert (rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert (rettype, build_int_cst (NULL_TREE,
							 REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9443
/* Fold a call to builtin significand, if radix == 2.  ARG is the
   argument and RETTYPE the builtin's real return type.  Only
   compile-time constants are folded; return NULL_TREE otherwise.  */

static tree
fold_builtin_significand (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert (rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9482
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value, ARG1 the int* out-parameter receiving the exponent, and
   RETTYPE the real return type.  Only compile-time constants are
   folded; return NULL_TREE otherwise.  */

static tree
fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand (rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9538
/* Fold a call to builtin ldexp or scalbn/scalbln.  ARG0 is the value,
   ARG1 the integer exponent adjustment, and TYPE the return type.  If
   LDEXP is true then we can assume the base is two.  If it's false,
   then we have to check the mode of the TYPE parameter in certain
   cases.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand (type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For scalbn/scalbln this additionally requires the mode's
	 radix to be 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9598
9599 /* Fold a call to builtin modf. */
9600
9601 static tree
9602 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9603 {
9604 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9605 return NULL_TREE;
9606
9607 STRIP_NOPS (arg0);
9608
9609 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9610 return NULL_TREE;
9611
9612 arg1 = build_fold_indirect_ref (arg1);
9613
9614 /* Proceed if a valid pointer type was passed in. */
9615 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9616 {
9617 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9618 REAL_VALUE_TYPE trunc, frac;
9619
9620 switch (value->cl)
9621 {
9622 case rvc_nan:
9623 case rvc_zero:
9624 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9625 trunc = frac = *value;
9626 break;
9627 case rvc_inf:
9628 /* For +-Inf, return (*arg1 = arg0, +-0). */
9629 frac = dconst0;
9630 frac.sign = value->sign;
9631 trunc = *value;
9632 break;
9633 case rvc_normal:
9634 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9635 real_trunc (&trunc, VOIDmode, value);
9636 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9637 /* If the original number was negative and already
9638 integral, then the fractional part is -0.0. */
9639 if (value->sign && frac.cl == rvc_zero)
9640 frac.sign = value->sign;
9641 break;
9642 }
9643
9644 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9645 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9646 build_real (rettype, trunc));
9647 TREE_SIDE_EFFECTS (arg1) = 1;
9648 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9649 build_real (rettype, frac));
9650 }
9651
9652 return NULL_TREE;
9653 }
9654
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   FNDECL is the builtin's FUNCTION_DECL, ARG is the argument for the
   call, and BUILTIN_INDEX identifies which classification builtin is
   being folded.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Modes without infinities make isinf trivially false; ARG is
	 kept for its side effects.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* ARG is used twice below; save it so its side effects
	   happen only once.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr (signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr (isinf_fn, 1, arg);

	    signbit_call = fold_build2 (NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2 (NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* With neither NaNs nor infinities in the mode, every value is
	 finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) is x compared unordered with itself.  */
      arg = builtin_save_expr (arg);
      return fold_build2 (UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9746
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INF,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (tree exp)
{
  tree fp_nan, fp_inf, fp_normal, fp_subnormal, fp_zero, arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_inf = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* ARG is tested several times below; the saved fabs avoids
     re-evaluating it.  */
  arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INF :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* Built innermost-test-first; each subsequent test wraps the
     previous RES in another COND_EXPR.  */
  tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1): the smallest normalized value of MODE.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
  res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_inf, res);
    }

  if (HONOR_NANS (mode))
    {
      /* Ordered with itself means "not a NaN".  */
      tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9810
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  /* NOTE(review): if neither operand is a REAL_TYPE, cmp_type remains
     NULL_TREE at this point; presumably callers guarantee at least
     one real operand -- confirm before relying on this path.  */
  arg0 = fold_convert (cmp_type, arg0);
  arg1 = fold_convert (cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* Without NaNs the operands can never compare unordered; keep
	 them only for their side effects.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands (type, integer_zero_node, arg0, arg1);
      return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
    }

  /* Build the inverse comparison and negate it, so that unordered
     operands produce the desired result.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1 (TRUTH_NOT_EXPR, type,
		      fold_build2 (code, type, arg0, arg1));
}
9860
9861 /* Fold a call to built-in function FNDECL with 0 arguments.
9862 IGNORE is true if the result of the function call is ignored. This
9863 function returns NULL_TREE if no simplification was possible. */
9864
9865 static tree
9866 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9867 {
9868 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9869 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9870 switch (fcode)
9871 {
9872 CASE_FLT_FN (BUILT_IN_INF):
9873 case BUILT_IN_INFD32:
9874 case BUILT_IN_INFD64:
9875 case BUILT_IN_INFD128:
9876 return fold_builtin_inf (type, true);
9877
9878 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9879 return fold_builtin_inf (type, false);
9880
9881 case BUILT_IN_CLASSIFY_TYPE:
9882 return fold_builtin_classify_type (NULL_TREE);
9883
9884 default:
9885 break;
9886 }
9887 return NULL_TREE;
9888 }
9889
9890 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9891 IGNORE is true if the result of the function call is ignored. This
9892 function returns NULL_TREE if no simplification was possible. */
9893
9894 static tree
9895 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9896 {
9897 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9898 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9899 switch (fcode)
9900 {
9901
9902 case BUILT_IN_CONSTANT_P:
9903 {
9904 tree val = fold_builtin_constant_p (arg0);
9905
9906 /* Gimplification will pull the CALL_EXPR for the builtin out of
9907 an if condition. When not optimizing, we'll not CSE it back.
9908 To avoid link error types of regressions, return false now. */
9909 if (!val && !optimize)
9910 val = integer_zero_node;
9911
9912 return val;
9913 }
9914
9915 case BUILT_IN_CLASSIFY_TYPE:
9916 return fold_builtin_classify_type (arg0);
9917
9918 case BUILT_IN_STRLEN:
9919 return fold_builtin_strlen (arg0);
9920
9921 CASE_FLT_FN (BUILT_IN_FABS):
9922 return fold_builtin_fabs (arg0, type);
9923
9924 case BUILT_IN_ABS:
9925 case BUILT_IN_LABS:
9926 case BUILT_IN_LLABS:
9927 case BUILT_IN_IMAXABS:
9928 return fold_builtin_abs (arg0, type);
9929
9930 CASE_FLT_FN (BUILT_IN_CONJ):
9931 if (validate_arg (arg0, COMPLEX_TYPE))
9932 return fold_build1 (CONJ_EXPR, type, arg0);
9933 break;
9934
9935 CASE_FLT_FN (BUILT_IN_CREAL):
9936 if (validate_arg (arg0, COMPLEX_TYPE))
9937 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9938 break;
9939
9940 CASE_FLT_FN (BUILT_IN_CIMAG):
9941 if (validate_arg (arg0, COMPLEX_TYPE))
9942 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9943 break;
9944
9945 CASE_FLT_FN (BUILT_IN_CCOS):
9946 CASE_FLT_FN (BUILT_IN_CCOSH):
9947 /* These functions are "even", i.e. f(x) == f(-x). */
9948 if (validate_arg (arg0, COMPLEX_TYPE))
9949 {
9950 tree narg = fold_strip_sign_ops (arg0);
9951 if (narg)
9952 return build_call_expr (fndecl, 1, narg);
9953 }
9954 break;
9955
9956 CASE_FLT_FN (BUILT_IN_CABS):
9957 return fold_builtin_cabs (arg0, type, fndecl);
9958
9959 CASE_FLT_FN (BUILT_IN_CARG):
9960 return fold_builtin_carg (arg0, type);
9961
9962 CASE_FLT_FN (BUILT_IN_SQRT):
9963 return fold_builtin_sqrt (arg0, type);
9964
9965 CASE_FLT_FN (BUILT_IN_CBRT):
9966 return fold_builtin_cbrt (arg0, type);
9967
9968 CASE_FLT_FN (BUILT_IN_ASIN):
9969 if (validate_arg (arg0, REAL_TYPE))
9970 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9971 &dconstm1, &dconst1, true);
9972 break;
9973
9974 CASE_FLT_FN (BUILT_IN_ACOS):
9975 if (validate_arg (arg0, REAL_TYPE))
9976 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9977 &dconstm1, &dconst1, true);
9978 break;
9979
9980 CASE_FLT_FN (BUILT_IN_ATAN):
9981 if (validate_arg (arg0, REAL_TYPE))
9982 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9983 break;
9984
9985 CASE_FLT_FN (BUILT_IN_ASINH):
9986 if (validate_arg (arg0, REAL_TYPE))
9987 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9988 break;
9989
9990 CASE_FLT_FN (BUILT_IN_ACOSH):
9991 if (validate_arg (arg0, REAL_TYPE))
9992 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9993 &dconst1, NULL, true);
9994 break;
9995
9996 CASE_FLT_FN (BUILT_IN_ATANH):
9997 if (validate_arg (arg0, REAL_TYPE))
9998 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9999 &dconstm1, &dconst1, false);
10000 break;
10001
10002 CASE_FLT_FN (BUILT_IN_SIN):
10003 if (validate_arg (arg0, REAL_TYPE))
10004 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10005 break;
10006
10007 CASE_FLT_FN (BUILT_IN_COS):
10008 return fold_builtin_cos (arg0, type, fndecl);
10009 break;
10010
10011 CASE_FLT_FN (BUILT_IN_TAN):
10012 return fold_builtin_tan (arg0, type);
10013
10014 CASE_FLT_FN (BUILT_IN_CEXP):
10015 return fold_builtin_cexp (arg0, type);
10016
10017 CASE_FLT_FN (BUILT_IN_CEXPI):
10018 if (validate_arg (arg0, REAL_TYPE))
10019 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10020 break;
10021
10022 CASE_FLT_FN (BUILT_IN_SINH):
10023 if (validate_arg (arg0, REAL_TYPE))
10024 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10025 break;
10026
10027 CASE_FLT_FN (BUILT_IN_COSH):
10028 return fold_builtin_cosh (arg0, type, fndecl);
10029
10030 CASE_FLT_FN (BUILT_IN_TANH):
10031 if (validate_arg (arg0, REAL_TYPE))
10032 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10033 break;
10034
10035 CASE_FLT_FN (BUILT_IN_ERF):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10038 break;
10039
10040 CASE_FLT_FN (BUILT_IN_ERFC):
10041 if (validate_arg (arg0, REAL_TYPE))
10042 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10043 break;
10044
10045 CASE_FLT_FN (BUILT_IN_TGAMMA):
10046 if (validate_arg (arg0, REAL_TYPE))
10047 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10048 break;
10049
10050 CASE_FLT_FN (BUILT_IN_EXP):
10051 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10052
10053 CASE_FLT_FN (BUILT_IN_EXP2):
10054 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10055
10056 CASE_FLT_FN (BUILT_IN_EXP10):
10057 CASE_FLT_FN (BUILT_IN_POW10):
10058 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10059
10060 CASE_FLT_FN (BUILT_IN_EXPM1):
10061 if (validate_arg (arg0, REAL_TYPE))
10062 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10063 break;
10064
10065 CASE_FLT_FN (BUILT_IN_LOG):
10066 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10067
10068 CASE_FLT_FN (BUILT_IN_LOG2):
10069 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10070
10071 CASE_FLT_FN (BUILT_IN_LOG10):
10072 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10073
10074 CASE_FLT_FN (BUILT_IN_LOG1P):
10075 if (validate_arg (arg0, REAL_TYPE))
10076 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10077 &dconstm1, NULL, false);
10078 break;
10079
10080 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10081 CASE_FLT_FN (BUILT_IN_J0):
10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10084 NULL, NULL, 0);
10085 break;
10086
10087 CASE_FLT_FN (BUILT_IN_J1):
10088 if (validate_arg (arg0, REAL_TYPE))
10089 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10090 NULL, NULL, 0);
10091 break;
10092
10093 CASE_FLT_FN (BUILT_IN_Y0):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10096 &dconst0, NULL, false);
10097 break;
10098
10099 CASE_FLT_FN (BUILT_IN_Y1):
10100 if (validate_arg (arg0, REAL_TYPE))
10101 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10102 &dconst0, NULL, false);
10103 break;
10104 #endif
10105
10106 CASE_FLT_FN (BUILT_IN_NAN):
10107 case BUILT_IN_NAND32:
10108 case BUILT_IN_NAND64:
10109 case BUILT_IN_NAND128:
10110 return fold_builtin_nan (arg0, type, true);
10111
10112 CASE_FLT_FN (BUILT_IN_NANS):
10113 return fold_builtin_nan (arg0, type, false);
10114
10115 CASE_FLT_FN (BUILT_IN_FLOOR):
10116 return fold_builtin_floor (fndecl, arg0);
10117
10118 CASE_FLT_FN (BUILT_IN_CEIL):
10119 return fold_builtin_ceil (fndecl, arg0);
10120
10121 CASE_FLT_FN (BUILT_IN_TRUNC):
10122 return fold_builtin_trunc (fndecl, arg0);
10123
10124 CASE_FLT_FN (BUILT_IN_ROUND):
10125 return fold_builtin_round (fndecl, arg0);
10126
10127 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10128 CASE_FLT_FN (BUILT_IN_RINT):
10129 return fold_trunc_transparent_mathfn (fndecl, arg0);
10130
10131 CASE_FLT_FN (BUILT_IN_LCEIL):
10132 CASE_FLT_FN (BUILT_IN_LLCEIL):
10133 CASE_FLT_FN (BUILT_IN_LFLOOR):
10134 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10135 CASE_FLT_FN (BUILT_IN_LROUND):
10136 CASE_FLT_FN (BUILT_IN_LLROUND):
10137 return fold_builtin_int_roundingfn (fndecl, arg0);
10138
10139 CASE_FLT_FN (BUILT_IN_LRINT):
10140 CASE_FLT_FN (BUILT_IN_LLRINT):
10141 return fold_fixed_mathfn (fndecl, arg0);
10142
10143 case BUILT_IN_BSWAP32:
10144 case BUILT_IN_BSWAP64:
10145 return fold_builtin_bswap (fndecl, arg0);
10146
10147 CASE_INT_FN (BUILT_IN_FFS):
10148 CASE_INT_FN (BUILT_IN_CLZ):
10149 CASE_INT_FN (BUILT_IN_CTZ):
10150 CASE_INT_FN (BUILT_IN_POPCOUNT):
10151 CASE_INT_FN (BUILT_IN_PARITY):
10152 return fold_builtin_bitop (fndecl, arg0);
10153
10154 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10155 return fold_builtin_signbit (arg0, type);
10156
10157 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10158 return fold_builtin_significand (arg0, type);
10159
10160 CASE_FLT_FN (BUILT_IN_ILOGB):
10161 CASE_FLT_FN (BUILT_IN_LOGB):
10162 return fold_builtin_logb (arg0, type);
10163
10164 case BUILT_IN_ISASCII:
10165 return fold_builtin_isascii (arg0);
10166
10167 case BUILT_IN_TOASCII:
10168 return fold_builtin_toascii (arg0);
10169
10170 case BUILT_IN_ISDIGIT:
10171 return fold_builtin_isdigit (arg0);
10172
10173 CASE_FLT_FN (BUILT_IN_FINITE):
10174 case BUILT_IN_FINITED32:
10175 case BUILT_IN_FINITED64:
10176 case BUILT_IN_FINITED128:
10177 case BUILT_IN_ISFINITE:
10178 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10179
10180 CASE_FLT_FN (BUILT_IN_ISINF):
10181 case BUILT_IN_ISINFD32:
10182 case BUILT_IN_ISINFD64:
10183 case BUILT_IN_ISINFD128:
10184 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10185
10186 case BUILT_IN_ISINF_SIGN:
10187 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10188
10189 CASE_FLT_FN (BUILT_IN_ISNAN):
10190 case BUILT_IN_ISNAND32:
10191 case BUILT_IN_ISNAND64:
10192 case BUILT_IN_ISNAND128:
10193 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10194
10195 case BUILT_IN_PRINTF:
10196 case BUILT_IN_PRINTF_UNLOCKED:
10197 case BUILT_IN_VPRINTF:
10198 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10199
10200 default:
10201 break;
10202 }
10203
10204 return NULL_TREE;
10205
10206 }
10207
10208 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10209 IGNORE is true if the result of the function call is ignored. This
10210 function returns NULL_TREE if no simplification was possible. */
10211
10212 static tree
10213 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10214 {
10215 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10216 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10217
10218 switch (fcode)
10219 {
10220 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10221 CASE_FLT_FN (BUILT_IN_JN):
10222 if (validate_arg (arg0, INTEGER_TYPE)
10223 && validate_arg (arg1, REAL_TYPE))
10224 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10225 break;
10226
10227 CASE_FLT_FN (BUILT_IN_YN):
10228 if (validate_arg (arg0, INTEGER_TYPE)
10229 && validate_arg (arg1, REAL_TYPE))
10230 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10231 &dconst0, false);
10232 break;
10233
10234 CASE_FLT_FN (BUILT_IN_DREM):
10235 CASE_FLT_FN (BUILT_IN_REMAINDER):
10236 if (validate_arg (arg0, REAL_TYPE)
10237 && validate_arg(arg1, REAL_TYPE))
10238 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10239 break;
10240
10241 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10242 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10243 if (validate_arg (arg0, REAL_TYPE)
10244 && validate_arg(arg1, POINTER_TYPE))
10245 return do_mpfr_lgamma_r (arg0, arg1, type);
10246 break;
10247 #endif
10248
10249 CASE_FLT_FN (BUILT_IN_ATAN2):
10250 if (validate_arg (arg0, REAL_TYPE)
10251 && validate_arg(arg1, REAL_TYPE))
10252 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10253 break;
10254
10255 CASE_FLT_FN (BUILT_IN_FDIM):
10256 if (validate_arg (arg0, REAL_TYPE)
10257 && validate_arg(arg1, REAL_TYPE))
10258 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10259 break;
10260
10261 CASE_FLT_FN (BUILT_IN_HYPOT):
10262 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10263
10264 CASE_FLT_FN (BUILT_IN_LDEXP):
10265 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10266 CASE_FLT_FN (BUILT_IN_SCALBN):
10267 CASE_FLT_FN (BUILT_IN_SCALBLN):
10268 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10269
10270 CASE_FLT_FN (BUILT_IN_FREXP):
10271 return fold_builtin_frexp (arg0, arg1, type);
10272
10273 CASE_FLT_FN (BUILT_IN_MODF):
10274 return fold_builtin_modf (arg0, arg1, type);
10275
10276 case BUILT_IN_BZERO:
10277 return fold_builtin_bzero (arg0, arg1, ignore);
10278
10279 case BUILT_IN_FPUTS:
10280 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10281
10282 case BUILT_IN_FPUTS_UNLOCKED:
10283 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10284
10285 case BUILT_IN_STRSTR:
10286 return fold_builtin_strstr (arg0, arg1, type);
10287
10288 case BUILT_IN_STRCAT:
10289 return fold_builtin_strcat (arg0, arg1);
10290
10291 case BUILT_IN_STRSPN:
10292 return fold_builtin_strspn (arg0, arg1);
10293
10294 case BUILT_IN_STRCSPN:
10295 return fold_builtin_strcspn (arg0, arg1);
10296
10297 case BUILT_IN_STRCHR:
10298 case BUILT_IN_INDEX:
10299 return fold_builtin_strchr (arg0, arg1, type);
10300
10301 case BUILT_IN_STRRCHR:
10302 case BUILT_IN_RINDEX:
10303 return fold_builtin_strrchr (arg0, arg1, type);
10304
10305 case BUILT_IN_STRCPY:
10306 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10307
10308 case BUILT_IN_STRCMP:
10309 return fold_builtin_strcmp (arg0, arg1);
10310
10311 case BUILT_IN_STRPBRK:
10312 return fold_builtin_strpbrk (arg0, arg1, type);
10313
10314 case BUILT_IN_EXPECT:
10315 return fold_builtin_expect (arg0, arg1);
10316
10317 CASE_FLT_FN (BUILT_IN_POW):
10318 return fold_builtin_pow (fndecl, arg0, arg1, type);
10319
10320 CASE_FLT_FN (BUILT_IN_POWI):
10321 return fold_builtin_powi (fndecl, arg0, arg1, type);
10322
10323 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10324 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10325
10326 CASE_FLT_FN (BUILT_IN_FMIN):
10327 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10328
10329 CASE_FLT_FN (BUILT_IN_FMAX):
10330 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10331
10332 case BUILT_IN_ISGREATER:
10333 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10334 case BUILT_IN_ISGREATEREQUAL:
10335 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10336 case BUILT_IN_ISLESS:
10337 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10338 case BUILT_IN_ISLESSEQUAL:
10339 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10340 case BUILT_IN_ISLESSGREATER:
10341 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10342 case BUILT_IN_ISUNORDERED:
10343 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10344 NOP_EXPR);
10345
10346 /* We do the folding for va_start in the expander. */
10347 case BUILT_IN_VA_START:
10348 break;
10349
10350 case BUILT_IN_SPRINTF:
10351 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10352
10353 case BUILT_IN_OBJECT_SIZE:
10354 return fold_builtin_object_size (arg0, arg1);
10355
10356 case BUILT_IN_PRINTF:
10357 case BUILT_IN_PRINTF_UNLOCKED:
10358 case BUILT_IN_VPRINTF:
10359 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10360
10361 case BUILT_IN_PRINTF_CHK:
10362 case BUILT_IN_VPRINTF_CHK:
10363 if (!validate_arg (arg0, INTEGER_TYPE)
10364 || TREE_SIDE_EFFECTS (arg0))
10365 return NULL_TREE;
10366 else
10367 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10368 break;
10369
10370 case BUILT_IN_FPRINTF:
10371 case BUILT_IN_FPRINTF_UNLOCKED:
10372 case BUILT_IN_VFPRINTF:
10373 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10374 ignore, fcode);
10375
10376 default:
10377 break;
10378 }
10379 return NULL_TREE;
10380 }
10381
10382 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10383 and ARG2. IGNORE is true if the result of the function call is ignored.
10384 This function returns NULL_TREE if no simplification was possible. */
10385
10386 static tree
10387 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10388 {
10389 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10390 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10391 switch (fcode)
10392 {
10393
10394 CASE_FLT_FN (BUILT_IN_SINCOS):
10395 return fold_builtin_sincos (arg0, arg1, arg2);
10396
10397 CASE_FLT_FN (BUILT_IN_FMA):
10398 if (validate_arg (arg0, REAL_TYPE)
10399 && validate_arg(arg1, REAL_TYPE)
10400 && validate_arg(arg2, REAL_TYPE))
10401 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10402 break;
10403
10404 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10405 CASE_FLT_FN (BUILT_IN_REMQUO):
10406 if (validate_arg (arg0, REAL_TYPE)
10407 && validate_arg(arg1, REAL_TYPE)
10408 && validate_arg(arg2, POINTER_TYPE))
10409 return do_mpfr_remquo (arg0, arg1, arg2);
10410 break;
10411 #endif
10412
10413 case BUILT_IN_MEMSET:
10414 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10415
10416 case BUILT_IN_BCOPY:
10417 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10418
10419 case BUILT_IN_MEMCPY:
10420 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10421
10422 case BUILT_IN_MEMPCPY:
10423 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10424
10425 case BUILT_IN_MEMMOVE:
10426 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10427
10428 case BUILT_IN_STRNCAT:
10429 return fold_builtin_strncat (arg0, arg1, arg2);
10430
10431 case BUILT_IN_STRNCPY:
10432 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10433
10434 case BUILT_IN_STRNCMP:
10435 return fold_builtin_strncmp (arg0, arg1, arg2);
10436
10437 case BUILT_IN_MEMCHR:
10438 return fold_builtin_memchr (arg0, arg1, arg2, type);
10439
10440 case BUILT_IN_BCMP:
10441 case BUILT_IN_MEMCMP:
10442 return fold_builtin_memcmp (arg0, arg1, arg2);;
10443
10444 case BUILT_IN_SPRINTF:
10445 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10446
10447 case BUILT_IN_STRCPY_CHK:
10448 case BUILT_IN_STPCPY_CHK:
10449 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10450 ignore, fcode);
10451
10452 case BUILT_IN_STRCAT_CHK:
10453 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10454
10455 case BUILT_IN_PRINTF_CHK:
10456 case BUILT_IN_VPRINTF_CHK:
10457 if (!validate_arg (arg0, INTEGER_TYPE)
10458 || TREE_SIDE_EFFECTS (arg0))
10459 return NULL_TREE;
10460 else
10461 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10462 break;
10463
10464 case BUILT_IN_FPRINTF:
10465 case BUILT_IN_FPRINTF_UNLOCKED:
10466 case BUILT_IN_VFPRINTF:
10467 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10468
10469 case BUILT_IN_FPRINTF_CHK:
10470 case BUILT_IN_VFPRINTF_CHK:
10471 if (!validate_arg (arg1, INTEGER_TYPE)
10472 || TREE_SIDE_EFFECTS (arg1))
10473 return NULL_TREE;
10474 else
10475 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10476 ignore, fcode);
10477
10478 default:
10479 break;
10480 }
10481 return NULL_TREE;
10482 }
10483
10484 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10485 ARG2, and ARG3. IGNORE is true if the result of the function call is
10486 ignored. This function returns NULL_TREE if no simplification was
10487 possible. */
10488
10489 static tree
10490 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10491 bool ignore)
10492 {
10493 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10494
10495 switch (fcode)
10496 {
10497 case BUILT_IN_MEMCPY_CHK:
10498 case BUILT_IN_MEMPCPY_CHK:
10499 case BUILT_IN_MEMMOVE_CHK:
10500 case BUILT_IN_MEMSET_CHK:
10501 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10502 NULL_TREE, ignore,
10503 DECL_FUNCTION_CODE (fndecl));
10504
10505 case BUILT_IN_STRNCPY_CHK:
10506 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10507
10508 case BUILT_IN_STRNCAT_CHK:
10509 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10510
10511 case BUILT_IN_FPRINTF_CHK:
10512 case BUILT_IN_VFPRINTF_CHK:
10513 if (!validate_arg (arg1, INTEGER_TYPE)
10514 || TREE_SIDE_EFFECTS (arg1))
10515 return NULL_TREE;
10516 else
10517 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10518 ignore, fcode);
10519 break;
10520
10521 default:
10522 break;
10523 }
10524 return NULL_TREE;
10525 }
10526
10527 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10528 arguments, where NARGS <= 4. IGNORE is true if the result of the
10529 function call is ignored. This function returns NULL_TREE if no
10530 simplification was possible. Note that this only folds builtins with
10531 fixed argument patterns. Foldings that do varargs-to-varargs
10532 transformations, or that match calls with more than 4 arguments,
10533 need to be handled with fold_builtin_varargs instead. */
10534
10535 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10536
10537 static tree
10538 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10539 {
10540 tree ret = NULL_TREE;
10541
10542 switch (nargs)
10543 {
10544 case 0:
10545 ret = fold_builtin_0 (fndecl, ignore);
10546 break;
10547 case 1:
10548 ret = fold_builtin_1 (fndecl, args[0], ignore);
10549 break;
10550 case 2:
10551 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10552 break;
10553 case 3:
10554 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10555 break;
10556 case 4:
10557 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10558 ignore);
10559 break;
10560 default:
10561 break;
10562 }
10563 if (ret)
10564 {
10565 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10566 TREE_NO_WARNING (ret) = 1;
10567 return ret;
10568 }
10569 return NULL_TREE;
10570 }
10571
10572 /* Builtins with folding operations that operate on "..." arguments
10573 need special handling; we need to store the arguments in a convenient
10574 data structure before attempting any folding. Fortunately there are
10575 only a few builtins that fall into this category. FNDECL is the
10576 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10577 result of the function call is ignored. */
10578
10579 static tree
10580 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10581 {
10582 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10583 tree ret = NULL_TREE;
10584
10585 switch (fcode)
10586 {
10587 case BUILT_IN_SPRINTF_CHK:
10588 case BUILT_IN_VSPRINTF_CHK:
10589 ret = fold_builtin_sprintf_chk (exp, fcode);
10590 break;
10591
10592 case BUILT_IN_SNPRINTF_CHK:
10593 case BUILT_IN_VSNPRINTF_CHK:
10594 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10595 break;
10596
10597 case BUILT_IN_FPCLASSIFY:
10598 ret = fold_builtin_fpclassify (exp);
10599 break;
10600
10601 default:
10602 break;
10603 }
10604 if (ret)
10605 {
10606 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10607 TREE_NO_WARNING (ret) = 1;
10608 return ret;
10609 }
10610 return NULL_TREE;
10611 }
10612
10613 /* A wrapper function for builtin folding that prevents warnings for
10614 "statement without effect" and the like, caused by removing the
10615 call node earlier than the warning is generated. */
10616
tree
fold_call_expr (tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  /* Only fold calls whose callee is a known builtin FUNCTION_DECL.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Machine-dependent builtins are delegated to the target hook.
	 FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first, then the varargs ones.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (fndecl, exp, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
		{
		  tree realret = ret;
		  /* The folders wrap results in a NOP_EXPR; set the
		     location on the expression underneath.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
10679
10680 /* Conveniently construct a function call expression. FNDECL names the
10681 function to be called and ARGLIST is a TREE_LIST of arguments. */
10682
10683 tree
10684 build_function_call_expr (tree fndecl, tree arglist)
10685 {
10686 tree fntype = TREE_TYPE (fndecl);
10687 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10688 int n = list_length (arglist);
10689 tree *argarray = (tree *) alloca (n * sizeof (tree));
10690 int i;
10691
10692 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10693 argarray[i] = TREE_VALUE (arglist);
10694 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10695 }
10696
10697 /* Conveniently construct a function call expression. FNDECL names the
10698 function to be called, N is the number of arguments, and the "..."
10699 parameters are the argument expressions. */
10700
10701 tree
10702 build_call_expr (tree fndecl, int n, ...)
10703 {
10704 va_list ap;
10705 tree fntype = TREE_TYPE (fndecl);
10706 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10707 tree *argarray = (tree *) alloca (n * sizeof (tree));
10708 int i;
10709
10710 va_start (ap, n);
10711 for (i = 0; i < n; i++)
10712 argarray[i] = va_arg (ap, tree);
10713 va_end (ap);
10714 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10715 }
10716
10717 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10718 N arguments are passed in the array ARGARRAY. */
10719
tree
fold_builtin_call_array (tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  /* Folding only applies when FN is the address of a builtin
     FUNCTION_DECL; otherwise just build the call.  */
  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array (type, fn, n, argarray);
	    }
	  /* Machine-dependent builtin: the target hook takes a TREE_LIST,
	     so cons one up from ARGARRAY.  */
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      tree arglist = NULL_TREE;
	      for (i = n - 1; i >= 0; i--)
		arglist = tree_cons (NULL_TREE, argarray[i], arglist);
	      ret = targetm.fold_builtin (fndecl, arglist, false);
	      if (ret)
		return ret;
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array (type, fn, n, argarray);
	  ret = fold_builtin_varargs (fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array (type, fn, n, argarray);
}
10774
10775 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10776 along with N new arguments specified as the "..." parameters. SKIP
10777 is the number of arguments in EXP to be omitted. This function is used
10778 to do varargs-to-varargs transformations. */
10779
10780 static tree
10781 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10782 {
10783 int oldnargs = call_expr_nargs (exp);
10784 int nargs = oldnargs - skip + n;
10785 tree fntype = TREE_TYPE (fndecl);
10786 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10787 tree *buffer;
10788
10789 if (n > 0)
10790 {
10791 int i, j;
10792 va_list ap;
10793
10794 buffer = alloca (nargs * sizeof (tree));
10795 va_start (ap, n);
10796 for (i = 0; i < n; i++)
10797 buffer[i] = va_arg (ap, tree);
10798 va_end (ap);
10799 for (j = skip; j < oldnargs; j++, i++)
10800 buffer[i] = CALL_EXPR_ARG (exp, j);
10801 }
10802 else
10803 buffer = CALL_EXPR_ARGP (exp) + skip;
10804
10805 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10806 }
10807
10808 /* Validate a single argument ARG against a tree code CODE representing
10809 a type. */
10810
10811 static bool
10812 validate_arg (const_tree arg, enum tree_code code)
10813 {
10814 if (!arg)
10815 return false;
10816 else if (code == POINTER_TYPE)
10817 return POINTER_TYPE_P (TREE_TYPE (arg));
10818 else if (code == INTEGER_TYPE)
10819 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10820 return code == TREE_CODE (TREE_TYPE (arg));
10821 }
10822
10823 /* This function validates the types of a function call argument list
10824 against a specified list of tree_codes. If the last specifier is a 0,
10825 that represents an ellipses, otherwise the last specifier must be a
10826 VOID_TYPE. */
10827
10828 bool
10829 validate_arglist (const_tree callexpr, ...)
10830 {
10831 enum tree_code code;
10832 bool res = 0;
10833 va_list ap;
10834 const_call_expr_arg_iterator iter;
10835 const_tree arg;
10836
10837 va_start (ap, callexpr);
10838 init_const_call_expr_arg_iterator (callexpr, &iter);
10839
10840 do
10841 {
10842 code = va_arg (ap, enum tree_code);
10843 switch (code)
10844 {
10845 case 0:
10846 /* This signifies an ellipses, any further arguments are all ok. */
10847 res = true;
10848 goto end;
10849 case VOID_TYPE:
10850 /* This signifies an endlink, if no arguments remain, return
10851 true, otherwise return false. */
10852 res = !more_const_call_expr_args_p (&iter);
10853 goto end;
10854 default:
10855 /* If no parameters remain or the parameter's code does not
10856 match the specified code, return false. Otherwise continue
10857 checking any remaining arguments. */
10858 arg = next_const_call_expr_arg (&iter);
10859 if (!validate_arg (arg, code))
10860 goto end;
10861 break;
10862 }
10863 }
10864 while (1);
10865
10866 /* We need gotos here since we can only have one VA_CLOSE in a
10867 function. */
10868 end: ;
10869 va_end (ap);
10870
10871 return res;
10872 }
10873
10874 /* Default target-specific builtin expander that does nothing. */
10875
10876 rtx
10877 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10878 rtx target ATTRIBUTE_UNUSED,
10879 rtx subtarget ATTRIBUTE_UNUSED,
10880 enum machine_mode mode ATTRIBUTE_UNUSED,
10881 int ignore ATTRIBUTE_UNUSED)
10882 {
10883 return NULL_RTX;
10884 }
10885
10886 /* Returns true is EXP represents data that would potentially reside
10887 in a readonly section. */
10888
10889 static bool
10890 readonly_data_expr (tree exp)
10891 {
10892 STRIP_NOPS (exp);
10893
10894 if (TREE_CODE (exp) != ADDR_EXPR)
10895 return false;
10896
10897 exp = get_base_address (TREE_OPERAND (exp, 0));
10898 if (!exp)
10899 return false;
10900
10901 /* Make sure we call decl_readonly_section only for trees it
10902 can handle (since it returns true for everything it doesn't
10903 understand). */
10904 if (TREE_CODE (exp) == STRING_CST
10905 || TREE_CODE (exp) == CONSTRUCTOR
10906 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10907 return decl_readonly_section (exp, 0);
10908 else
10909 return false;
10910 }
10911
10912 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10913 to the call, and TYPE is its return type.
10914
10915 Return NULL_TREE if no simplification was possible, otherwise return the
10916 simplified form of the call as a tree.
10917
10918 The simplified form may be a constant or other expression which
10919 computes the same value, but in a more efficient manner (including
10920 calls to other builtin functions).
10921
10922 The call may contain arguments which need to be evaluated, but
10923 which are not useful to determine the result of the call. In
10924 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10925 COMPOUND_EXPR will be an argument which must be evaluated.
10926 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10927 COMPOUND_EXPR in the chain will contain the tree for the simplified
10928 form of the builtin function call. */
10929
10930 static tree
10931 fold_builtin_strstr (tree s1, tree s2, tree type)
10932 {
10933 if (!validate_arg (s1, POINTER_TYPE)
10934 || !validate_arg (s2, POINTER_TYPE))
10935 return NULL_TREE;
10936 else
10937 {
10938 tree fn;
10939 const char *p1, *p2;
10940
10941 p2 = c_getstr (s2);
10942 if (p2 == NULL)
10943 return NULL_TREE;
10944
10945 p1 = c_getstr (s1);
10946 if (p1 != NULL)
10947 {
10948 const char *r = strstr (p1, p2);
10949 tree tem;
10950
10951 if (r == NULL)
10952 return build_int_cst (TREE_TYPE (s1), 0);
10953
10954 /* Return an offset into the constant string argument. */
10955 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10956 s1, size_int (r - p1));
10957 return fold_convert (type, tem);
10958 }
10959
10960 /* The argument is const char *, and the result is char *, so we need
10961 a type conversion here to avoid a warning. */
10962 if (p2[0] == '\0')
10963 return fold_convert (type, s1);
10964
10965 if (p2[1] != '\0')
10966 return NULL_TREE;
10967
10968 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10969 if (!fn)
10970 return NULL_TREE;
10971
10972 /* New argument list transforming strstr(s1, s2) to
10973 strchr(s1, s2[0]). */
10974 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10975 }
10976 }
10977
10978 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10979 the call, and TYPE is its return type.
10980
10981 Return NULL_TREE if no simplification was possible, otherwise return the
10982 simplified form of the call as a tree.
10983
10984 The simplified form may be a constant or other expression which
10985 computes the same value, but in a more efficient manner (including
10986 calls to other builtin functions).
10987
10988 The call may contain arguments which need to be evaluated, but
10989 which are not useful to determine the result of the call. In
10990 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10991 COMPOUND_EXPR will be an argument which must be evaluated.
10992 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10993 COMPOUND_EXPR in the chain will contain the tree for the simplified
10994 form of the builtin function call. */
10995
10996 static tree
10997 fold_builtin_strchr (tree s1, tree s2, tree type)
10998 {
10999 if (!validate_arg (s1, POINTER_TYPE)
11000 || !validate_arg (s2, INTEGER_TYPE))
11001 return NULL_TREE;
11002 else
11003 {
11004 const char *p1;
11005
11006 if (TREE_CODE (s2) != INTEGER_CST)
11007 return NULL_TREE;
11008
11009 p1 = c_getstr (s1);
11010 if (p1 != NULL)
11011 {
11012 char c;
11013 const char *r;
11014 tree tem;
11015
11016 if (target_char_cast (s2, &c))
11017 return NULL_TREE;
11018
11019 r = strchr (p1, c);
11020
11021 if (r == NULL)
11022 return build_int_cst (TREE_TYPE (s1), 0);
11023
11024 /* Return an offset into the constant string argument. */
11025 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11026 s1, size_int (r - p1));
11027 return fold_convert (type, tem);
11028 }
11029 return NULL_TREE;
11030 }
11031 }
11032
11033 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11034 the call, and TYPE is its return type.
11035
11036 Return NULL_TREE if no simplification was possible, otherwise return the
11037 simplified form of the call as a tree.
11038
11039 The simplified form may be a constant or other expression which
11040 computes the same value, but in a more efficient manner (including
11041 calls to other builtin functions).
11042
11043 The call may contain arguments which need to be evaluated, but
11044 which are not useful to determine the result of the call. In
11045 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11046 COMPOUND_EXPR will be an argument which must be evaluated.
11047 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11048 COMPOUND_EXPR in the chain will contain the tree for the simplified
11049 form of the builtin function call. */
11050
11051 static tree
11052 fold_builtin_strrchr (tree s1, tree s2, tree type)
11053 {
11054 if (!validate_arg (s1, POINTER_TYPE)
11055 || !validate_arg (s2, INTEGER_TYPE))
11056 return NULL_TREE;
11057 else
11058 {
11059 tree fn;
11060 const char *p1;
11061
11062 if (TREE_CODE (s2) != INTEGER_CST)
11063 return NULL_TREE;
11064
11065 p1 = c_getstr (s1);
11066 if (p1 != NULL)
11067 {
11068 char c;
11069 const char *r;
11070 tree tem;
11071
11072 if (target_char_cast (s2, &c))
11073 return NULL_TREE;
11074
11075 r = strrchr (p1, c);
11076
11077 if (r == NULL)
11078 return build_int_cst (TREE_TYPE (s1), 0);
11079
11080 /* Return an offset into the constant string argument. */
11081 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11082 s1, size_int (r - p1));
11083 return fold_convert (type, tem);
11084 }
11085
11086 if (! integer_zerop (s2))
11087 return NULL_TREE;
11088
11089 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11090 if (!fn)
11091 return NULL_TREE;
11092
11093 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11094 return build_call_expr (fn, 2, s1, s2);
11095 }
11096 }
11097
11098 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11099 to the call, and TYPE is its return type.
11100
11101 Return NULL_TREE if no simplification was possible, otherwise return the
11102 simplified form of the call as a tree.
11103
11104 The simplified form may be a constant or other expression which
11105 computes the same value, but in a more efficient manner (including
11106 calls to other builtin functions).
11107
11108 The call may contain arguments which need to be evaluated, but
11109 which are not useful to determine the result of the call. In
11110 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11111 COMPOUND_EXPR will be an argument which must be evaluated.
11112 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11113 COMPOUND_EXPR in the chain will contain the tree for the simplified
11114 form of the builtin function call. */
11115
11116 static tree
11117 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11118 {
11119 if (!validate_arg (s1, POINTER_TYPE)
11120 || !validate_arg (s2, POINTER_TYPE))
11121 return NULL_TREE;
11122 else
11123 {
11124 tree fn;
11125 const char *p1, *p2;
11126
11127 p2 = c_getstr (s2);
11128 if (p2 == NULL)
11129 return NULL_TREE;
11130
11131 p1 = c_getstr (s1);
11132 if (p1 != NULL)
11133 {
11134 const char *r = strpbrk (p1, p2);
11135 tree tem;
11136
11137 if (r == NULL)
11138 return build_int_cst (TREE_TYPE (s1), 0);
11139
11140 /* Return an offset into the constant string argument. */
11141 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11142 s1, size_int (r - p1));
11143 return fold_convert (type, tem);
11144 }
11145
11146 if (p2[0] == '\0')
11147 /* strpbrk(x, "") == NULL.
11148 Evaluate and ignore s1 in case it had side-effects. */
11149 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11150
11151 if (p2[1] != '\0')
11152 return NULL_TREE; /* Really call strpbrk. */
11153
11154 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11155 if (!fn)
11156 return NULL_TREE;
11157
11158 /* New argument list transforming strpbrk(s1, s2) to
11159 strchr(s1, s2[0]). */
11160 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11161 }
11162 }
11163
11164 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11165 to the call.
11166
11167 Return NULL_TREE if no simplification was possible, otherwise return the
11168 simplified form of the call as a tree.
11169
11170 The simplified form may be a constant or other expression which
11171 computes the same value, but in a more efficient manner (including
11172 calls to other builtin functions).
11173
11174 The call may contain arguments which need to be evaluated, but
11175 which are not useful to determine the result of the call. In
11176 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11177 COMPOUND_EXPR will be an argument which must be evaluated.
11178 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11179 COMPOUND_EXPR in the chain will contain the tree for the simplified
11180 form of the builtin function call. */
11181
11182 static tree
11183 fold_builtin_strcat (tree dst, tree src)
11184 {
11185 if (!validate_arg (dst, POINTER_TYPE)
11186 || !validate_arg (src, POINTER_TYPE))
11187 return NULL_TREE;
11188 else
11189 {
11190 const char *p = c_getstr (src);
11191
11192 /* If the string length is zero, return the dst parameter. */
11193 if (p && *p == '\0')
11194 return dst;
11195
11196 return NULL_TREE;
11197 }
11198 }
11199
11200 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11201 arguments to the call.
11202
11203 Return NULL_TREE if no simplification was possible, otherwise return the
11204 simplified form of the call as a tree.
11205
11206 The simplified form may be a constant or other expression which
11207 computes the same value, but in a more efficient manner (including
11208 calls to other builtin functions).
11209
11210 The call may contain arguments which need to be evaluated, but
11211 which are not useful to determine the result of the call. In
11212 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11213 COMPOUND_EXPR will be an argument which must be evaluated.
11214 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11215 COMPOUND_EXPR in the chain will contain the tree for the simplified
11216 form of the builtin function call. */
11217
11218 static tree
11219 fold_builtin_strncat (tree dst, tree src, tree len)
11220 {
11221 if (!validate_arg (dst, POINTER_TYPE)
11222 || !validate_arg (src, POINTER_TYPE)
11223 || !validate_arg (len, INTEGER_TYPE))
11224 return NULL_TREE;
11225 else
11226 {
11227 const char *p = c_getstr (src);
11228
11229 /* If the requested length is zero, or the src parameter string
11230 length is zero, return the dst parameter. */
11231 if (integer_zerop (len) || (p && *p == '\0'))
11232 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11233
11234 /* If the requested len is greater than or equal to the string
11235 length, call strcat. */
11236 if (TREE_CODE (len) == INTEGER_CST && p
11237 && compare_tree_int (len, strlen (p)) >= 0)
11238 {
11239 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11240
11241 /* If the replacement _DECL isn't initialized, don't do the
11242 transformation. */
11243 if (!fn)
11244 return NULL_TREE;
11245
11246 return build_call_expr (fn, 2, dst, src);
11247 }
11248 return NULL_TREE;
11249 }
11250 }
11251
11252 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11253 to the call.
11254
11255 Return NULL_TREE if no simplification was possible, otherwise return the
11256 simplified form of the call as a tree.
11257
11258 The simplified form may be a constant or other expression which
11259 computes the same value, but in a more efficient manner (including
11260 calls to other builtin functions).
11261
11262 The call may contain arguments which need to be evaluated, but
11263 which are not useful to determine the result of the call. In
11264 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11265 COMPOUND_EXPR will be an argument which must be evaluated.
11266 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11267 COMPOUND_EXPR in the chain will contain the tree for the simplified
11268 form of the builtin function call. */
11269
11270 static tree
11271 fold_builtin_strspn (tree s1, tree s2)
11272 {
11273 if (!validate_arg (s1, POINTER_TYPE)
11274 || !validate_arg (s2, POINTER_TYPE))
11275 return NULL_TREE;
11276 else
11277 {
11278 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11279
11280 /* If both arguments are constants, evaluate at compile-time. */
11281 if (p1 && p2)
11282 {
11283 const size_t r = strspn (p1, p2);
11284 return size_int (r);
11285 }
11286
11287 /* If either argument is "", return NULL_TREE. */
11288 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11289 /* Evaluate and ignore both arguments in case either one has
11290 side-effects. */
11291 return omit_two_operands (integer_type_node, integer_zero_node,
11292 s1, s2);
11293 return NULL_TREE;
11294 }
11295 }
11296
11297 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11298 to the call.
11299
11300 Return NULL_TREE if no simplification was possible, otherwise return the
11301 simplified form of the call as a tree.
11302
11303 The simplified form may be a constant or other expression which
11304 computes the same value, but in a more efficient manner (including
11305 calls to other builtin functions).
11306
11307 The call may contain arguments which need to be evaluated, but
11308 which are not useful to determine the result of the call. In
11309 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11310 COMPOUND_EXPR will be an argument which must be evaluated.
11311 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11312 COMPOUND_EXPR in the chain will contain the tree for the simplified
11313 form of the builtin function call. */
11314
11315 static tree
11316 fold_builtin_strcspn (tree s1, tree s2)
11317 {
11318 if (!validate_arg (s1, POINTER_TYPE)
11319 || !validate_arg (s2, POINTER_TYPE))
11320 return NULL_TREE;
11321 else
11322 {
11323 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11324
11325 /* If both arguments are constants, evaluate at compile-time. */
11326 if (p1 && p2)
11327 {
11328 const size_t r = strcspn (p1, p2);
11329 return size_int (r);
11330 }
11331
11332 /* If the first argument is "", return NULL_TREE. */
11333 if (p1 && *p1 == '\0')
11334 {
11335 /* Evaluate and ignore argument s2 in case it has
11336 side-effects. */
11337 return omit_one_operand (integer_type_node,
11338 integer_zero_node, s2);
11339 }
11340
11341 /* If the second argument is "", return __builtin_strlen(s1). */
11342 if (p2 && *p2 == '\0')
11343 {
11344 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11345
11346 /* If the replacement _DECL isn't initialized, don't do the
11347 transformation. */
11348 if (!fn)
11349 return NULL_TREE;
11350
11351 return build_call_expr (fn, 1, s1);
11352 }
11353 return NULL_TREE;
11354 }
11355 }
11356
11357 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11358 to the call. IGNORE is true if the value returned
11359 by the builtin will be ignored. UNLOCKED is true is true if this
11360 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11361 the known length of the string. Return NULL_TREE if no simplification
11362 was possible. */
11363
11364 tree
11365 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11366 {
11367 /* If we're using an unlocked function, assume the other unlocked
11368 functions exist explicitly. */
11369 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11370 : implicit_built_in_decls[BUILT_IN_FPUTC];
11371 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11372 : implicit_built_in_decls[BUILT_IN_FWRITE];
11373
11374 /* If the return value is used, don't do the transformation. */
11375 if (!ignore)
11376 return NULL_TREE;
11377
11378 /* Verify the arguments in the original call. */
11379 if (!validate_arg (arg0, POINTER_TYPE)
11380 || !validate_arg (arg1, POINTER_TYPE))
11381 return NULL_TREE;
11382
11383 if (! len)
11384 len = c_strlen (arg0, 0);
11385
11386 /* Get the length of the string passed to fputs. If the length
11387 can't be determined, punt. */
11388 if (!len
11389 || TREE_CODE (len) != INTEGER_CST)
11390 return NULL_TREE;
11391
11392 switch (compare_tree_int (len, 1))
11393 {
11394 case -1: /* length is 0, delete the call entirely . */
11395 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11396
11397 case 0: /* length is 1, call fputc. */
11398 {
11399 const char *p = c_getstr (arg0);
11400
11401 if (p != NULL)
11402 {
11403 if (fn_fputc)
11404 return build_call_expr (fn_fputc, 2,
11405 build_int_cst (NULL_TREE, p[0]), arg1);
11406 else
11407 return NULL_TREE;
11408 }
11409 }
11410 /* FALLTHROUGH */
11411 case 1: /* length is greater than 1, call fwrite. */
11412 {
11413 /* If optimizing for size keep fputs. */
11414 if (optimize_size)
11415 return NULL_TREE;
11416 /* New argument list transforming fputs(string, stream) to
11417 fwrite(string, 1, len, stream). */
11418 if (fn_fwrite)
11419 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11420 else
11421 return NULL_TREE;
11422 }
11423 default:
11424 gcc_unreachable ();
11425 }
11426 return NULL_TREE;
11427 }
11428
11429 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11430 produced. False otherwise. This is done so that we don't output the error
11431 or warning twice or three times. */
11432 bool
11433 fold_builtin_next_arg (tree exp, bool va_start_p)
11434 {
11435 tree fntype = TREE_TYPE (current_function_decl);
11436 int nargs = call_expr_nargs (exp);
11437 tree arg;
11438
11439 if (TYPE_ARG_TYPES (fntype) == 0
11440 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11441 == void_type_node))
11442 {
11443 error ("%<va_start%> used in function with fixed args");
11444 return true;
11445 }
11446
11447 if (va_start_p)
11448 {
11449 if (va_start_p && (nargs != 2))
11450 {
11451 error ("wrong number of arguments to function %<va_start%>");
11452 return true;
11453 }
11454 arg = CALL_EXPR_ARG (exp, 1);
11455 }
11456 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11457 when we checked the arguments and if needed issued a warning. */
11458 else
11459 {
11460 if (nargs == 0)
11461 {
11462 /* Evidently an out of date version of <stdarg.h>; can't validate
11463 va_start's second argument, but can still work as intended. */
11464 warning (0, "%<__builtin_next_arg%> called without an argument");
11465 return true;
11466 }
11467 else if (nargs > 1)
11468 {
11469 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11470 return true;
11471 }
11472 arg = CALL_EXPR_ARG (exp, 0);
11473 }
11474
11475 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11476 or __builtin_next_arg (0) the first time we see it, after checking
11477 the arguments and if needed issuing a warning. */
11478 if (!integer_zerop (arg))
11479 {
11480 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11481
11482 /* Strip off all nops for the sake of the comparison. This
11483 is not quite the same as STRIP_NOPS. It does more.
11484 We must also strip off INDIRECT_EXPR for C++ reference
11485 parameters. */
11486 while (CONVERT_EXPR_P (arg)
11487 || TREE_CODE (arg) == INDIRECT_REF)
11488 arg = TREE_OPERAND (arg, 0);
11489 if (arg != last_parm)
11490 {
11491 /* FIXME: Sometimes with the tree optimizers we can get the
11492 not the last argument even though the user used the last
11493 argument. We just warn and set the arg to be the last
11494 argument so that we will get wrong-code because of
11495 it. */
11496 warning (0, "second parameter of %<va_start%> not last named argument");
11497 }
11498 /* We want to verify the second parameter just once before the tree
11499 optimizers are run and then avoid keeping it in the tree,
11500 as otherwise we could warn even for correct code like:
11501 void foo (int i, ...)
11502 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11503 if (va_start_p)
11504 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11505 else
11506 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11507 }
11508 return false;
11509 }
11510
11511
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* Bail out if the target's execution character set (target_percent
     etc.) could not be determined.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      call = build_call_expr (fn, 2, dest, fmt);
      /* sprintf returns the number of characters written, which here
	 is just the length of the literal format string.  */
      if (!ignored)
	retval = build_int_cst (NULL_TREE, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
	{
	  /* The return value is strlen (orig), which must be a
	     compile-time constant to be usable here.  */
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr (fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* Glue the strcpy call and the known return value together with a
	 COMPOUND_EXPR, converted to sprintf's declared return type.  */
      retval = fold_convert
	(TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
11598
11599 /* Expand a call EXP to __builtin_object_size. */
11600
11601 rtx
11602 expand_builtin_object_size (tree exp)
11603 {
11604 tree ost;
11605 int object_size_type;
11606 tree fndecl = get_callee_fndecl (exp);
11607
11608 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11609 {
11610 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11611 exp, fndecl);
11612 expand_builtin_trap ();
11613 return const0_rtx;
11614 }
11615
11616 ost = CALL_EXPR_ARG (exp, 1);
11617 STRIP_NOPS (ost);
11618
11619 if (TREE_CODE (ost) != INTEGER_CST
11620 || tree_int_cst_sgn (ost) < 0
11621 || compare_tree_int (ost, 3) > 0)
11622 {
11623 error ("%Klast argument of %D is not integer constant between 0 and 3",
11624 exp, fndecl);
11625 expand_builtin_trap ();
11626 return const0_rtx;
11627 }
11628
11629 object_size_type = tree_low_cst (ost, 0);
11630
11631 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11632 }
11633
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* __memset_chk takes an integer fill value as its second argument;
     the other variants take a source pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! host_integerp (size, 1))
    return NULL_RTX;

  /* SIZE of all ones means the object size is unknown, so the runtime
     check can never trigger; with a constant LEN we can compare at
     compile time.  Either way the call reduces to the unchecked
     mem* function.  */
  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  /* A constant LEN larger than the object: diagnose, but keep
	     the checked call so the overflow is caught at runtime.  */
	  warning (0, "%Kcall to %D will always overflow destination buffer",
		   exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      fn = build_call_expr (fn, 3, dest, src, len);
      STRIP_TYPE_NOPS (fn);
      /* Folding may have wrapped the call in COMPOUND_EXPRs; expand any
	 side-effect operands and dig down to the call itself.  */
      while (TREE_CODE (fn) == COMPOUND_EXPR)
	{
	  expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
		       EXPAND_NORMAL);
	  fn = TREE_OPERAND (fn, 1);
	}
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN.  */
	  expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_expr (fn, 4, dest, src, len, size);
	      STRIP_TYPE_NOPS (fn);
	      /* Same COMPOUND_EXPR unwrapping as above.  */
	      while (TREE_CODE (fn) == COMPOUND_EXPR)
		{
		  expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
			       EXPAND_NORMAL);
		  fn = TREE_OPERAND (fn, 1);
		}
	      if (TREE_CODE (fn) == CALL_EXPR)
		CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11767
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is a call to one of the __*_chk builtins identified by FCODE;
   the length-like argument and the destination object size sit at
   different argument positions depending on the builtin.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;	/* Nonzero when LEN is a source string, not a count.  */
  tree len, size;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE of all ones means the object size is unknown; nothing to check.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN is a source string; warn only when its length (including
	 the terminating NUL) is known to reach SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Source length unknown: the overflow is possible, not certain.  */
	  warning (0, "%Kcall to %D might overflow destination buffer",
		   exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning (0, "%Kcall to %D will always overflow destination buffer",
	   exp, get_callee_fndecl (exp));
}
11834
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  EXP is the call expression
   and FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
   A warning is emitted only when the formatted output length can be
   computed exactly and does not fit in the destination.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  dest = CALL_EXPR_ARG (exp, 0);
  flag = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* A non-constant or all-ones SIZE means the destination object size
     could not be determined, so nothing can be proved.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* Overflow is certain when LEN output characters plus the
     terminating NUL cannot fit, i.e. when LEN >= SIZE.  */
  if (! tree_int_cst_lt (len, size))
    {
      warning (0, "%Kcall to %D will always overflow destination buffer",
	       exp, get_callee_fndecl (exp));
    }
}
11894
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  PTR is the pointer argument; OST is the requested
   object-size type, which must be a literal constant in [0, 3].
   Returns a size_t INTEGER_CST with the computed size, or NULL_TREE
   when folding must be delayed or is impossible.  */

tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* OST must be a compile-time constant between 0 and 3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
			  compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      /* The "unknown" marker is -1 for types 0/1 and 0 for types
	 2/3; only a different value is a real answer.  */
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
					     ? -1 : 0))
	ret = build_int_cstu (size_type_node, bytes);
    }

  if (ret)
    {
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      /* Give up if the computed value does not fit in size_t.  */
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
	ret = NULL_TREE;
    }

  return ret;
}
11950
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  FNDECL is the builtin's declaration; its
   return type is used for the folded expression.  Returns the folded
   call (normally a call to the unchecked variant) or NULL_TREE when
   the check cannot be proved redundant.  */

tree
fold_builtin_memory_chk (tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For __memset_chk the second argument is the fill value, an
     integer; for the others it is a source pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
      else
	{
	  tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means the object size is unknown, in which case
     the check never fires and the call can always be simplified.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* The check is redundant only when SIZE >= MAXLEN.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 3, dest, src, len);
}
12044
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  FNDECL is the builtin's
   declaration; its return type is used for the folded expression.
   Returns the folded call or NULL_TREE when the object-size check
   cannot be proved redundant.  */

tree
fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means the object size is unknown and the check
     never fires; skip straight to the unchecked call below.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy strlen (SRC) + 1 bytes to include the NUL.  */
	      len = size_binop (PLUS_EXPR, len, ssize_int (1));
	      return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
				   build_call_expr (fn, 4,
						    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* The check is redundant only when MAXLEN < SIZE (one extra
	 byte is needed for the terminating NUL).  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 2, dest, src);
}
12124
12125 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12126 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12127 length passed as third argument. */
12128
12129 tree
12130 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12131 tree maxlen)
12132 {
12133 tree fn;
12134
12135 if (!validate_arg (dest, POINTER_TYPE)
12136 || !validate_arg (src, POINTER_TYPE)
12137 || !validate_arg (len, INTEGER_TYPE)
12138 || !validate_arg (size, INTEGER_TYPE))
12139 return NULL_TREE;
12140
12141 if (! host_integerp (size, 1))
12142 return NULL_TREE;
12143
12144 if (! integer_all_onesp (size))
12145 {
12146 if (! host_integerp (len, 1))
12147 {
12148 /* If LEN is not constant, try MAXLEN too.
12149 For MAXLEN only allow optimizing into non-_ocs function
12150 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12151 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12152 return NULL_TREE;
12153 }
12154 else
12155 maxlen = len;
12156
12157 if (tree_int_cst_lt (size, maxlen))
12158 return NULL_TREE;
12159 }
12160
12161 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12162 fn = built_in_decls[BUILT_IN_STRNCPY];
12163 if (!fn)
12164 return NULL_TREE;
12165
12166 return build_call_expr (fn, 3, dest, src, len);
12167 }
12168
12169 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12170 are the arguments to the call. */
12171
12172 static tree
12173 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12174 {
12175 tree fn;
12176 const char *p;
12177
12178 if (!validate_arg (dest, POINTER_TYPE)
12179 || !validate_arg (src, POINTER_TYPE)
12180 || !validate_arg (size, INTEGER_TYPE))
12181 return NULL_TREE;
12182
12183 p = c_getstr (src);
12184 /* If the SRC parameter is "", return DEST. */
12185 if (p && *p == '\0')
12186 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12187
12188 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12189 return NULL_TREE;
12190
12191 /* If __builtin_strcat_chk is used, assume strcat is available. */
12192 fn = built_in_decls[BUILT_IN_STRCAT];
12193 if (!fn)
12194 return NULL_TREE;
12195
12196 return build_call_expr (fn, 2, dest, src);
12197 }
12198
12199 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12200 LEN, and SIZE. */
12201
12202 static tree
12203 fold_builtin_strncat_chk (tree fndecl,
12204 tree dest, tree src, tree len, tree size)
12205 {
12206 tree fn;
12207 const char *p;
12208
12209 if (!validate_arg (dest, POINTER_TYPE)
12210 || !validate_arg (src, POINTER_TYPE)
12211 || !validate_arg (size, INTEGER_TYPE)
12212 || !validate_arg (size, INTEGER_TYPE))
12213 return NULL_TREE;
12214
12215 p = c_getstr (src);
12216 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12217 if (p && *p == '\0')
12218 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12219 else if (integer_zerop (len))
12220 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12221
12222 if (! host_integerp (size, 1))
12223 return NULL_TREE;
12224
12225 if (! integer_all_onesp (size))
12226 {
12227 tree src_len = c_strlen (src, 1);
12228 if (src_len
12229 && host_integerp (src_len, 1)
12230 && host_integerp (len, 1)
12231 && ! tree_int_cst_lt (len, src_len))
12232 {
12233 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12234 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12235 if (!fn)
12236 return NULL_TREE;
12237
12238 return build_call_expr (fn, 3, dest, src, size);
12239 }
12240 return NULL_TREE;
12241 }
12242
12243 /* If __builtin_strncat_chk is used, assume strncat is available. */
12244 fn = built_in_decls[BUILT_IN_STRNCAT];
12245 if (!fn)
12246 return NULL_TREE;
12247
12248 return build_call_expr (fn, 3, dest, src, len);
12249 }
12250
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
   The call is rewritten into plain {,v}sprintf when either the object
   size is unknown or the output length is provably smaller than it,
   and the checking flag permits dropping the check.  */

static tree
fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN will hold the provable output length, if any.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For sprintf a %-free format must not have extra
	     arguments; vsprintf takes a va_list so NARGS is fixed.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = CALL_EXPR_ARG (exp, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* With a known object size, only fold when LEN is known and the
     output (plus NUL) fits, i.e. LEN < SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the FLAG and SIZE arguments (positions 1 and 2), keeping
     DEST, FMT and any trailing arguments.  */
  return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
}
12341
/* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  The call is rewritten into plain
   {,v}snprintf when the object-size check is provably redundant.  */

tree
fold_builtin_snprintf_chk (tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means the object size is unknown; otherwise
     prove SIZE >= the snprintf bound.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the FLAG and SIZE arguments (positions 2 and 3), keeping
     DEST, LEN, FMT and any trailing arguments.  */
  return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
}
12418
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  The simplified forms are
   calls to putchar or puts; they are only valid when the original
   return value is unused (IGNORE), since their return values differ
   from printf's.  */

static tree
fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle a format of "%s" (with a constant-string argument) or a
     format with no % directives at all; both reduce to printing one
     known string STR.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr (fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = alloca (len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      /* puts appends the newline that was stripped above.  */
	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr (fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr (fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr (fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  /* The replacement's return value is discarded (IGNORE above), so
     only the type needs to match the original call's.  */
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12553
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  The simplified forms call
   fputc or fputs and are only valid when the original return value is
   unused (IGNORE), since their return values differ from fprintf's.  */

static tree
fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr (fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;
  /* The replacement's return value is discarded (IGNORE above), so
     only the type needs to match the original call's.  */
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12651
12652 /* Initialize format string characters in the target charset. */
12653
12654 static bool
12655 init_target_chars (void)
12656 {
12657 static bool init;
12658 if (!init)
12659 {
12660 target_newline = lang_hooks.to_target_charset ('\n');
12661 target_percent = lang_hooks.to_target_charset ('%');
12662 target_c = lang_hooks.to_target_charset ('c');
12663 target_s = lang_hooks.to_target_charset ('s');
12664 if (target_newline == 0 || target_percent == 0 || target_c == 0
12665 || target_s == 0)
12666 return false;
12667
12668 target_percent_c[0] = target_percent;
12669 target_percent_c[1] = target_c;
12670 target_percent_c[2] = '\0';
12671
12672 target_percent_s[0] = target_percent;
12673 target_percent_s[1] = target_s;
12674 target_percent_s[2] = '\0';
12675
12676 target_percent_s_newline[0] = target_percent;
12677 target_percent_s_newline[1] = target_s;
12678 target_percent_s_newline[2] = target_newline;
12679 target_percent_s_newline[3] = '\0';
12680
12681 init = true;
12682 }
12683 return true;
12684 }
12685
12686 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12687 and no overflow/underflow occurred. INEXACT is true if M was not
12688 exactly calculated. TYPE is the tree type for the result. This
12689 function assumes that you cleared the MPFR flags and then
12690 calculated M to see if anything subsequently set a flag prior to
12691 entering this function. Return NULL_TREE if any checks fail. */
12692
12693 static tree
12694 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12695 {
12696 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12697 overflow/underflow occurred. If -frounding-math, proceed iff the
12698 result of calling FUNC was exact. */
12699 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12700 && (!flag_rounding_math || !inexact))
12701 {
12702 REAL_VALUE_TYPE rr;
12703
12704 real_from_mpfr (&rr, m, type, GMP_RNDN);
12705 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12706 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12707 but the mpft_t is not, then we underflowed in the
12708 conversion. */
12709 if (real_isfinite (&rr)
12710 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12711 {
12712 REAL_VALUE_TYPE rmode;
12713
12714 real_convert (&rmode, TYPE_MODE (type), &rr);
12715 /* Proceed iff the specified mode can hold the value. */
12716 if (real_identical (&rmode, &rr))
12717 return build_real (type, rmode);
12718 }
12719 }
12720 return NULL_TREE;
12721 }
12722
12723 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12724 FUNC on it and return the resulting value as a tree with type TYPE.
12725 If MIN and/or MAX are not NULL, then the supplied ARG must be
12726 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12727 acceptable values, otherwise they are not. The mpfr precision is
12728 set to the precision of TYPE. We assume that function FUNC returns
12729 zero if the result could be calculated exactly within the requested
12730 precision. */
12731
12732 static tree
12733 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12734 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12735 bool inclusive)
12736 {
12737 tree result = NULL_TREE;
12738
12739 STRIP_NOPS (arg);
12740
12741 /* To proceed, MPFR must exactly represent the target floating point
12742 format, which only happens when the target base equals two. */
12743 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12744 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12745 {
12746 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12747
12748 if (real_isfinite (ra)
12749 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12750 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12751 {
12752 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12753 int inexact;
12754 mpfr_t m;
12755
12756 mpfr_init2 (m, prec);
12757 mpfr_from_real (m, ra, GMP_RNDN);
12758 mpfr_clear_flags ();
12759 inexact = func (m, m, GMP_RNDN);
12760 result = do_mpfr_ckconv (m, type, inexact);
12761 mpfr_clear (m);
12762 }
12763 }
12764
12765 return result;
12766 }
12767
12768 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12769 FUNC on it and return the resulting value as a tree with type TYPE.
12770 The mpfr precision is set to the precision of TYPE. We assume that
12771 function FUNC returns zero if the result could be calculated
12772 exactly within the requested precision. */
12773
12774 static tree
12775 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12776 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12777 {
12778 tree result = NULL_TREE;
12779
12780 STRIP_NOPS (arg1);
12781 STRIP_NOPS (arg2);
12782
12783 /* To proceed, MPFR must exactly represent the target floating point
12784 format, which only happens when the target base equals two. */
12785 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12786 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12787 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12788 {
12789 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12790 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12791
12792 if (real_isfinite (ra1) && real_isfinite (ra2))
12793 {
12794 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12795 int inexact;
12796 mpfr_t m1, m2;
12797
12798 mpfr_inits2 (prec, m1, m2, NULL);
12799 mpfr_from_real (m1, ra1, GMP_RNDN);
12800 mpfr_from_real (m2, ra2, GMP_RNDN);
12801 mpfr_clear_flags ();
12802 inexact = func (m1, m1, m2, GMP_RNDN);
12803 result = do_mpfr_ckconv (m1, type, inexact);
12804 mpfr_clears (m1, m2, NULL);
12805 }
12806 }
12807
12808 return result;
12809 }
12810
12811 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12812 FUNC on it and return the resulting value as a tree with type TYPE.
12813 The mpfr precision is set to the precision of TYPE. We assume that
12814 function FUNC returns zero if the result could be calculated
12815 exactly within the requested precision. */
12816
12817 static tree
12818 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12819 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12820 {
12821 tree result = NULL_TREE;
12822
12823 STRIP_NOPS (arg1);
12824 STRIP_NOPS (arg2);
12825 STRIP_NOPS (arg3);
12826
12827 /* To proceed, MPFR must exactly represent the target floating point
12828 format, which only happens when the target base equals two. */
12829 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12830 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12831 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12832 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12833 {
12834 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12835 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12836 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12837
12838 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12839 {
12840 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12841 int inexact;
12842 mpfr_t m1, m2, m3;
12843
12844 mpfr_inits2 (prec, m1, m2, m3, NULL);
12845 mpfr_from_real (m1, ra1, GMP_RNDN);
12846 mpfr_from_real (m2, ra2, GMP_RNDN);
12847 mpfr_from_real (m3, ra3, GMP_RNDN);
12848 mpfr_clear_flags ();
12849 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12850 result = do_mpfr_ckconv (m1, type, inexact);
12851 mpfr_clears (m1, m2, m3, NULL);
12852 }
12853 }
12854
12855 return result;
12856 }
12857
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value (cos as the real part, sin as the imaginary
   part — presumably for folding cexpi-style builtins; confirm with
   callers).
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  Returns NULL_TREE when
   folding is not possible.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* NaN and Inf are not folded here.  */
      if (real_isfinite (ra))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* mpfr_sin_cos computes both results in one call; its single
	     return value is used as the inexact flag for checking both
	     conversions below.  */
	  inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed iff valid pointer types were passed in; the
		 pointees must match TYPE up to qualifiers.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  Each assignment is flagged as
		     having side effects so later folding does not
		     discard it.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
12925
12926 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12927 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12928 two-argument mpfr order N Bessel function FUNC on them and return
12929 the resulting value as a tree with type TYPE. The mpfr precision
12930 is set to the precision of TYPE. We assume that function FUNC
12931 returns zero if the result could be calculated exactly within the
12932 requested precision. */
12933 static tree
12934 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12935 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12936 const REAL_VALUE_TYPE *min, bool inclusive)
12937 {
12938 tree result = NULL_TREE;
12939
12940 STRIP_NOPS (arg1);
12941 STRIP_NOPS (arg2);
12942
12943 /* To proceed, MPFR must exactly represent the target floating point
12944 format, which only happens when the target base equals two. */
12945 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12946 && host_integerp (arg1, 0)
12947 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12948 {
12949 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12950 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12951
12952 if (n == (long)n
12953 && real_isfinite (ra)
12954 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12955 {
12956 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12957 int inexact;
12958 mpfr_t m;
12959
12960 mpfr_init2 (m, prec);
12961 mpfr_from_real (m, ra, GMP_RNDN);
12962 mpfr_clear_flags ();
12963 inexact = func (m, n, m, GMP_RNDN);
12964 result = do_mpfr_ckconv (m, type, inexact);
12965 mpfr_clear (m);
12966 }
12967 }
12968
12969 return result;
12970 }
12971
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Returns NULL_TREE when folding is not
   possible.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      /* NaN and Inf are not folded here.  */
      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  The assignment is flagged as having
		     side effects so later folding does not discard it.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13042
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Returns NULL_TREE when folding is not possible.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer, since lgamma has a pole at those points.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* mpfr_lgamma additionally reports the sign of gamma(arg)
	     through SG.  */
	  inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  The assignment
		 is flagged as having side effects so later folding does
		 not discard it.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13105 #endif