]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/builtins.c
alias.c: Use REG_P...
[thirdparty/gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
53
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
57
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 #ifdef HAVE_mpc
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 #endif
64
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
68
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
71 {
72 #include "builtins.def"
73 };
74 #undef DEF_BUILTIN
75
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
83
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_sincos (tree);
107 static rtx expand_builtin_cexpi (tree, rtx, rtx);
108 static rtx expand_builtin_int_roundingfn (tree, rtx);
109 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
129 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
130 enum machine_mode, int);
131 static rtx expand_builtin_bcopy (tree, int);
132 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
133 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
134 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
136 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
137 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
139 static rtx expand_builtin_bzero (tree);
140 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
143 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
144 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
145 static rtx expand_builtin_alloca (tree, rtx);
146 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static rtx expand_builtin_fputs (tree, rtx, bool);
149 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
150 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
151 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
152 static tree stabilize_va_list (tree, int);
153 static rtx expand_builtin_expect (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_expect (tree, tree);
156 static tree fold_builtin_classify_type (tree);
157 static tree fold_builtin_strlen (tree);
158 static tree fold_builtin_inf (tree, int);
159 static tree fold_builtin_nan (tree, tree, int);
160 static tree rewrite_call_expr (tree, int, tree, int, ...);
161 static bool validate_arg (const_tree, enum tree_code code);
162 static bool integer_valued_real_p (tree);
163 static tree fold_trunc_transparent_mathfn (tree, tree);
164 static bool readonly_data_expr (tree);
165 static rtx expand_builtin_fabs (tree, rtx, rtx);
166 static rtx expand_builtin_signbit (tree, rtx);
167 static tree fold_builtin_sqrt (tree, tree);
168 static tree fold_builtin_cbrt (tree, tree);
169 static tree fold_builtin_pow (tree, tree, tree, tree);
170 static tree fold_builtin_powi (tree, tree, tree, tree);
171 static tree fold_builtin_cos (tree, tree, tree);
172 static tree fold_builtin_cosh (tree, tree, tree);
173 static tree fold_builtin_tan (tree, tree);
174 static tree fold_builtin_trunc (tree, tree);
175 static tree fold_builtin_floor (tree, tree);
176 static tree fold_builtin_ceil (tree, tree);
177 static tree fold_builtin_round (tree, tree);
178 static tree fold_builtin_int_roundingfn (tree, tree);
179 static tree fold_builtin_bitop (tree, tree);
180 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
181 static tree fold_builtin_strchr (tree, tree, tree);
182 static tree fold_builtin_memchr (tree, tree, tree, tree);
183 static tree fold_builtin_memcmp (tree, tree, tree);
184 static tree fold_builtin_strcmp (tree, tree);
185 static tree fold_builtin_strncmp (tree, tree, tree);
186 static tree fold_builtin_signbit (tree, tree);
187 static tree fold_builtin_copysign (tree, tree, tree, tree);
188 static tree fold_builtin_isascii (tree);
189 static tree fold_builtin_toascii (tree);
190 static tree fold_builtin_isdigit (tree);
191 static tree fold_builtin_fabs (tree, tree);
192 static tree fold_builtin_abs (tree, tree);
193 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
194 enum tree_code);
195 static tree fold_builtin_n (tree, tree *, int, bool);
196 static tree fold_builtin_0 (tree, bool);
197 static tree fold_builtin_1 (tree, tree, bool);
198 static tree fold_builtin_2 (tree, tree, tree, bool);
199 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
200 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
201 static tree fold_builtin_varargs (tree, tree, bool);
202
203 static tree fold_builtin_strpbrk (tree, tree, tree);
204 static tree fold_builtin_strstr (tree, tree, tree);
205 static tree fold_builtin_strrchr (tree, tree, tree);
206 static tree fold_builtin_strcat (tree, tree);
207 static tree fold_builtin_strncat (tree, tree, tree);
208 static tree fold_builtin_strspn (tree, tree);
209 static tree fold_builtin_strcspn (tree, tree);
210 static tree fold_builtin_sprintf (tree, tree, tree, int);
211
212 static rtx expand_builtin_object_size (tree);
213 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
214 enum built_in_function);
215 static void maybe_emit_chk_warning (tree, enum built_in_function);
216 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
217 static void maybe_emit_free_warning (tree);
218 static tree fold_builtin_object_size (tree, tree);
219 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
220 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
221 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
222 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
223 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
224 enum built_in_function);
225 static bool init_target_chars (void);
226
227 static unsigned HOST_WIDE_INT target_newline;
228 static unsigned HOST_WIDE_INT target_percent;
229 static unsigned HOST_WIDE_INT target_c;
230 static unsigned HOST_WIDE_INT target_s;
231 static char target_percent_c[3];
232 static char target_percent_s[3];
233 static char target_percent_s_newline[4];
234 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
235 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
236 static tree do_mpfr_arg2 (tree, tree, tree,
237 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
238 static tree do_mpfr_arg3 (tree, tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_sincos (tree, tree, tree);
241 static tree do_mpfr_bessel_n (tree, tree, tree,
242 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
243 const REAL_VALUE_TYPE *, bool);
244 static tree do_mpfr_remquo (tree, tree, tree);
245 static tree do_mpfr_lgamma_r (tree, tree, tree);
246
/* Return true if NAME starts with one of the prefixes reserved for
   compiler builtins ("__builtin_" or "__sync_").  */

bool
is_builtin_name (const char *name)
{
  static const char *const reserved_prefixes[] = { "__builtin_", "__sync_" };
  size_t i;

  for (i = 0; i < sizeof reserved_prefixes / sizeof reserved_prefixes[0]; i++)
    {
      const char *prefix = reserved_prefixes[i];

      if (strncmp (name, prefix, strlen (prefix)) == 0)
	return true;
    }

  return false;
}
256
257 /* Return true if NODE should be considered for inline expansion regardless
258 of the optimization level. This means whenever a function is invoked with
259 its "internal" name, which normally contains the prefix "__builtin". */
260
261 static bool
262 called_as_built_in (tree node)
263 {
264 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
265 we want the name used to call the function, not the name it
266 will have. */
267 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
268 return is_builtin_name (name);
269 }
270
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what; ALIGN is the initial
   guessed alignment, e.g. from type alignment.  */

int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  unsigned int inner;

  /* INNER tracks the largest alignment the component offsets below allow
     us to assume; it starts at the caller's ceiling and only decreases.  */
  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      /* Strip the component references down to the base object,
	 collecting the constant bit position and variable offset.  */
      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* A constant bit position can only guarantee alignment up to its
	 lowest set bit.  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk the variable offset term by term (PLUS_EXPR chains),
	 reducing INNER for each term.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* Any overflow in calculating offset_factor won't change
		 the alignment.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* A fully variable term: assume nothing beyond byte
		 alignment, and stop looking at further terms.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* Combine what the base object itself guarantees with INNER.  */
  if (DECL_P (exp))
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
346
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the alignment of the pointed-to type, clamped to the
     caller's ceiling.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and pointer arithmetic, refining ALIGN as we go.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  /* Look through the conversion; if the inner expression is no
	     longer a pointer, what we have is the best we can do.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of
	     it, since the sum is only as aligned as its addend.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
407
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  /* For a conditional, the length is known only if both arms agree.
     A condition with side-effects blocks this unless ONLY_VALUE says
     the result will never be emitted.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* A compound expression's value is its second operand.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning (0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
506
507 /* Return a char pointer for a C string if it is a string constant
508 or sum of string constant and integer constant. */
509
510 static const char *
511 c_getstr (tree src)
512 {
513 tree offset_node;
514
515 src = string_constant (src, &offset_node);
516 if (src == 0)
517 return 0;
518
519 if (offset_node == 0)
520 return TREE_STRING_POINTER (src);
521 else if (!host_integerp (offset_node, 1)
522 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
523 return 0;
524
525 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
526 }
527
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as the "still inside the string" flag: once a NUL byte
     has been read it stays zero, so the remaining bytes of the value
     are zero-filled.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute J, the bit position at which byte I of STR lands when
	 the target reads the constant, accounting for word order and,
	 when they differ, byte order within words.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      /* The two-element C array can hold at most twice
	 HOST_BITS_PER_WIDE_INT bits.  */
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
560
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  Return nonzero (leaving *P untouched) when CST is not a usable
   constant or the value does not survive the target/host round trip.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  /* Reject non-constants, and targets whose char does not fit in a
     HOST_WIDE_INT.  */
  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Truncate the value to the target's char width.  */
  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  /* Truncate again to the host's char width; if the two results differ,
     the value cannot be represented in a host char.  */
  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
588
589 /* Similar to save_expr, but assumes that arbitrary code is not executed
590 in between the multiple evaluations. In particular, we assume that a
591 non-addressable local variable will not be modified. */
592
593 static tree
594 builtin_save_expr (tree exp)
595 {
596 if (TREE_ADDRESSABLE (exp) == 0
597 && (TREE_CODE (exp) == PARM_DECL
598 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
599 return exp;
600
601 return save_expr (exp);
602 }
603
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE, which is either
   BUILT_IN_RETURN_ADDRESS or BUILT_IN_FRAME_ADDRESS).  Returns an rtx
   for the requested address.  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* The starting frame address: either target-provided, or derived from
     one of the frame pointer registers below.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.
     By default it lives one Pmode word past the frame address.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
687
688 /* Alias set used for setjmp buffer. */
689 static alias_set_type setjmp_alias_set = -1;
690
691 /* Construct the leading half of a __builtin_setjmp call. Control will
692 return to RECEIVER_LABEL. This is also called directly by the SJLJ
693 exception handling code. */
694
695 void
696 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
697 {
698 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
699 rtx stack_save;
700 rtx mem;
701
702 if (setjmp_alias_set == -1)
703 setjmp_alias_set = new_alias_set ();
704
705 buf_addr = convert_memory_address (Pmode, buf_addr);
706
707 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
708
709 /* We store the frame pointer and the address of receiver_label in
710 the buffer and use the rest of it for the stack save area, which
711 is machine-dependent. */
712
713 mem = gen_rtx_MEM (Pmode, buf_addr);
714 set_mem_alias_set (mem, setjmp_alias_set);
715 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
716
717 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
718 set_mem_alias_set (mem, setjmp_alias_set);
719
720 emit_move_insn (validize_mem (mem),
721 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
722
723 stack_save = gen_rtx_MEM (sa_mode,
724 plus_constant (buf_addr,
725 2 * GET_MODE_SIZE (Pmode)));
726 set_mem_alias_set (stack_save, setjmp_alias_set);
727 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
728
729 /* If there is further processing to do, do it. */
730 #ifdef HAVE_builtin_setjmp_setup
731 if (HAVE_builtin_setjmp_setup)
732 emit_insn (gen_builtin_setjmp_setup (buf_addr));
733 #endif
734
735 /* Tell optimize_save_area_alloca that extra work is going to
736 need to go on during alloca. */
737 cfun->calls_setjmp = 1;
738
739 /* We have a nonlocal label. */
740 cfun->has_nonlocal_label = 1;
741 }
742
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   RECEIVER_LABEL is the label control returns to; some targets use it
   in their setjmp-receiver pattern, others ignore it.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_clobber (static_chain_rtx);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Give the target a chance to emit its own receiver code; fall back
     to the nonlocal-goto receiver, then to nothing at all.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
809
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the address of the setjmp buffer; VALUE must be const1_rtx
   (see the assertion below).  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* Lazily create the alias set shared by all setjmp-buffer accesses.  */
  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout mirrors expand_builtin_setjmp_setup: frame
	 pointer, receiver label, then the stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  /* Restore the frame pointer and stack pointer, then jump to
	     the saved receiver label.  */
	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
900
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  Returns const0_rtx on success (the
   call never returns normally at runtime), or NULL_RTX if the argument
   list is malformed.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* Exactly two pointer arguments are required.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was based
     on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* The saved frame pointer lives at offset 0 of the save area; the saved
     stack pointer occupies the word(s) immediately after it.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Tell the optimizers that all memory and the frame may be read or
	 written across this jump.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  Stop at a CALL_INSN, which can also end a block.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
985
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

  /* Determine the mode in which the stack pointer is saved.  The
     STACK_SAVEAREA_MODE check deliberately comes second so that it
     overrides the save_stack_nonlocal insn's operand mode when both
     are defined.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The stack pointer slot is the third word of the buffer (after the
     frame pointer and label words).  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

  /* NOTE(review): presumably lets the target emit any bookkeeping its
     setjmp needs before the save — confirm against target definitions.  */
#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
1019
1020 /* Expand a call to __builtin_prefetch. For a target that does not support
1021 data prefetch, evaluate the memory address argument in case it has side
1022 effects. */
1023
1024 static void
1025 expand_builtin_prefetch (tree exp)
1026 {
1027 tree arg0, arg1, arg2;
1028 int nargs;
1029 rtx op0, op1, op2;
1030
1031 if (!validate_arglist (exp, POINTER_TYPE, 0))
1032 return;
1033
1034 arg0 = CALL_EXPR_ARG (exp, 0);
1035
1036 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1037 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1038 locality). */
1039 nargs = call_expr_nargs (exp);
1040 if (nargs > 1)
1041 arg1 = CALL_EXPR_ARG (exp, 1);
1042 else
1043 arg1 = integer_zero_node;
1044 if (nargs > 2)
1045 arg2 = CALL_EXPR_ARG (exp, 2);
1046 else
1047 arg2 = build_int_cst (NULL_TREE, 3);
1048
1049 /* Argument 0 is an address. */
1050 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1051
1052 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1053 if (TREE_CODE (arg1) != INTEGER_CST)
1054 {
1055 error ("second argument to %<__builtin_prefetch%> must be a constant");
1056 arg1 = integer_zero_node;
1057 }
1058 op1 = expand_normal (arg1);
1059 /* Argument 1 must be either zero or one. */
1060 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1061 {
1062 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1063 " using zero");
1064 op1 = const0_rtx;
1065 }
1066
1067 /* Argument 2 (locality) must be a compile-time constant int. */
1068 if (TREE_CODE (arg2) != INTEGER_CST)
1069 {
1070 error ("third argument to %<__builtin_prefetch%> must be a constant");
1071 arg2 = integer_zero_node;
1072 }
1073 op2 = expand_normal (arg2);
1074 /* Argument 2 must be 0, 1, 2, or 3. */
1075 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1076 {
1077 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1078 op2 = const0_rtx;
1079 }
1080
1081 #ifdef HAVE_prefetch
1082 if (HAVE_prefetch)
1083 {
1084 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1085 (op0,
1086 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1087 || (GET_MODE (op0) != Pmode))
1088 {
1089 op0 = convert_memory_address (Pmode, op0);
1090 op0 = force_reg (Pmode, op0);
1091 }
1092 emit_insn (gen_prefetch (op0, op1, op2));
1093 }
1094 #endif
1095
1096 /* Don't do anything with direct references to volatile memory, but
1097 generate code to handle other side effects. */
1098 if (!MEM_P (op0) && side_effects_p (op0))
1099 emit_insn (op0);
1100 }
1101
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  The returned MEM has BLKmode, alias set 0 and no
   size, since string builtins may access multiple array elements.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Note: the address is expanded from ORIG_EXP; EXP is only used below
     to derive the memory attributes.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* OFF records a constant byte offset peeled off a &object + off form.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  /* OFFSET/LENGTH stay -1 ("unknown") unless both can be proved
	     to be compile-time constants.  */
	  if (MEM_OFFSET (mem)
	      && CONST_INT_P (MEM_OFFSET (mem)))
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through nested COMPONENT_REFs, stripping them
	     from MEM_EXPR until we find a field that provably contains
	     the whole access.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      /* Translate OFFSET into the enclosing record's frame before
		 moving one level out.  */
	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1237 \f
/* Built-in functions to perform an untyped call and return.  */

/* The three tables below are filled in lazily by apply_args_size and
   apply_result_size; their contents are target-dependent but fixed for
   the life of the compilation.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1259
1260 /* Return the size required for the block returned by __builtin_apply_args,
1261 and initialize apply_args_mode. */
1262
1263 static int
1264 apply_args_size (void)
1265 {
1266 static int size = -1;
1267 int align;
1268 unsigned int regno;
1269 enum machine_mode mode;
1270
1271 /* The values computed by this function never change. */
1272 if (size < 0)
1273 {
1274 /* The first value is the incoming arg-pointer. */
1275 size = GET_MODE_SIZE (Pmode);
1276
1277 /* The second value is the structure value address unless this is
1278 passed as an "invisible" first argument. */
1279 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1280 size += GET_MODE_SIZE (Pmode);
1281
1282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1283 if (FUNCTION_ARG_REGNO_P (regno))
1284 {
1285 mode = reg_raw_mode[regno];
1286
1287 gcc_assert (mode != VOIDmode);
1288
1289 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1290 if (size % align != 0)
1291 size = CEIL (size, align) * align;
1292 apply_args_reg_offset[regno] = size;
1293 size += GET_MODE_SIZE (mode);
1294 apply_args_mode[regno] = mode;
1295 }
1296 else
1297 {
1298 apply_args_mode[regno] = VOIDmode;
1299 apply_args_reg_offset[regno] = 0;
1300 }
1301 }
1302 return size;
1303 }
1304
1305 /* Return the size required for the block returned by __builtin_apply,
1306 and initialize apply_result_mode. */
1307
1308 static int
1309 apply_result_size (void)
1310 {
1311 static int size = -1;
1312 int align, regno;
1313 enum machine_mode mode;
1314
1315 /* The values computed by this function never change. */
1316 if (size < 0)
1317 {
1318 size = 0;
1319
1320 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1321 if (FUNCTION_VALUE_REGNO_P (regno))
1322 {
1323 mode = reg_raw_mode[regno];
1324
1325 gcc_assert (mode != VOIDmode);
1326
1327 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1328 if (size % align != 0)
1329 size = CEIL (size, align) * align;
1330 size += GET_MODE_SIZE (mode);
1331 apply_result_mode[regno] = mode;
1332 }
1333 else
1334 apply_result_mode[regno] = VOIDmode;
1335
1336 /* Allow targets that use untyped_call and untyped_return to override
1337 the size so that machine-specific information can be stored here. */
1338 #ifdef APPLY_RESULT_SIZE
1339 size = APPLY_RESULT_SIZE;
1340 #endif
1341 }
1342 return size;
1343 }
1344
1345 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1346 /* Create a vector describing the result block RESULT. If SAVEP is true,
1347 the result block is used to save the values; otherwise it is used to
1348 restore the values. */
1349
1350 static rtx
1351 result_vector (int savep, rtx result)
1352 {
1353 int regno, size, align, nelts;
1354 enum machine_mode mode;
1355 rtx reg, mem;
1356 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1357
1358 size = nelts = 0;
1359 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1360 if ((mode = apply_result_mode[regno]) != VOIDmode)
1361 {
1362 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1363 if (size % align != 0)
1364 size = CEIL (size, align) * align;
1365 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1366 mem = adjust_address (result, mode, size);
1367 savevec[nelts++] = (savep
1368 ? gen_rtx_SET (VOIDmode, mem, reg)
1369 : gen_rtx_SET (VOIDmode, reg, mem));
1370 size += GET_MODE_SIZE (mode);
1371 }
1372 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1373 }
1374 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1375
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns the address
   of a stack block laid out as: arg pointer, optional structure value
   address, then each argument register (aligned for its mode).  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Round up to the register's alignment; must match the layout
	   computed by apply_args_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1436
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Expand the register saves into a detached insn sequence so they
       can be placed at function entry rather than at the call site.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1481
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee address, ARGUMENTS the block produced by
   __builtin_apply_args, and ARGSIZE the number of bytes of arguments
   to copy.  Returns the address of a block holding the callee's return
   registers, suitable for __builtin_return.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  (The call is for the side effect of
     initializing apply_args_mode.)  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1645
/* Perform an untyped return.  RESULT is the address of the block of
   return registers produced by a preceding __builtin_apply; its
   contents are copied back into the actual return registers before
   jumping to the function's return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Called for the side effect of initializing apply_result_mode.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Layout must match the one produced by apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate the USE insns in a separate sequence so they can
	   all be emitted just before the return below.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1695
1696 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1697
1698 static enum type_class
1699 type_to_class (tree type)
1700 {
1701 switch (TREE_CODE (type))
1702 {
1703 case VOID_TYPE: return void_type_class;
1704 case INTEGER_TYPE: return integer_type_class;
1705 case ENUMERAL_TYPE: return enumeral_type_class;
1706 case BOOLEAN_TYPE: return boolean_type_class;
1707 case POINTER_TYPE: return pointer_type_class;
1708 case REFERENCE_TYPE: return reference_type_class;
1709 case OFFSET_TYPE: return offset_type_class;
1710 case REAL_TYPE: return real_type_class;
1711 case COMPLEX_TYPE: return complex_type_class;
1712 case FUNCTION_TYPE: return function_type_class;
1713 case METHOD_TYPE: return method_type_class;
1714 case RECORD_TYPE: return record_type_class;
1715 case UNION_TYPE:
1716 case QUAL_UNION_TYPE: return union_type_class;
1717 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1718 ? string_type_class : array_type_class);
1719 case LANG_TYPE: return lang_type_class;
1720 default: return no_type_class;
1721 }
1722 }
1723
1724 /* Expand a call EXP to __builtin_classify_type. */
1725
1726 static rtx
1727 expand_builtin_classify_type (tree exp)
1728 {
1729 if (call_expr_nargs (exp))
1730 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1731 return GEN_INT (no_type_class);
1732 }
1733
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It both emits the
   three case labels and sets the fcode/fcodef/fcodel locals that the
   enclosing switch's caller reads afterwards.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r, lgammaf_r, lgammal_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1747
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  /* Select the decl table to search; the implicit table holds only those
     builtins the compiler itself is allowed to introduce.  */
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Map FN (any of its float/double/long double variants) to the trio of
     function codes for that mathematical function; CASE_MATHFN and
     CASE_MATHFN_REENT set fcode, fcodef and fcodel.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      /* FN is not a recognized math builtin; no conversion possible.  */
      return NULL_TREE;
    }

  /* Pick the variant whose operating type matches TYPE (comparing main
     variants so qualified types still match).  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1859
1860 /* Like mathfn_built_in_1(), but always use the implicit array. */
1861
1862 tree
1863 mathfn_built_in (tree type, enum built_in_function fn)
1864 {
1865 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1866 }
1867
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  NaN is the only value that
     compares unequal to itself, so jump past the errno store when
     TARGET == TARGET (i.e. the result is not NaN).  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			   0, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      /* Let the target supply the lvalue for errno if it has one.  */
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Otherwise fall back to a plain memory reference to "errno".  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1908
1909 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1910 Return NULL_RTX if a normal call should be emitted rather than expanding
1911 the function in-line. EXP is the expression that is a call to the builtin
1912 function; if convenient, the result should be placed in TARGET.
1913 SUBTARGET may be used as the target for computing one of EXP's operands. */
1914
1915 static rtx
1916 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1917 {
1918 optab builtin_optab;
1919 rtx op0, insns, before_call;
1920 tree fndecl = get_callee_fndecl (exp);
1921 enum machine_mode mode;
1922 bool errno_set = false;
1923 tree arg;
1924
1925 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1926 return NULL_RTX;
1927
1928 arg = CALL_EXPR_ARG (exp, 0);
1929
1930 switch (DECL_FUNCTION_CODE (fndecl))
1931 {
1932 CASE_FLT_FN (BUILT_IN_SQRT):
1933 errno_set = ! tree_expr_nonnegative_p (arg);
1934 builtin_optab = sqrt_optab;
1935 break;
1936 CASE_FLT_FN (BUILT_IN_EXP):
1937 errno_set = true; builtin_optab = exp_optab; break;
1938 CASE_FLT_FN (BUILT_IN_EXP10):
1939 CASE_FLT_FN (BUILT_IN_POW10):
1940 errno_set = true; builtin_optab = exp10_optab; break;
1941 CASE_FLT_FN (BUILT_IN_EXP2):
1942 errno_set = true; builtin_optab = exp2_optab; break;
1943 CASE_FLT_FN (BUILT_IN_EXPM1):
1944 errno_set = true; builtin_optab = expm1_optab; break;
1945 CASE_FLT_FN (BUILT_IN_LOGB):
1946 errno_set = true; builtin_optab = logb_optab; break;
1947 CASE_FLT_FN (BUILT_IN_LOG):
1948 errno_set = true; builtin_optab = log_optab; break;
1949 CASE_FLT_FN (BUILT_IN_LOG10):
1950 errno_set = true; builtin_optab = log10_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOG2):
1952 errno_set = true; builtin_optab = log2_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG1P):
1954 errno_set = true; builtin_optab = log1p_optab; break;
1955 CASE_FLT_FN (BUILT_IN_ASIN):
1956 builtin_optab = asin_optab; break;
1957 CASE_FLT_FN (BUILT_IN_ACOS):
1958 builtin_optab = acos_optab; break;
1959 CASE_FLT_FN (BUILT_IN_TAN):
1960 builtin_optab = tan_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ATAN):
1962 builtin_optab = atan_optab; break;
1963 CASE_FLT_FN (BUILT_IN_FLOOR):
1964 builtin_optab = floor_optab; break;
1965 CASE_FLT_FN (BUILT_IN_CEIL):
1966 builtin_optab = ceil_optab; break;
1967 CASE_FLT_FN (BUILT_IN_TRUNC):
1968 builtin_optab = btrunc_optab; break;
1969 CASE_FLT_FN (BUILT_IN_ROUND):
1970 builtin_optab = round_optab; break;
1971 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1972 builtin_optab = nearbyint_optab;
1973 if (flag_trapping_math)
1974 break;
1975 /* Else fallthrough and expand as rint. */
1976 CASE_FLT_FN (BUILT_IN_RINT):
1977 builtin_optab = rint_optab; break;
1978 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1979 builtin_optab = significand_optab; break;
1980 default:
1981 gcc_unreachable ();
1982 }
1983
1984 /* Make a suitable register to place result in. */
1985 mode = TYPE_MODE (TREE_TYPE (exp));
1986
1987 if (! flag_errno_math || ! HONOR_NANS (mode))
1988 errno_set = false;
1989
1990 /* Before working hard, check whether the instruction is available. */
1991 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1992 {
1993 target = gen_reg_rtx (mode);
1994
1995 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1996 need to expand the argument again. This way, we will not perform
1997 side-effects more the once. */
1998 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1999
2000 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2001
2002 start_sequence ();
2003
2004 /* Compute into TARGET.
2005 Set TARGET to wherever the result comes back. */
2006 target = expand_unop (mode, builtin_optab, op0, target, 0);
2007
2008 if (target != 0)
2009 {
2010 if (errno_set)
2011 expand_errno_check (exp, target);
2012
2013 /* Output the entire sequence. */
2014 insns = get_insns ();
2015 end_sequence ();
2016 emit_insn (insns);
2017 return target;
2018 }
2019
2020 /* If we were unable to expand via the builtin, stop the sequence
2021 (without outputting the insns) and call to the library function
2022 with the stabilized argument list. */
2023 end_sequence ();
2024 }
2025
2026 before_call = get_last_insn ();
2027
2028 return expand_call (exp, target, target == const0_rtx);
2029 }
2030
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* ldexp/scalbn/scalbln take an integer second argument; everything
     else here takes two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* The scalb optab only applies to radix-2 floating point formats.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2136
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos instruction for both sin and cos.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* sincos produces two values; direct the unused one to a
	     scratch (0) and TARGET to the one we want.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2240
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab = 0;
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return NULL_RTX;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    icode = optab_handler (builtin_optab, mode)->insn_code;

  /* Before working hard, check whether the instruction is available.  */
  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  /* If there is no optab, try generic code: rewrite the classification
     in terms of comparisons against the format's extreme values.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1 (ABS_EXPR, type, arg),
				  build_real (type, r));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1 (ABS_EXPR, type, arg),
				  build_real (type, r));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* rmax is the largest finite value; rmin the smallest normal
	   (0x1p(emin-1) in the mode's format).  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* fabs(x) is used twice below, so stabilize it.  */
	arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    default:
      break;
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2383
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function sincos(x, *sinp, *cosp): the result is stored through the
   two pointer arguments, so the call itself yields const0_rtx.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* op1 and op2 are the memory locations *sinp and *cosp.  */
  op1 = expand_normal (build_fold_indirect_ref (sinp));
  op2 = expand_normal (build_fold_indirect_ref (cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2431
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.

   Three strategies are tried in order: the sincos optab, a libcall to
   sincos (when the target is known to have it), and finally a libcall
   to cexp with a pure-imaginary argument.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos decl matching the cexpi variant's precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Stack temporaries receive the sin and cos results; their
	 addresses are passed to sincos as trees.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i).  */
      narg = fold_build2 (COMPLEX_EXPR, ctype,
			  build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: the complex value cos + sin*i.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2540
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the conversion optab and the float rounding function used as
     fallback when the optab cannot be expanded.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_expr (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2668
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2737
2738 /* To evaluate powi(x,n), the floating point value x raised to the
2739 constant integer exponent n, we use a hybrid algorithm that
2740 combines the "window method" with look-up tables. For an
2741 introduction to exponentiation algorithms and "addition chains",
2742 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2743 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2744 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2745 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2746
/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  Targets
   may override this with their own cost threshold.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3
2766
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states than an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

/* Invariant: powi_table[i] <= i for every i, so the recursive walks in
   powi_lookup_cost and expand_powi_1 always step to strictly smaller
   exponents and terminate.  Entry 0 is unused (exponent 0 is handled
   before the table is consulted).  */
static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2809
2810
2811 /* Return the number of multiplications required to calculate
2812 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2813 subroutine of powi_cost. CACHE is an array indicating
2814 which exponents have already been calculated. */
2815
2816 static int
2817 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2818 {
2819 /* If we've already calculated this exponent, then this evaluation
2820 doesn't require any additional multiplications. */
2821 if (cache[n])
2822 return 0;
2823
2824 cache[n] = true;
2825 return powi_lookup_cost (n - powi_table[n], cache)
2826 + powi_lookup_cost (powi_table[n], cache) + 1;
2827 }
2828
2829 /* Return the number of multiplications required to calculate
2830 powi(x,n) for an arbitrary x, given the exponent N. This
2831 function needs to be kept in sync with expand_powi below. */
2832
2833 static int
2834 powi_cost (HOST_WIDE_INT n)
2835 {
2836 bool cache[POWI_TABLE_SIZE];
2837 unsigned HOST_WIDE_INT digit;
2838 unsigned HOST_WIDE_INT val;
2839 int result;
2840
2841 if (n == 0)
2842 return 0;
2843
2844 /* Ignore the reciprocal when calculating the cost. */
2845 val = (n < 0) ? -n : n;
2846
2847 /* Initialize the exponent cache. */
2848 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2849 cache[1] = true;
2850
2851 result = 0;
2852
2853 while (val >= POWI_TABLE_SIZE)
2854 {
2855 if (val & 1)
2856 {
2857 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2858 result += powi_lookup_cost (digit, cache)
2859 + POWI_WINDOW_SIZE + 1;
2860 val >>= POWI_WINDOW_SIZE;
2861 }
2862 else
2863 {
2864 val >>= 1;
2865 result++;
2866 }
2867 }
2868
2869 return result + powi_lookup_cost (val, cache);
2870 }
2871
2872 /* Recursive subroutine of expand_powi. This function takes the array,
2873 CACHE, of already calculated exponents and an exponent N and returns
2874 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2875
2876 static rtx
2877 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2878 {
2879 unsigned HOST_WIDE_INT digit;
2880 rtx target, result;
2881 rtx op0, op1;
2882
2883 if (n < POWI_TABLE_SIZE)
2884 {
2885 if (cache[n])
2886 return cache[n];
2887
2888 target = gen_reg_rtx (mode);
2889 cache[n] = target;
2890
2891 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2892 op1 = expand_powi_1 (mode, powi_table[n], cache);
2893 }
2894 else if (n & 1)
2895 {
2896 target = gen_reg_rtx (mode);
2897 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2898 op0 = expand_powi_1 (mode, n - digit, cache);
2899 op1 = expand_powi_1 (mode, digit, cache);
2900 }
2901 else
2902 {
2903 target = gen_reg_rtx (mode);
2904 op0 = expand_powi_1 (mode, n >> 1, cache);
2905 op1 = op0;
2906 }
2907
2908 result = expand_mult (mode, op0, op1, target, 0);
2909 if (result != target)
2910 emit_move_insn (target, result);
2911 return target;
2912 }
2913
2914 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2915 floating point operand in mode MODE, and N is the exponent. This
2916 function needs to be kept in sync with powi_cost above. */
2917
2918 static rtx
2919 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2920 {
2921 unsigned HOST_WIDE_INT val;
2922 rtx cache[POWI_TABLE_SIZE];
2923 rtx result;
2924
2925 if (n == 0)
2926 return CONST1_RTX (mode);
2927
2928 val = (n < 0) ? -n : n;
2929
2930 memset (cache, 0, sizeof (cache));
2931 cache[1] = x;
2932
2933 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2934
2935 /* If the original exponent was negative, reciprocate the result. */
2936 if (n < 0)
2937 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2938 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2939
2940 return result;
2941 }
2942
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing subexpressions.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Only a constant, non-overflowed exponent gets the special
     expansions below; anything else goes through the generic
     two-argument math-function expander (optab or libcall).  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  The real_identical test against the
     round-tripped integer CINT checks that the constant is exactly an
     integer.  Exponents -1..2 are always exact; larger ones are only
     used under -funsafe-math-optimizations when cheap enough.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 is evaluated more than once on the paths below; save it.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* N here is 2*exponent, checked to be exactly an integer.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1, narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      /* Multiply sqrt(x) by x**|n/2|, then reciprocate if the
		 exponent was negative.  */
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* N = round (3*exponent); the round-trip through CINT/3 converted
	 back to MODE verifies that the exponent is exactly N/3.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1,narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* |n| % 3 == 2 needs cbrt(x)**2: square the cbrt result.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3077
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used for computing the first argument.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The TREE_INT_CST_HIGH test ensures the constant actually fits
	 in a HOST_WIDE_INT (high word is just the sign extension).  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Expand both operands and coerce them into the modes the libcall
     expects before emitting it.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  /* __powi* has no side effects beyond its return value (LCT_CONST).  */
  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3140
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient, in mode TARGET_MODE.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Walk up through wider integer modes looking for a strlen
	 pattern the target provides.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* The strlen pattern's third operand is the character to search
	 for; the builtin always searches for NUL.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3249
3250 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3251 caller should emit a normal call, otherwise try to get the result
3252 in TARGET, if convenient (and in mode MODE if that's convenient). */
3253
3254 static rtx
3255 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3256 {
3257 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3258 {
3259 tree type = TREE_TYPE (exp);
3260 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3261 CALL_EXPR_ARG (exp, 1), type);
3262 if (result)
3263 return expand_expr (result, target, mode, EXPAND_NORMAL);
3264 }
3265 return NULL_RTX;
3266 }
3267
3268 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3269 caller should emit a normal call, otherwise try to get the result
3270 in TARGET, if convenient (and in mode MODE if that's convenient). */
3271
3272 static rtx
3273 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3274 {
3275 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3276 {
3277 tree type = TREE_TYPE (exp);
3278 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3279 CALL_EXPR_ARG (exp, 1), type);
3280 if (result)
3281 return expand_expr (result, target, mode, EXPAND_NORMAL);
3282
3283 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3284 }
3285 return NULL_RTX;
3286 }
3287
3288 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3289 caller should emit a normal call, otherwise try to get the result
3290 in TARGET, if convenient (and in mode MODE if that's convenient). */
3291
3292 static rtx
3293 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3294 {
3295 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3296 {
3297 tree type = TREE_TYPE (exp);
3298 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3299 CALL_EXPR_ARG (exp, 1), type);
3300 if (result)
3301 return expand_expr (result, target, mode, EXPAND_NORMAL);
3302 }
3303 return NULL_RTX;
3304 }
3305
3306 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3307 caller should emit a normal call, otherwise try to get the result
3308 in TARGET, if convenient (and in mode MODE if that's convenient). */
3309
3310 static rtx
3311 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3312 {
3313 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3314 {
3315 tree type = TREE_TYPE (exp);
3316 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3317 CALL_EXPR_ARG (exp, 1), type);
3318 if (result)
3319 return expand_expr (result, target, mode, EXPAND_NORMAL);
3320 }
3321 return NULL_RTX;
3322 }
3323
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  DATA is the NUL-terminated source string passed through
   from can_store_by_pieces/store_by_pieces.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 enum machine_mode mode)
{
  const char *str = (const char *) data;

  /* The caller guarantees reads stay within the string, including
     its NUL terminator.  */
  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
3340
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len,
					    TREE_TYPE (TREE_TYPE (fndecl)),
					    false, /*endp=*/0);
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;
      tree_ann_common_t ann;

      /* If the folder simplified the call, expand the simplified form,
	 first evaluating any side effects wrapped in COMPOUND_EXPRs.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Value profiling may have recorded the typical alignment and
	 block size for this stringop; use them as expansion hints.  */
      ann = tree_common_ann (exp);
      if (ann)
	stringop_block_profile (ann->stmt, &expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns DEST; materialize its address if the block move
	 didn't already hand one back.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3438
3439 /* Expand a call EXP to the mempcpy builtin.
3440 Return NULL_RTX if we failed; the caller should emit a normal call,
3441 otherwise try to get the result in TARGET, if convenient (and in
3442 mode MODE if that's convenient). If ENDP is 0 return the
3443 destination pointer, if ENDP is 1 return the end pointer ala
3444 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3445 stpcpy. */
3446
3447 static rtx
3448 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3449 {
3450 if (!validate_arglist (exp,
3451 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3452 return NULL_RTX;
3453 else
3454 {
3455 tree dest = CALL_EXPR_ARG (exp, 0);
3456 tree src = CALL_EXPR_ARG (exp, 1);
3457 tree len = CALL_EXPR_ARG (exp, 2);
3458 return expand_builtin_mempcpy_args (dest, src, len,
3459 TREE_TYPE (exp),
3460 target, mode, /*endp=*/ 1);
3461 }
3462 }
3463
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   TYPE is the return type of the call.  The other arguments and return value
   are the same as for expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_expr (fn, 3, dest, src, len);

      /* Evaluate any side effects the folder wrapped in COMPOUND_EXPRs
	 before expanding the value.  */
      while (TREE_CODE (result) == COMPOUND_EXPR)
	{
	  expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
		       EXPAND_NORMAL);
	  result = TREE_OPERAND (result, 1);
	}
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);

      /* If the folder simplified the call, expand the simplified form.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  /* ENDP selects which pointer store_by_pieces hands back:
	     0 = DEST, 1 = end, 2 = end minus one.  */
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise try an inline move-by-pieces for a constant length.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3559
3560 /* Expand expression EXP, which is a call to the memmove builtin. Return
3561 NULL_RTX if we failed; the caller should emit a normal call. */
3562
3563 static rtx
3564 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3565 {
3566 if (!validate_arglist (exp,
3567 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3568 return NULL_RTX;
3569 else
3570 {
3571 tree dest = CALL_EXPR_ARG (exp, 0);
3572 tree src = CALL_EXPR_ARG (exp, 1);
3573 tree len = CALL_EXPR_ARG (exp, 2);
3574 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3575 target, mode, ignore);
3576 }
3577 }
3578
3579 /* Helper function to do the actual work for expand_builtin_memmove. The
3580 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3581 so that this can also be called without constructing an actual CALL_EXPR.
3582 TYPE is the return type of the call. The other arguments and return value
3583 are the same as for expand_builtin_memmove. */
3584
3585 static rtx
3586 expand_builtin_memmove_args (tree dest, tree src, tree len,
3587 tree type, rtx target, enum machine_mode mode,
3588 int ignore)
3589 {
3590 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3591
3592 if (result)
3593 {
3594 STRIP_TYPE_NOPS (result);
3595 while (TREE_CODE (result) == COMPOUND_EXPR)
3596 {
3597 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3598 EXPAND_NORMAL);
3599 result = TREE_OPERAND (result, 1);
3600 }
3601 return expand_expr (result, target, mode, EXPAND_NORMAL);
3602 }
3603
3604 /* Otherwise, call the normal function. */
3605 return NULL_RTX;
3606 }
3607
3608 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3609 NULL_RTX if we failed the caller should emit a normal call. */
3610
3611 static rtx
3612 expand_builtin_bcopy (tree exp, int ignore)
3613 {
3614 tree type = TREE_TYPE (exp);
3615 tree src, dest, size;
3616
3617 if (!validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 return NULL_RTX;
3620
3621 src = CALL_EXPR_ARG (exp, 0);
3622 dest = CALL_EXPR_ARG (exp, 1);
3623 size = CALL_EXPR_ARG (exp, 2);
3624
3625 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3626 This is done this way so that if it isn't expanded inline, we fall
3627 back to calling bcopy instead of memmove. */
3628 return expand_builtin_memmove_args (dest, src,
3629 fold_convert (sizetype, size),
3630 type, const0_rtx, VOIDmode,
3631 ignore);
3632 }
3633
3634 #ifndef HAVE_movstr
3635 # define HAVE_movstr 0
3636 # define CODE_FOR_movstr CODE_FOR_nothing
3637 #endif
3638
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  /* Nothing to do when the target provides no movstr pattern.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* Caller wants the destination pointer back: latch DEST's address
	 into TARGET before the insn clobbers anything, and give the
	 pattern a scratch register for its end-pointer output.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* Caller wants (a variant of) the end pointer: reuse TARGET as the
	 pattern's output when possible, otherwise use a fresh pseudo.  */
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  /* Match the pattern's declared mode for operand 0, if it has one.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3700
3701 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3702 NULL_RTX if we failed the caller should emit a normal call, otherwise
3703 try to get the result in TARGET, if convenient (and in mode MODE if that's
3704 convenient). */
3705
3706 static rtx
3707 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3708 {
3709 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3710 {
3711 tree dest = CALL_EXPR_ARG (exp, 0);
3712 tree src = CALL_EXPR_ARG (exp, 1);
3713 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3714 }
3715 return NULL_RTX;
3716 }
3717
3718 /* Helper function to do the actual work for expand_builtin_strcpy. The
3719 arguments to the builtin_strcpy call DEST and SRC are broken out
3720 so that this can also be called without constructing an actual CALL_EXPR.
3721 The other arguments and return value are the same as for
3722 expand_builtin_strcpy. */
3723
3724 static rtx
3725 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3726 rtx target, enum machine_mode mode)
3727 {
3728 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3729 if (result)
3730 return expand_expr (result, target, mode, EXPAND_NORMAL);
3731 return expand_movstr (dest, src, target, /*endp=*/0);
3732
3733 }
3734
3735 /* Expand a call EXP to the stpcpy builtin.
3736 Return NULL_RTX if we failed the caller should emit a normal call,
3737 otherwise try to get the result in TARGET, if convenient (and in
3738 mode MODE if that's convenient). */
3739
static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_expr (fn, 2, dst, src);

      /* Folding may wrap the call in COMPOUND_EXPRs; expand each
	 left-hand side for its side effects only, then expand the
	 final value.  */
      STRIP_NOPS (result);
      while (TREE_CODE (result) == COMPOUND_EXPR)
	{
	  expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
		       EXPAND_NORMAL);
	  result = TREE_OPERAND (result, 1);
	}
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Copy strlen (SRC) + 1 bytes; ENDP == 2 asks for the address of
	 the NUL, i.e. the stpcpy return value.  */
      lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      /* mempcpy failed.  With a constant length we can still expand as
	 strcpy and compute the return value as DST + strlen (SRC).  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
						dst, src, target, mode);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  /* RET is the destination pointer; add the known
		     length to form the end pointer.  */
		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: the target's movstr pattern.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3818
3819 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3820 bytes from constant string DATA + OFFSET and return it as target
3821 constant. */
3822
3823 rtx
3824 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3825 enum machine_mode mode)
3826 {
3827 const char *str = (const char *) data;
3828
3829 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3830 return const0_rtx;
3831
3832 return c_readstr (str + offset, mode);
3833 }
3834
3835 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3836 NULL_RTX if we failed the caller should emit a normal call. */
3837
static rtx
expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      /* Maximum length of SRC, if computable at compile time.  */
      tree slen = c_strlen (src, 1);
      tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);

      if (result)
	{
	  /* Expand any COMPOUND_EXPR wrappers for side effects only,
	     then expand the final value into TARGET.  */
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* Account for the NUL terminator when comparing lengths.  */
      slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  /* dest_align == 0 means DEST is not known to be a pointer;
	     the read-str callback handles the zero padding itself.  */
	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  /* strncpy returns the destination pointer, in ptr_mode.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3897
3898 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3899 bytes from constant string DATA + OFFSET and return it as target
3900 constant. */
3901
3902 rtx
3903 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3904 enum machine_mode mode)
3905 {
3906 const char *c = (const char *) data;
3907 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3908
3909 memset (p, *c, GET_MODE_SIZE (mode));
3910
3911 return c_readstr (p, mode);
3912 }
3913
3914 /* Callback routine for store_by_pieces. Return the RTL of a register
3915 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3916 char value given in the RTL register data. For example, if mode is
3917 4 bytes wide, return the RTL for 0x01010101*data. */
3918
3919 static rtx
3920 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3921 enum machine_mode mode)
3922 {
3923 rtx target, coeff;
3924 size_t size;
3925 char *p;
3926
3927 size = GET_MODE_SIZE (mode);
3928 if (size == 1)
3929 return (rtx) data;
3930
3931 p = XALLOCAVEC (char, size);
3932 memset (p, 1, size);
3933 coeff = c_readstr (p, mode);
3934
3935 target = convert_to_mode (mode, (rtx) data, 1);
3936 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3937 return force_reg (mode, target);
3938 }
3939
3940 /* Expand expression EXP, which is a call to the memset builtin. Return
3941 NULL_RTX if we failed the caller should emit a normal call, otherwise
3942 try to get the result in TARGET, if convenient (and in mode MODE if that's
3943 convenient). */
3944
3945 static rtx
3946 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3947 {
3948 if (!validate_arglist (exp,
3949 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3950 return NULL_RTX;
3951 else
3952 {
3953 tree dest = CALL_EXPR_ARG (exp, 0);
3954 tree val = CALL_EXPR_ARG (exp, 1);
3955 tree len = CALL_EXPR_ARG (exp, 2);
3956 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3957 }
3958 }
3959
3960 /* Helper function to do the actual work for expand_builtin_memset. The
3961 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3962 so that this can also be called without constructing an actual CALL_EXPR.
3963 The other arguments and return value are the same as for
3964 expand_builtin_memset. */
3965
static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  tree_ann_common_t ann;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Use value profiling hints about block size/alignment if present.  */
  ann = tree_common_ann (orig_exp);
  if (ann)
    stringop_block_profile (ann->stmt, &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Non-constant fill value: splat it at run time.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      /* Return the destination pointer in ptr_mode, per memset.  */
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant fill value; C receives it as a host char, or we punt.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Fill value is zero: this is a block clear.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed; emit a call to the original builtin
     (memset or bzero), preserving any tail-call marking.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_expr (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_expr (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4089
4090 /* Expand expression EXP, which is a call to the bzero builtin. Return
4091 NULL_RTX if we failed the caller should emit a normal call. */
4092
4093 static rtx
4094 expand_builtin_bzero (tree exp)
4095 {
4096 tree dest, size;
4097
4098 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4099 return NULL_RTX;
4100
4101 dest = CALL_EXPR_ARG (exp, 0);
4102 size = CALL_EXPR_ARG (exp, 1);
4103
4104 /* New argument list transforming bzero(ptr x, int y) to
4105 memset(ptr x, int 0, size_t y). This is done this way
4106 so that if it isn't expanded inline, we fallback to
4107 calling bzero instead of memset. */
4108
4109 return expand_builtin_memset_args (dest, integer_zero_node,
4110 fold_convert (sizetype, size),
4111 const0_rtx, VOIDmode, exp);
4112 }
4113
4114 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4115 caller should emit a normal call, otherwise try to get the result
4116 in TARGET, if convenient (and in mode MODE if that's convenient). */
4117
4118 static rtx
4119 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4120 {
4121 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4122 INTEGER_TYPE, VOID_TYPE))
4123 {
4124 tree type = TREE_TYPE (exp);
4125 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4126 CALL_EXPR_ARG (exp, 1),
4127 CALL_EXPR_ARG (exp, 2), type);
4128 if (result)
4129 return expand_expr (result, target, mode, EXPAND_NORMAL);
4130 }
4131 return NULL_RTX;
4132 }
4133
4134 /* Expand expression EXP, which is a call to the memcmp built-in function.
4135 Return NULL_RTX if we failed and the
4136 caller should emit a normal call, otherwise try to get the result in
4137 TARGET, if convenient (and in mode MODE, if that's convenient). */
4138
static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try to fold the whole comparison at compile time.  */
      tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1),
					 CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* Otherwise try a target cmpmem/cmpstrn insn pattern.  */
#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer cmpmem over cmpstrn when both are available.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert (sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The gen function may refuse (return NULL); fall back to a
       library call in that case.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4244
4245 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4246 if we failed the caller should emit a normal call, otherwise try to get
4247 the result in TARGET, if convenient. */
4248
static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try to fold the comparison at compile time.  */
      tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Include the NUL terminator in each length.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 2, arg1, arg2);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4393
4394 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4395 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4396 the result in TARGET, if convenient. */
4397
static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try to fold the comparison at compile time.  */
      tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
					  CALL_EXPR_ARG (exp, 1),
					  CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Include the NUL terminator in each length.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
			 fold_convert (TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 3, arg1, arg2, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4520
4521 /* Expand expression EXP, which is a call to the strcat builtin.
4522 Return NULL_RTX if we failed the caller should emit a normal call,
4523 otherwise try to get the result in TARGET, if convenient. */
4524
static rtx
expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dst = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return expand_expr (dst, target, mode, EXPAND_NORMAL);

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newsrc, newdst,
	    strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  rtx insns;

	  /* Stabilize the argument list.  */
	  newsrc = builtin_save_expr (src);
	  dst = builtin_save_expr (dst);

	  /* Collect the expansion in a sequence so it can be discarded
	     wholesale if the strcpy expansion fails.  */
	  start_sequence ();

	  /* Create strlen (dst).  */
	  newdst = build_call_expr (strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
	    {
	      end_sequence (); /* Stop sequence.  */
	      return NULL_RTX;
	    }

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);

	  /* strcat returns its first argument.  */
	  return expand_expr (dst, target, mode, EXPAND_NORMAL);
	}

      return NULL_RTX;
    }
}
4577
4578 /* Expand expression EXP, which is a call to the strncat builtin.
4579 Return NULL_RTX if we failed the caller should emit a normal call,
4580 otherwise try to get the result in TARGET, if convenient. */
4581
4582 static rtx
4583 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4584 {
4585 if (validate_arglist (exp,
4586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4587 {
4588 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4589 CALL_EXPR_ARG (exp, 1),
4590 CALL_EXPR_ARG (exp, 2));
4591 if (result)
4592 return expand_expr (result, target, mode, EXPAND_NORMAL);
4593 }
4594 return NULL_RTX;
4595 }
4596
4597 /* Expand expression EXP, which is a call to the strspn builtin.
4598 Return NULL_RTX if we failed the caller should emit a normal call,
4599 otherwise try to get the result in TARGET, if convenient. */
4600
4601 static rtx
4602 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4603 {
4604 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4605 {
4606 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4607 CALL_EXPR_ARG (exp, 1));
4608 if (result)
4609 return expand_expr (result, target, mode, EXPAND_NORMAL);
4610 }
4611 return NULL_RTX;
4612 }
4613
4614 /* Expand expression EXP, which is a call to the strcspn builtin.
4615 Return NULL_RTX if we failed the caller should emit a normal call,
4616 otherwise try to get the result in TARGET, if convenient. */
4617
4618 static rtx
4619 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4620 {
4621 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4622 {
4623 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4624 CALL_EXPR_ARG (exp, 1));
4625 if (result)
4626 return expand_expr (result, target, mode, EXPAND_NORMAL);
4627 }
4628 return NULL_RTX;
4629 }
4630
4631 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4632 if that's convenient. */
4633
rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4667
4668 /* __builtin_args_info (N) returns word N of the arg space info
4669 for the current function. The number and meanings of words
4670 is controlled by the definition of CUMULATIVE_ARGS. */
4671
static rtx
expand_builtin_args_info (tree exp)
{
  /* Number of ints in the CUMULATIVE_ARGS record for this target.  */
  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
  /* View the current function's arg info as an array of ints.  */
  int *word_ptr = (int *) &crtl->args.info;

  gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);

  if (call_expr_nargs (exp) != 0)
    {
      /* The word index must be a compile-time constant.  */
      if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
	error ("argument of %<__builtin_args_info%> must be constant");
      else
	{
	  HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);

	  if (wordnum < 0 || wordnum >= nwords)
	    error ("argument of %<__builtin_args_info%> out of range");
	  else
	    return GEN_INT (word_ptr[wordnum]);
	}
    }
  else
    error ("missing argument in %<__builtin_args_info%>");

  /* On any error, diagnose and return zero so expansion can proceed.  */
  return const0_rtx;
}
4699
4700 /* Expand a call to __builtin_next_arg. */
4701
4702 static rtx
4703 expand_builtin_next_arg (void)
4704 {
4705 /* Checking arguments is already done in fold_builtin_next_arg
4706 that must be called before this function. */
4707 return expand_binop (ptr_mode, add_optab,
4708 crtl->args.internal_arg_pointer,
4709 crtl->args.arg_offset_rtx,
4710 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4711 }
4712
4713 /* Make it easier for the backends by protecting the valist argument
4714 from multiple evaluations. */
4715
4716 static tree
4717 stabilize_va_list (tree valist, int needs_lvalue)
4718 {
4719 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4720
4721 gcc_assert (vatype != NULL_TREE);
4722
4723 if (TREE_CODE (vatype) == ARRAY_TYPE)
4724 {
4725 if (TREE_SIDE_EFFECTS (valist))
4726 valist = save_expr (valist);
4727
4728 /* For this case, the backends will be expecting a pointer to
4729 vatype, but it's possible we've actually been given an array
4730 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4731 So fix it. */
4732 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4733 {
4734 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4735 valist = build_fold_addr_expr_with_type (valist, p1);
4736 }
4737 }
4738 else
4739 {
4740 tree pt;
4741
4742 if (! needs_lvalue)
4743 {
4744 if (! TREE_SIDE_EFFECTS (valist))
4745 return valist;
4746
4747 pt = build_pointer_type (vatype);
4748 valist = fold_build1 (ADDR_EXPR, pt, valist);
4749 TREE_SIDE_EFFECTS (valist) = 1;
4750 }
4751
4752 if (TREE_SIDE_EFFECTS (valist))
4753 valist = save_expr (valist);
4754 valist = build_fold_indirect_ref (valist);
4755 }
4756
4757 return valist;
4758 }
4759
4760 /* The "standard" definition of va_list is void*. */
4761
4762 tree
4763 std_build_builtin_va_list (void)
4764 {
4765 return ptr_type_node;
4766 }
4767
4768 /* The "standard" abi va_list is va_list_type_node. */
4769
4770 tree
4771 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4772 {
4773 return va_list_type_node;
4774 }
4775
4776 /* The "standard" type of va_list is va_list_type_node. */
4777
4778 tree
4779 std_canonical_va_list_type (tree type)
4780 {
4781 tree wtype, htype;
4782
4783 if (INDIRECT_REF_P (type))
4784 type = TREE_TYPE (type);
4785 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4786 type = TREE_TYPE (type);
4787 wtype = va_list_type_node;
4788 htype = type;
4789 /* Treat structure va_list types. */
4790 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4791 htype = TREE_TYPE (htype);
4792 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4793 {
4794 /* If va_list is an array type, the argument may have decayed
4795 to a pointer type, e.g. by being passed to another function.
4796 In that case, unwrap both types so that we can compare the
4797 underlying records. */
4798 if (TREE_CODE (htype) == ARRAY_TYPE
4799 || POINTER_TYPE_P (htype))
4800 {
4801 wtype = TREE_TYPE (wtype);
4802 htype = TREE_TYPE (htype);
4803 }
4804 }
4805 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4806 return va_list_type_node;
4807
4808 return NULL_TREE;
4809 }
4810
4811 /* The "standard" implementation of va_start: just assign `nextarg' to
4812 the variable. */
4813
4814 void
4815 std_expand_builtin_va_start (tree valist, rtx nextarg)
4816 {
4817 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4818 convert_move (va_r, nextarg, 0);
4819 }
4820
4821 /* Expand EXP, a call to __builtin_va_start. */
4822
4823 static rtx
4824 expand_builtin_va_start (tree exp)
4825 {
4826 rtx nextarg;
4827 tree valist;
4828
4829 if (call_expr_nargs (exp) < 2)
4830 {
4831 error ("too few arguments to function %<va_start%>");
4832 return const0_rtx;
4833 }
4834
4835 if (fold_builtin_next_arg (exp, true))
4836 return const0_rtx;
4837
4838 nextarg = expand_builtin_next_arg ();
4839 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4840
4841 if (targetm.expand_builtin_va_start)
4842 targetm.expand_builtin_va_start (valist, nextarg);
4843 else
4844 std_expand_builtin_va_start (valist, nextarg);
4845
4846 return const0_rtx;
4847 }
4848
4849 /* The "standard" implementation of va_arg: read the value from the
4850 current (padded) address and increment by the (padded) size. */
4851
4852 tree
4853 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4854 gimple_seq *post_p)
4855 {
4856 tree addr, t, type_size, rounded_size, valist_tmp;
4857 unsigned HOST_WIDE_INT align, boundary;
4858 bool indirect;
4859
4860 #ifdef ARGS_GROW_DOWNWARD
4861 /* All of the alignment and movement below is for args-grow-up machines.
4862 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4863 implement their own specialized gimplify_va_arg_expr routines. */
4864 gcc_unreachable ();
4865 #endif
4866
4867 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4868 if (indirect)
4869 type = build_pointer_type (type);
4870
4871 align = PARM_BOUNDARY / BITS_PER_UNIT;
4872 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4873
4874 /* When we align parameter on stack for caller, if the parameter
4875 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4876 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4877 here with caller. */
4878 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4879 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4880
4881 boundary /= BITS_PER_UNIT;
4882
4883 /* Hoist the valist value into a temporary for the moment. */
4884 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4885
4886 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4887 requires greater alignment, we must perform dynamic alignment. */
4888 if (boundary > align
4889 && !integer_zerop (TYPE_SIZE (type)))
4890 {
4891 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4892 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4893 valist_tmp, size_int (boundary - 1)));
4894 gimplify_and_add (t, pre_p);
4895
4896 t = fold_convert (sizetype, valist_tmp);
4897 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4898 fold_convert (TREE_TYPE (valist),
4899 fold_build2 (BIT_AND_EXPR, sizetype, t,
4900 size_int (-boundary))));
4901 gimplify_and_add (t, pre_p);
4902 }
4903 else
4904 boundary = align;
4905
4906 /* If the actual alignment is less than the alignment of the type,
4907 adjust the type accordingly so that we don't assume strict alignment
4908 when dereferencing the pointer. */
4909 boundary *= BITS_PER_UNIT;
4910 if (boundary < TYPE_ALIGN (type))
4911 {
4912 type = build_variant_type_copy (type);
4913 TYPE_ALIGN (type) = boundary;
4914 }
4915
4916 /* Compute the rounded size of the type. */
4917 type_size = size_in_bytes (type);
4918 rounded_size = round_up (type_size, align);
4919
4920 /* Reduce rounded_size so it's sharable with the postqueue. */
4921 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4922
4923 /* Get AP. */
4924 addr = valist_tmp;
4925 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4926 {
4927 /* Small args are padded downward. */
4928 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4929 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4930 size_binop (MINUS_EXPR, rounded_size, type_size));
4931 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4932 }
4933
4934 /* Compute new value for AP. */
4935 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4936 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4937 gimplify_and_add (t, pre_p);
4938
4939 addr = fold_convert (build_pointer_type (type), addr);
4940
4941 if (indirect)
4942 addr = build_va_arg_indirect_ref (addr);
4943
4944 return build_va_arg_indirect_ref (addr);
4945 }
4946
4947 /* Build an indirect-ref expression over the given TREE, which represents a
4948 piece of a va_arg() expansion. */
4949 tree
4950 build_va_arg_indirect_ref (tree addr)
4951 {
4952 addr = build_fold_indirect_ref (addr);
4953
4954 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4955 mf_mark (addr);
4956
4957 return addr;
4958 }
4959
4960 /* Return a dummy expression of type TYPE in order to keep going after an
4961 error. */
4962
4963 static tree
4964 dummy_object (tree type)
4965 {
4966 tree t = build_int_cst (build_pointer_type (type), 0);
4967 return build1 (INDIRECT_REF, type, t);
4968 }
4969
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR; side-effect statements go to PRE_P /
   POST_P.  Returns GS_ERROR on a bad va_list, GS_ALL_DONE when the
   expression was replaced wholesale, GS_OK when the target hook
   produced a new expression to be gimplified further.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_HAS_LOCATION (*expr_p) ? EXPR_LOCATION (*expr_p) :
    UNKNOWN_LOCATION;

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the "promoted type" warning only once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert (p1, build_fold_addr_expr (valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
5061
5062 /* Expand EXP, a call to __builtin_va_end. */
5063
5064 static rtx
5065 expand_builtin_va_end (tree exp)
5066 {
5067 tree valist = CALL_EXPR_ARG (exp, 0);
5068
5069 /* Evaluate for side effects, if needed. I hate macros that don't
5070 do that. */
5071 if (TREE_SIDE_EFFECTS (valist))
5072 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5073
5074 return const0_rtx;
5075 }
5076
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  Record/pointer va_lists are
   copied by simple assignment; array va_lists with a block move.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Destination must be an lvalue; source need not be.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Non-array va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5127
5128 /* Expand a call to one of the builtin functions __builtin_frame_address or
5129 __builtin_return_address. */
5130
5131 static rtx
5132 expand_builtin_frame_address (tree fndecl, tree exp)
5133 {
5134 /* The argument must be a nonnegative integer constant.
5135 It counts the number of frames to scan up the stack.
5136 The value is the return address saved in that frame. */
5137 if (call_expr_nargs (exp) == 0)
5138 /* Warning about missing arg was already issued. */
5139 return const0_rtx;
5140 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5141 {
5142 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5143 error ("invalid argument to %<__builtin_frame_address%>");
5144 else
5145 error ("invalid argument to %<__builtin_return_address%>");
5146 return const0_rtx;
5147 }
5148 else
5149 {
5150 rtx tem
5151 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5152 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5153
5154 /* Some ports cannot access arbitrary stack frames. */
5155 if (tem == NULL)
5156 {
5157 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5158 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5159 else
5160 warning (0, "unsupported argument to %<__builtin_return_address%>");
5161 return const0_rtx;
5162 }
5163
5164 /* For __builtin_frame_address, return what we've got. */
5165 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5166 return tem;
5167
5168 if (!REG_P (tem)
5169 && ! CONSTANT_P (tem))
5170 tem = copy_to_mode_reg (Pmode, tem);
5171 return tem;
5172 }
5173 }
5174
5175 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5176 we failed and the caller should emit a normal call, otherwise try to get
5177 the result in TARGET, if convenient. */
5178
5179 static rtx
5180 expand_builtin_alloca (tree exp, rtx target)
5181 {
5182 rtx op0;
5183 rtx result;
5184
5185 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5186 should always expand to function calls. These can be intercepted
5187 in libmudflap. */
5188 if (flag_mudflap)
5189 return NULL_RTX;
5190
5191 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5192 return NULL_RTX;
5193
5194 /* Compute the argument. */
5195 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5196
5197 /* Allocate the desired space. */
5198 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5199 result = convert_memory_address (ptr_mode, result);
5200
5201 return result;
5202 }
5203
5204 /* Expand a call to a bswap builtin with argument ARG0. MODE
5205 is the mode to expand with. */
5206
5207 static rtx
5208 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5209 {
5210 enum machine_mode mode;
5211 tree arg;
5212 rtx op0;
5213
5214 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5215 return NULL_RTX;
5216
5217 arg = CALL_EXPR_ARG (exp, 0);
5218 mode = TYPE_MODE (TREE_TYPE (arg));
5219 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5220
5221 target = expand_unop (mode, bswap_optab, op0, target, 1);
5222
5223 gcc_assert (target);
5224
5225 return convert_to_mode (mode, target, 0);
5226 }
5227
5228 /* Expand a call to a unary builtin in EXP.
5229 Return NULL_RTX if a normal call should be emitted rather than expanding the
5230 function in-line. If convenient, the result should be placed in TARGET.
5231 SUBTARGET may be used as the target for computing one of EXP's operands. */
5232
5233 static rtx
5234 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5235 rtx subtarget, optab op_optab)
5236 {
5237 rtx op0;
5238
5239 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5240 return NULL_RTX;
5241
5242 /* Compute the argument. */
5243 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5244 VOIDmode, EXPAND_NORMAL);
5245 /* Compute op, into TARGET if possible.
5246 Set TARGET to wherever the result comes back. */
5247 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5248 op_optab, op0, target, 1);
5249 gcc_assert (target);
5250
5251 return convert_to_mode (target_mode, target, 0);
5252 }
5253
5254 /* If the string passed to fputs is a constant and is one character
5255 long, we attempt to transform this call into __builtin_fputc(). */
5256
5257 static rtx
5258 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5259 {
5260 /* Verify the arguments in the original call. */
5261 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5262 {
5263 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5264 CALL_EXPR_ARG (exp, 1),
5265 (target == const0_rtx),
5266 unlocked, NULL_TREE);
5267 if (result)
5268 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5269 }
5270 return NULL_RTX;
5271 }
5272
5273 /* Expand a call to __builtin_expect. We just return our argument
5274 as the builtin_expect semantic should've been already executed by
5275 tree branch prediction pass. */
5276
5277 static rtx
5278 expand_builtin_expect (tree exp, rtx target)
5279 {
5280 tree arg, c;
5281
5282 if (call_expr_nargs (exp) < 2)
5283 return const0_rtx;
5284 arg = CALL_EXPR_ARG (exp, 0);
5285 c = CALL_EXPR_ARG (exp, 1);
5286
5287 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5288 /* When guessing was done, the hints should be already stripped away. */
5289 gcc_assert (!flag_guess_branch_prob
5290 || optimize == 0 || errorcount || sorrycount);
5291 return target;
5292 }
5293
/* Expand a call to __builtin_trap: emit the target's trap instruction
   when one exists, otherwise call abort through libgcc; either way,
   follow with a barrier since control never continues past a trap.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5305
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* No code: just seal off this point in the insn stream.  */
  emit_barrier ();
}
5316
5317 /* Expand EXP, a call to fabs, fabsf or fabsl.
5318 Return NULL_RTX if a normal call should be emitted rather than expanding
5319 the function inline. If convenient, the result should be placed
5320 in TARGET. SUBTARGET may be used as the target for computing
5321 the operand. */
5322
5323 static rtx
5324 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5325 {
5326 enum machine_mode mode;
5327 tree arg;
5328 rtx op0;
5329
5330 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5331 return NULL_RTX;
5332
5333 arg = CALL_EXPR_ARG (exp, 0);
5334 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5335 mode = TYPE_MODE (TREE_TYPE (arg));
5336 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5337 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5338 }
5339
5340 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5341 Return NULL is a normal call should be emitted rather than expanding the
5342 function inline. If convenient, the result should be placed in TARGET.
5343 SUBTARGET may be used as the target for computing the operand. */
5344
5345 static rtx
5346 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5347 {
5348 rtx op0, op1;
5349 tree arg;
5350
5351 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5352 return NULL_RTX;
5353
5354 arg = CALL_EXPR_ARG (exp, 0);
5355 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5356
5357 arg = CALL_EXPR_ARG (exp, 1);
5358 op1 = expand_normal (arg);
5359
5360 return expand_copysign (op0, op1, target);
5361 }
5362
5363 /* Create a new constant string literal and return a char* pointer to it.
5364 The STRING_CST value is the LEN characters at STR. */
5365 tree
5366 build_string_literal (int len, const char *str)
5367 {
5368 tree t, elem, index, type;
5369
5370 t = build_string (len, str);
5371 elem = build_type_variant (char_type_node, 1, 0);
5372 index = build_index_type (size_int (len - 1));
5373 type = build_array_type (elem, index);
5374 TREE_TYPE (t) = type;
5375 TREE_CONSTANT (t) = 1;
5376 TREE_READONLY (t) = 1;
5377 TREE_STATIC (t) = 1;
5378
5379 type = build_pointer_type (elem);
5380 t = build1 (ADDR_EXPR, type,
5381 build4 (ARRAY_REF, elem,
5382 t, integer_zero_node, NULL_TREE, NULL_TREE));
5383 return t;
5384 }
5385
/* Expand EXP, a call to printf or printf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than
   transforming the function inline.  If convenient, the result should
   be placed in TARGET with mode MODE.  UNLOCKED indicates this is a
   printf_unlocked call.

   Recognized forms: printf("%s\n", s) -> puts(s);
   printf("%c", c) -> putchar(c); printf("c") -> putchar('c');
   printf("str\n") -> puts("str"); printf("") -> nothing.  */
static rtx
expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
		       bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTCHAR];
  tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs == 0)
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if ((nargs != 2)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
	return NULL_RTX;
      if (fn_puts)
	fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
    }
  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 2)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
	return NULL_RTX;
      if (fn_putchar)
	fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 1)
	return NULL_RTX;

      /* If the format specifier was "", printf does nothing.  */
      if (fmt_str[0] == '\0')
	return const0_rtx;
      /* If the format specifier has length of 1, call putchar.  */
      if (fmt_str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  arg = build_int_cst (NULL_TREE, fmt_str[0]);
	  if (fn_putchar)
	    fn = build_call_expr (fn_putchar, 1, arg);
	}
      else
	{
	  /* If the format specifier was "string\n", call puts("string").  */
	  size_t len = strlen (fmt_str);
	  if ((unsigned char)fmt_str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, fmt_str, len - 1);
	      newstr[len - 1] = 0;
	      arg = build_string_literal (len, newstr);
	      if (fn_puts)
		fn = build_call_expr (fn_puts, 1, arg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_RTX;
	}
    }

  if (!fn)
    return NULL_RTX;
  /* Preserve the original call's tail-call marking.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5493
/* Expand EXP, a call to fprintf or fprintf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than
   transforming the function inline.  If convenient, the result should
   be placed in TARGET with mode MODE.  UNLOCKED indicates this is a
   fprintf_unlocked call.

   Recognized forms: fprintf(fp, "%s", s) -> fputs(s, fp);
   fprintf(fp, "%c", c) -> fputc(c, fp);
   fprintf(fp, "str") -> fputs("str", fp); fprintf(fp, "") -> nothing.  */
static rtx
expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
			bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, fp, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs < 2)
    return NULL_RTX;
  fp = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fp)))
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 1);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s", call __builtin_fputs(arg,fp).  */
  if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if ((nargs != 3)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, arg, fp);
    }
  /* If the format specifier was "%c", call __builtin_fputc(arg,fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 3)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputc)
	fn = build_call_expr (fn_fputc, 2, arg, fp);
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 2)
	return NULL_RTX;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* Evaluate and ignore FILE* argument for side-effects.  */
	  expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf(stream,string) with fputs(string,stream).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  if (!fn)
    return NULL_RTX;
  /* Preserve the original call's tail-call marking.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5586
5587 /* Expand a call EXP to sprintf. Return NULL_RTX if
5588 a normal call should be emitted rather than expanding the function
5589 inline. If convenient, the result should be placed in TARGET with
5590 mode MODE. */
5591
5592 static rtx
5593 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5594 {
5595 tree dest, fmt;
5596 const char *fmt_str;
5597 int nargs = call_expr_nargs (exp);
5598
5599 /* Verify the required arguments in the original call. */
5600 if (nargs < 2)
5601 return NULL_RTX;
5602 dest = CALL_EXPR_ARG (exp, 0);
5603 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5604 return NULL_RTX;
5605 fmt = CALL_EXPR_ARG (exp, 0);
5606 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5607 return NULL_RTX;
5608
5609 /* Check whether the format is a literal string constant. */
5610 fmt_str = c_getstr (fmt);
5611 if (fmt_str == NULL)
5612 return NULL_RTX;
5613
5614 if (!init_target_chars ())
5615 return NULL_RTX;
5616
5617 /* If the format doesn't contain % args or %%, use strcpy. */
5618 if (strchr (fmt_str, target_percent) == 0)
5619 {
5620 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5621 tree exp;
5622
5623 if ((nargs > 2) || ! fn)
5624 return NULL_RTX;
5625 expand_expr (build_call_expr (fn, 2, dest, fmt),
5626 const0_rtx, VOIDmode, EXPAND_NORMAL);
5627 if (target == const0_rtx)
5628 return const0_rtx;
5629 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5630 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5631 }
5632 /* If the format is "%s", use strcpy if the result isn't used. */
5633 else if (strcmp (fmt_str, target_percent_s) == 0)
5634 {
5635 tree fn, arg, len;
5636 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5637
5638 if (! fn)
5639 return NULL_RTX;
5640 if (nargs != 3)
5641 return NULL_RTX;
5642 arg = CALL_EXPR_ARG (exp, 2);
5643 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5644 return NULL_RTX;
5645
5646 if (target != const0_rtx)
5647 {
5648 len = c_strlen (arg, 1);
5649 if (! len || TREE_CODE (len) != INTEGER_CST)
5650 return NULL_RTX;
5651 }
5652 else
5653 len = NULL_TREE;
5654
5655 expand_expr (build_call_expr (fn, 2, dest, arg),
5656 const0_rtx, VOIDmode, EXPAND_NORMAL);
5657
5658 if (target == const0_rtx)
5659 return const0_rtx;
5660 return expand_expr (len, target, mode, EXPAND_NORMAL);
5661 }
5662
5663 return NULL_RTX;
5664 }
5665
5666 /* Expand a call to either the entry or exit function profiler. */
5667
5668 static rtx
5669 expand_builtin_profile_func (bool exitp)
5670 {
5671 rtx this_rtx, which;
5672
5673 this_rtx = DECL_RTL (current_function_decl);
5674 gcc_assert (MEM_P (this_rtx));
5675 this_rtx = XEXP (this_rtx, 0);
5676
5677 if (exitp)
5678 which = profile_function_exit_libfunc;
5679 else
5680 which = profile_function_entry_libfunc;
5681
5682 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5683 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5684 0),
5685 Pmode);
5686
5687 return const0_rtx;
5688 }
5689
/* Expand a call to __builtin___clear_cache.  Three configurations:
   no "clear_cache" insn but a libgcc CLEAR_INSN_CACHE implementation
   (emit a normal call); neither (pure no-op); or a target insn that
   handles everything itself.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Force each operand to satisfy the insn's predicate.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5742
5743 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5744
5745 static rtx
5746 round_trampoline_addr (rtx tramp)
5747 {
5748 rtx temp, addend, mask;
5749
5750 /* If we don't need too much alignment, we'll have been guaranteed
5751 proper alignment by get_trampoline_type. */
5752 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5753 return tramp;
5754
5755 /* Round address up to desired boundary. */
5756 temp = gen_reg_rtx (Pmode);
5757 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5758 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5759
5760 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5761 temp, 0, OPTAB_LIB_WIDEN);
5762 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5763 temp, 0, OPTAB_LIB_WIDEN);
5764
5765 return tramp;
5766 }
5767
5768 static rtx
5769 expand_builtin_init_trampoline (tree exp)
5770 {
5771 tree t_tramp, t_func, t_chain;
5772 rtx r_tramp, r_func, r_chain;
5773 #ifdef TRAMPOLINE_TEMPLATE
5774 rtx blktramp;
5775 #endif
5776
5777 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5778 POINTER_TYPE, VOID_TYPE))
5779 return NULL_RTX;
5780
5781 t_tramp = CALL_EXPR_ARG (exp, 0);
5782 t_func = CALL_EXPR_ARG (exp, 1);
5783 t_chain = CALL_EXPR_ARG (exp, 2);
5784
5785 r_tramp = expand_normal (t_tramp);
5786 r_func = expand_normal (t_func);
5787 r_chain = expand_normal (t_chain);
5788
5789 /* Generate insns to initialize the trampoline. */
5790 r_tramp = round_trampoline_addr (r_tramp);
5791 #ifdef TRAMPOLINE_TEMPLATE
5792 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5793 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5794 emit_block_move (blktramp, assemble_trampoline_template (),
5795 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5796 #endif
5797 trampolines_created = 1;
5798 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5799
5800 return const0_rtx;
5801 }
5802
5803 static rtx
5804 expand_builtin_adjust_trampoline (tree exp)
5805 {
5806 rtx tramp;
5807
5808 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5809 return NULL_RTX;
5810
5811 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5812 tramp = round_trampoline_addr (tramp);
5813 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5814 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5815 #endif
5816
5817 return tramp;
5818 }
5819
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* FMODE is the mode of the float argument, RMODE the (integer) mode
     of the call's result.  */
  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      emit_unop_insn (icode, target, temp, UNKNOWN);
      return target;
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
  {
    /* But we can't do this if the format supports signed zero.  */
    if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
      return NULL_RTX;

    arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
		       build_real (TREE_TYPE (arg), dconst0));
    return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in one word: reinterpret it in the
	 integer mode of the same size, if one exists.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word float: pick out the word that holds the sign bit
	 and adjust BITPOS to be relative to that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the double-word constant 1 << BITPOS to use as the AND
	 mask; HI/LO are its high and low HOST_WIDE_INT halves.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5938
5939 /* Expand fork or exec calls. TARGET is the desired target of the
5940 call. EXP is the call. FN is the
5941 identificator of the actual function. IGNORE is nonzero if the
5942 value is to be ignored. */
5943
5944 static rtx
5945 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5946 {
5947 tree id, decl;
5948 tree call;
5949
5950 /* If we are not profiling, just call the function. */
5951 if (!profile_arc_flag)
5952 return NULL_RTX;
5953
5954 /* Otherwise call the wrapper. This should be equivalent for the rest of
5955 compiler, so the code does not diverge, and the wrapper may run the
5956 code necessary for keeping the profiling sane. */
5957
5958 switch (DECL_FUNCTION_CODE (fn))
5959 {
5960 case BUILT_IN_FORK:
5961 id = get_identifier ("__gcov_fork");
5962 break;
5963
5964 case BUILT_IN_EXECL:
5965 id = get_identifier ("__gcov_execl");
5966 break;
5967
5968 case BUILT_IN_EXECV:
5969 id = get_identifier ("__gcov_execv");
5970 break;
5971
5972 case BUILT_IN_EXECLP:
5973 id = get_identifier ("__gcov_execlp");
5974 break;
5975
5976 case BUILT_IN_EXECLE:
5977 id = get_identifier ("__gcov_execle");
5978 break;
5979
5980 case BUILT_IN_EXECVP:
5981 id = get_identifier ("__gcov_execvp");
5982 break;
5983
5984 case BUILT_IN_EXECVE:
5985 id = get_identifier ("__gcov_execve");
5986 break;
5987
5988 default:
5989 gcc_unreachable ();
5990 }
5991
5992 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5993 FUNCTION_DECL, id, TREE_TYPE (fn));
5994 DECL_EXTERNAL (decl) = 1;
5995 TREE_PUBLIC (decl) = 1;
5996 DECL_ARTIFICIAL (decl) = 1;
5997 TREE_NOTHROW (decl) = 1;
5998 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5999 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6000 call = rewrite_call_expr (exp, 0, decl, 0);
6001 return expand_call (call, target, ignore);
6002 }
6003
6004
6005 \f
6006 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6007 the pointer in these functions is void*, the tree optimizers may remove
6008 casts. The mode computed in expand_builtin isn't reliable either, due
6009 to __sync_bool_compare_and_swap.
6010
6011 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6012 group of builtins. This gives us log2 of the mode size. */
6013
6014 static inline enum machine_mode
6015 get_builtin_sync_mode (int fcode_diff)
6016 {
6017 /* The size is not negotiable, so ask not to get BLKmode in return
6018 if the target indicates that a smaller size would be better. */
6019 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6020 }
6021
6022 /* Expand the memory expression LOC and return the appropriate memory operand
6023 for the builtin_sync operations. */
6024
6025 static rtx
6026 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6027 {
6028 rtx addr, mem;
6029
6030 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6031
6032 /* Note that we explicitly do not want any alias information for this
6033 memory, so that we kill all other live memories. Otherwise we don't
6034 satisfy the full barrier semantics of the intrinsic. */
6035 mem = validize_mem (gen_rtx_MEM (mode, addr));
6036
6037 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6038 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6039 MEM_VOLATILE_P (mem) = 1;
6040
6041 return mem;
6042 }
6043
6044 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6045 EXP is the CALL_EXPR. CODE is the rtx code
6046 that corresponds to the arithmetic or logical operation from the name;
6047 an exception here is that NOT actually means NAND. TARGET is an optional
6048 place for us to store the results; AFTER is true if this is the
6049 fetch_and_xxx form. IGNORE is true if we don't actually care about
6050 the result of the operation at all. */
6051
6052 static rtx
6053 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6054 enum rtx_code code, bool after,
6055 rtx target, bool ignore)
6056 {
6057 rtx val, mem;
6058 enum machine_mode old_mode;
6059 location_t loc = EXPR_LOCATION (exp);
6060
6061 if (code == NOT && warn_sync_nand)
6062 {
6063 tree fndecl = get_callee_fndecl (exp);
6064 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6065
6066 static bool warned_f_a_n, warned_n_a_f;
6067
6068 switch (fcode)
6069 {
6070 case BUILT_IN_FETCH_AND_NAND_1:
6071 case BUILT_IN_FETCH_AND_NAND_2:
6072 case BUILT_IN_FETCH_AND_NAND_4:
6073 case BUILT_IN_FETCH_AND_NAND_8:
6074 case BUILT_IN_FETCH_AND_NAND_16:
6075
6076 if (warned_f_a_n)
6077 break;
6078
6079 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6080 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6081 warned_f_a_n = true;
6082 break;
6083
6084 case BUILT_IN_NAND_AND_FETCH_1:
6085 case BUILT_IN_NAND_AND_FETCH_2:
6086 case BUILT_IN_NAND_AND_FETCH_4:
6087 case BUILT_IN_NAND_AND_FETCH_8:
6088 case BUILT_IN_NAND_AND_FETCH_16:
6089
6090 if (warned_n_a_f)
6091 break;
6092
6093 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6094 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6095 warned_n_a_f = true;
6096 break;
6097
6098 default:
6099 gcc_unreachable ();
6100 }
6101 }
6102
6103 /* Expand the operands. */
6104 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6105
6106 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6107 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6108 of CONST_INTs, where we know the old_mode only from the call argument. */
6109 old_mode = GET_MODE (val);
6110 if (old_mode == VOIDmode)
6111 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6112 val = convert_modes (mode, old_mode, val, 1);
6113
6114 if (ignore)
6115 return expand_sync_operation (mem, val, code);
6116 else
6117 return expand_sync_fetch_operation (mem, val, code, after, target);
6118 }
6119
6120 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6121 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6122 true if this is the boolean form. TARGET is a place for us to store the
6123 results; this is NOT optional if IS_BOOL is true. */
6124
6125 static rtx
6126 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6127 bool is_bool, rtx target)
6128 {
6129 rtx old_val, new_val, mem;
6130 enum machine_mode old_mode;
6131
6132 /* Expand the operands. */
6133 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6134
6135
6136 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6137 mode, EXPAND_NORMAL);
6138 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6139 of CONST_INTs, where we know the old_mode only from the call argument. */
6140 old_mode = GET_MODE (old_val);
6141 if (old_mode == VOIDmode)
6142 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6143 old_val = convert_modes (mode, old_mode, old_val, 1);
6144
6145 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6146 mode, EXPAND_NORMAL);
6147 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6148 of CONST_INTs, where we know the old_mode only from the call argument. */
6149 old_mode = GET_MODE (new_val);
6150 if (old_mode == VOIDmode)
6151 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6152 new_val = convert_modes (mode, old_mode, new_val, 1);
6153
6154 if (is_bool)
6155 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6156 else
6157 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6158 }
6159
6160 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6161 general form is actually an atomic exchange, and some targets only
6162 support a reduced form with the second argument being a constant 1.
6163 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6164 the results. */
6165
6166 static rtx
6167 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6168 rtx target)
6169 {
6170 rtx val, mem;
6171 enum machine_mode old_mode;
6172
6173 /* Expand the operands. */
6174 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6175 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6176 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6177 of CONST_INTs, where we know the old_mode only from the call argument. */
6178 old_mode = GET_MODE (val);
6179 if (old_mode == VOIDmode)
6180 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6181 val = convert_modes (mode, old_mode, val, 1);
6182
6183 return expand_sync_lock_test_and_set (mem, val, target);
6184 }
6185
6186 /* Expand the __sync_synchronize intrinsic. */
6187
6188 static void
6189 expand_builtin_synchronize (void)
6190 {
6191 tree x;
6192
6193 #ifdef HAVE_memory_barrier
6194 if (HAVE_memory_barrier)
6195 {
6196 emit_insn (gen_memory_barrier ());
6197 return;
6198 }
6199 #endif
6200
6201 if (synchronize_libfunc != NULL_RTX)
6202 {
6203 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6204 return;
6205 }
6206
6207 /* If no explicit memory barrier instruction is available, create an
6208 empty asm stmt with a memory clobber. */
6209 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6210 tree_cons (NULL, build_string (6, "memory"), NULL));
6211 ASM_VOLATILE_P (x) = 1;
6212 expand_asm_expr (x);
6213 }
6214
6215 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6216
6217 static void
6218 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6219 {
6220 enum insn_code icode;
6221 rtx mem, insn;
6222 rtx val = const0_rtx;
6223
6224 /* Expand the operands. */
6225 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6226
6227 /* If there is an explicit operation in the md file, use it. */
6228 icode = sync_lock_release[mode];
6229 if (icode != CODE_FOR_nothing)
6230 {
6231 if (!insn_data[icode].operand[1].predicate (val, mode))
6232 val = force_reg (mode, val);
6233
6234 insn = GEN_FCN (icode) (mem, val);
6235 if (insn)
6236 {
6237 emit_insn (insn);
6238 return;
6239 }
6240 }
6241
6242 /* Otherwise we can implement this operation by emitting a barrier
6243 followed by a store of zero. */
6244 expand_builtin_synchronize ();
6245 emit_move_insn (mem, val);
6246 }
6247 \f
6248 /* Expand an expression EXP that calls a built-in function,
6249 with result going to TARGET if that's convenient
6250 (and in mode MODE if that's convenient).
6251 SUBTARGET may be used as the target for computing one of EXP's operands.
6252 IGNORE is nonzero if the value is to be ignored. */
6253
6254 rtx
6255 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6256 int ignore)
6257 {
6258 tree fndecl = get_callee_fndecl (exp);
6259 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6260 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6261
6262 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6263 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6264
6265 /* When not optimizing, generate calls to library functions for a certain
6266 set of builtins. */
6267 if (!optimize
6268 && !called_as_built_in (fndecl)
6269 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6270 && fcode != BUILT_IN_ALLOCA
6271 && fcode != BUILT_IN_FREE)
6272 return expand_call (exp, target, ignore);
6273
6274 /* The built-in function expanders test for target == const0_rtx
6275 to determine whether the function's result will be ignored. */
6276 if (ignore)
6277 target = const0_rtx;
6278
6279 /* If the result of a pure or const built-in function is ignored, and
6280 none of its arguments are volatile, we can avoid expanding the
6281 built-in call and just evaluate the arguments for side-effects. */
6282 if (target == const0_rtx
6283 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6284 {
6285 bool volatilep = false;
6286 tree arg;
6287 call_expr_arg_iterator iter;
6288
6289 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6290 if (TREE_THIS_VOLATILE (arg))
6291 {
6292 volatilep = true;
6293 break;
6294 }
6295
6296 if (! volatilep)
6297 {
6298 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6299 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6300 return const0_rtx;
6301 }
6302 }
6303
6304 switch (fcode)
6305 {
6306 CASE_FLT_FN (BUILT_IN_FABS):
6307 target = expand_builtin_fabs (exp, target, subtarget);
6308 if (target)
6309 return target;
6310 break;
6311
6312 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6313 target = expand_builtin_copysign (exp, target, subtarget);
6314 if (target)
6315 return target;
6316 break;
6317
6318 /* Just do a normal library call if we were unable to fold
6319 the values. */
6320 CASE_FLT_FN (BUILT_IN_CABS):
6321 break;
6322
6323 CASE_FLT_FN (BUILT_IN_EXP):
6324 CASE_FLT_FN (BUILT_IN_EXP10):
6325 CASE_FLT_FN (BUILT_IN_POW10):
6326 CASE_FLT_FN (BUILT_IN_EXP2):
6327 CASE_FLT_FN (BUILT_IN_EXPM1):
6328 CASE_FLT_FN (BUILT_IN_LOGB):
6329 CASE_FLT_FN (BUILT_IN_LOG):
6330 CASE_FLT_FN (BUILT_IN_LOG10):
6331 CASE_FLT_FN (BUILT_IN_LOG2):
6332 CASE_FLT_FN (BUILT_IN_LOG1P):
6333 CASE_FLT_FN (BUILT_IN_TAN):
6334 CASE_FLT_FN (BUILT_IN_ASIN):
6335 CASE_FLT_FN (BUILT_IN_ACOS):
6336 CASE_FLT_FN (BUILT_IN_ATAN):
6337 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6338 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6339 because of possible accuracy problems. */
6340 if (! flag_unsafe_math_optimizations)
6341 break;
6342 CASE_FLT_FN (BUILT_IN_SQRT):
6343 CASE_FLT_FN (BUILT_IN_FLOOR):
6344 CASE_FLT_FN (BUILT_IN_CEIL):
6345 CASE_FLT_FN (BUILT_IN_TRUNC):
6346 CASE_FLT_FN (BUILT_IN_ROUND):
6347 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6348 CASE_FLT_FN (BUILT_IN_RINT):
6349 target = expand_builtin_mathfn (exp, target, subtarget);
6350 if (target)
6351 return target;
6352 break;
6353
6354 CASE_FLT_FN (BUILT_IN_ILOGB):
6355 if (! flag_unsafe_math_optimizations)
6356 break;
6357 CASE_FLT_FN (BUILT_IN_ISINF):
6358 CASE_FLT_FN (BUILT_IN_FINITE):
6359 case BUILT_IN_ISFINITE:
6360 case BUILT_IN_ISNORMAL:
6361 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6362 if (target)
6363 return target;
6364 break;
6365
6366 CASE_FLT_FN (BUILT_IN_LCEIL):
6367 CASE_FLT_FN (BUILT_IN_LLCEIL):
6368 CASE_FLT_FN (BUILT_IN_LFLOOR):
6369 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6370 target = expand_builtin_int_roundingfn (exp, target);
6371 if (target)
6372 return target;
6373 break;
6374
6375 CASE_FLT_FN (BUILT_IN_LRINT):
6376 CASE_FLT_FN (BUILT_IN_LLRINT):
6377 CASE_FLT_FN (BUILT_IN_LROUND):
6378 CASE_FLT_FN (BUILT_IN_LLROUND):
6379 target = expand_builtin_int_roundingfn_2 (exp, target);
6380 if (target)
6381 return target;
6382 break;
6383
6384 CASE_FLT_FN (BUILT_IN_POW):
6385 target = expand_builtin_pow (exp, target, subtarget);
6386 if (target)
6387 return target;
6388 break;
6389
6390 CASE_FLT_FN (BUILT_IN_POWI):
6391 target = expand_builtin_powi (exp, target, subtarget);
6392 if (target)
6393 return target;
6394 break;
6395
6396 CASE_FLT_FN (BUILT_IN_ATAN2):
6397 CASE_FLT_FN (BUILT_IN_LDEXP):
6398 CASE_FLT_FN (BUILT_IN_SCALB):
6399 CASE_FLT_FN (BUILT_IN_SCALBN):
6400 CASE_FLT_FN (BUILT_IN_SCALBLN):
6401 if (! flag_unsafe_math_optimizations)
6402 break;
6403
6404 CASE_FLT_FN (BUILT_IN_FMOD):
6405 CASE_FLT_FN (BUILT_IN_REMAINDER):
6406 CASE_FLT_FN (BUILT_IN_DREM):
6407 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6408 if (target)
6409 return target;
6410 break;
6411
6412 CASE_FLT_FN (BUILT_IN_CEXPI):
6413 target = expand_builtin_cexpi (exp, target, subtarget);
6414 gcc_assert (target);
6415 return target;
6416
6417 CASE_FLT_FN (BUILT_IN_SIN):
6418 CASE_FLT_FN (BUILT_IN_COS):
6419 if (! flag_unsafe_math_optimizations)
6420 break;
6421 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6422 if (target)
6423 return target;
6424 break;
6425
6426 CASE_FLT_FN (BUILT_IN_SINCOS):
6427 if (! flag_unsafe_math_optimizations)
6428 break;
6429 target = expand_builtin_sincos (exp);
6430 if (target)
6431 return target;
6432 break;
6433
6434 case BUILT_IN_APPLY_ARGS:
6435 return expand_builtin_apply_args ();
6436
6437 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6438 FUNCTION with a copy of the parameters described by
6439 ARGUMENTS, and ARGSIZE. It returns a block of memory
6440 allocated on the stack into which is stored all the registers
6441 that might possibly be used for returning the result of a
6442 function. ARGUMENTS is the value returned by
6443 __builtin_apply_args. ARGSIZE is the number of bytes of
6444 arguments that must be copied. ??? How should this value be
6445 computed? We'll also need a safe worst case value for varargs
6446 functions. */
6447 case BUILT_IN_APPLY:
6448 if (!validate_arglist (exp, POINTER_TYPE,
6449 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6450 && !validate_arglist (exp, REFERENCE_TYPE,
6451 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6452 return const0_rtx;
6453 else
6454 {
6455 rtx ops[3];
6456
6457 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6458 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6459 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6460
6461 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6462 }
6463
6464 /* __builtin_return (RESULT) causes the function to return the
6465 value described by RESULT. RESULT is address of the block of
6466 memory returned by __builtin_apply. */
6467 case BUILT_IN_RETURN:
6468 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6469 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6470 return const0_rtx;
6471
6472 case BUILT_IN_SAVEREGS:
6473 return expand_builtin_saveregs ();
6474
6475 case BUILT_IN_ARGS_INFO:
6476 return expand_builtin_args_info (exp);
6477
6478 case BUILT_IN_VA_ARG_PACK:
6479 /* All valid uses of __builtin_va_arg_pack () are removed during
6480 inlining. */
6481 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6482 return const0_rtx;
6483
6484 case BUILT_IN_VA_ARG_PACK_LEN:
6485 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6486 inlining. */
6487 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6488 return const0_rtx;
6489
6490 /* Return the address of the first anonymous stack arg. */
6491 case BUILT_IN_NEXT_ARG:
6492 if (fold_builtin_next_arg (exp, false))
6493 return const0_rtx;
6494 return expand_builtin_next_arg ();
6495
6496 case BUILT_IN_CLEAR_CACHE:
6497 target = expand_builtin___clear_cache (exp);
6498 if (target)
6499 return target;
6500 break;
6501
6502 case BUILT_IN_CLASSIFY_TYPE:
6503 return expand_builtin_classify_type (exp);
6504
6505 case BUILT_IN_CONSTANT_P:
6506 return const0_rtx;
6507
6508 case BUILT_IN_FRAME_ADDRESS:
6509 case BUILT_IN_RETURN_ADDRESS:
6510 return expand_builtin_frame_address (fndecl, exp);
6511
6512 /* Returns the address of the area where the structure is returned.
6513 0 otherwise. */
6514 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6515 if (call_expr_nargs (exp) != 0
6516 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6517 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6518 return const0_rtx;
6519 else
6520 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6521
6522 case BUILT_IN_ALLOCA:
6523 target = expand_builtin_alloca (exp, target);
6524 if (target)
6525 return target;
6526 break;
6527
6528 case BUILT_IN_STACK_SAVE:
6529 return expand_stack_save ();
6530
6531 case BUILT_IN_STACK_RESTORE:
6532 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6533 return const0_rtx;
6534
6535 case BUILT_IN_BSWAP32:
6536 case BUILT_IN_BSWAP64:
6537 target = expand_builtin_bswap (exp, target, subtarget);
6538
6539 if (target)
6540 return target;
6541 break;
6542
6543 CASE_INT_FN (BUILT_IN_FFS):
6544 case BUILT_IN_FFSIMAX:
6545 target = expand_builtin_unop (target_mode, exp, target,
6546 subtarget, ffs_optab);
6547 if (target)
6548 return target;
6549 break;
6550
6551 CASE_INT_FN (BUILT_IN_CLZ):
6552 case BUILT_IN_CLZIMAX:
6553 target = expand_builtin_unop (target_mode, exp, target,
6554 subtarget, clz_optab);
6555 if (target)
6556 return target;
6557 break;
6558
6559 CASE_INT_FN (BUILT_IN_CTZ):
6560 case BUILT_IN_CTZIMAX:
6561 target = expand_builtin_unop (target_mode, exp, target,
6562 subtarget, ctz_optab);
6563 if (target)
6564 return target;
6565 break;
6566
6567 CASE_INT_FN (BUILT_IN_POPCOUNT):
6568 case BUILT_IN_POPCOUNTIMAX:
6569 target = expand_builtin_unop (target_mode, exp, target,
6570 subtarget, popcount_optab);
6571 if (target)
6572 return target;
6573 break;
6574
6575 CASE_INT_FN (BUILT_IN_PARITY):
6576 case BUILT_IN_PARITYIMAX:
6577 target = expand_builtin_unop (target_mode, exp, target,
6578 subtarget, parity_optab);
6579 if (target)
6580 return target;
6581 break;
6582
6583 case BUILT_IN_STRLEN:
6584 target = expand_builtin_strlen (exp, target, target_mode);
6585 if (target)
6586 return target;
6587 break;
6588
6589 case BUILT_IN_STRCPY:
6590 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6591 if (target)
6592 return target;
6593 break;
6594
6595 case BUILT_IN_STRNCPY:
6596 target = expand_builtin_strncpy (exp, target, mode);
6597 if (target)
6598 return target;
6599 break;
6600
6601 case BUILT_IN_STPCPY:
6602 target = expand_builtin_stpcpy (exp, target, mode);
6603 if (target)
6604 return target;
6605 break;
6606
6607 case BUILT_IN_STRCAT:
6608 target = expand_builtin_strcat (fndecl, exp, target, mode);
6609 if (target)
6610 return target;
6611 break;
6612
6613 case BUILT_IN_STRNCAT:
6614 target = expand_builtin_strncat (exp, target, mode);
6615 if (target)
6616 return target;
6617 break;
6618
6619 case BUILT_IN_STRSPN:
6620 target = expand_builtin_strspn (exp, target, mode);
6621 if (target)
6622 return target;
6623 break;
6624
6625 case BUILT_IN_STRCSPN:
6626 target = expand_builtin_strcspn (exp, target, mode);
6627 if (target)
6628 return target;
6629 break;
6630
6631 case BUILT_IN_STRSTR:
6632 target = expand_builtin_strstr (exp, target, mode);
6633 if (target)
6634 return target;
6635 break;
6636
6637 case BUILT_IN_STRPBRK:
6638 target = expand_builtin_strpbrk (exp, target, mode);
6639 if (target)
6640 return target;
6641 break;
6642
6643 case BUILT_IN_INDEX:
6644 case BUILT_IN_STRCHR:
6645 target = expand_builtin_strchr (exp, target, mode);
6646 if (target)
6647 return target;
6648 break;
6649
6650 case BUILT_IN_RINDEX:
6651 case BUILT_IN_STRRCHR:
6652 target = expand_builtin_strrchr (exp, target, mode);
6653 if (target)
6654 return target;
6655 break;
6656
6657 case BUILT_IN_MEMCPY:
6658 target = expand_builtin_memcpy (exp, target, mode);
6659 if (target)
6660 return target;
6661 break;
6662
6663 case BUILT_IN_MEMPCPY:
6664 target = expand_builtin_mempcpy (exp, target, mode);
6665 if (target)
6666 return target;
6667 break;
6668
6669 case BUILT_IN_MEMMOVE:
6670 target = expand_builtin_memmove (exp, target, mode, ignore);
6671 if (target)
6672 return target;
6673 break;
6674
6675 case BUILT_IN_BCOPY:
6676 target = expand_builtin_bcopy (exp, ignore);
6677 if (target)
6678 return target;
6679 break;
6680
6681 case BUILT_IN_MEMSET:
6682 target = expand_builtin_memset (exp, target, mode);
6683 if (target)
6684 return target;
6685 break;
6686
6687 case BUILT_IN_BZERO:
6688 target = expand_builtin_bzero (exp);
6689 if (target)
6690 return target;
6691 break;
6692
6693 case BUILT_IN_STRCMP:
6694 target = expand_builtin_strcmp (exp, target, mode);
6695 if (target)
6696 return target;
6697 break;
6698
6699 case BUILT_IN_STRNCMP:
6700 target = expand_builtin_strncmp (exp, target, mode);
6701 if (target)
6702 return target;
6703 break;
6704
6705 case BUILT_IN_MEMCHR:
6706 target = expand_builtin_memchr (exp, target, mode);
6707 if (target)
6708 return target;
6709 break;
6710
6711 case BUILT_IN_BCMP:
6712 case BUILT_IN_MEMCMP:
6713 target = expand_builtin_memcmp (exp, target, mode);
6714 if (target)
6715 return target;
6716 break;
6717
6718 case BUILT_IN_SETJMP:
6719 /* This should have been lowered to the builtins below. */
6720 gcc_unreachable ();
6721
6722 case BUILT_IN_SETJMP_SETUP:
6723 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6724 and the receiver label. */
6725 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6726 {
6727 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6728 VOIDmode, EXPAND_NORMAL);
6729 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6730 rtx label_r = label_rtx (label);
6731
6732 /* This is copied from the handling of non-local gotos. */
6733 expand_builtin_setjmp_setup (buf_addr, label_r);
6734 nonlocal_goto_handler_labels
6735 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6736 nonlocal_goto_handler_labels);
6737 /* ??? Do not let expand_label treat us as such since we would
6738 not want to be both on the list of non-local labels and on
6739 the list of forced labels. */
6740 FORCED_LABEL (label) = 0;
6741 return const0_rtx;
6742 }
6743 break;
6744
6745 case BUILT_IN_SETJMP_DISPATCHER:
6746 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6747 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6748 {
6749 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6750 rtx label_r = label_rtx (label);
6751
6752 /* Remove the dispatcher label from the list of non-local labels
6753 since the receiver labels have been added to it above. */
6754 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6755 return const0_rtx;
6756 }
6757 break;
6758
6759 case BUILT_IN_SETJMP_RECEIVER:
6760 /* __builtin_setjmp_receiver is passed the receiver label. */
6761 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6762 {
6763 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6764 rtx label_r = label_rtx (label);
6765
6766 expand_builtin_setjmp_receiver (label_r);
6767 return const0_rtx;
6768 }
6769 break;
6770
6771 /* __builtin_longjmp is passed a pointer to an array of five words.
6772 It's similar to the C library longjmp function but works with
6773 __builtin_setjmp above. */
6774 case BUILT_IN_LONGJMP:
6775 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6776 {
6777 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6778 VOIDmode, EXPAND_NORMAL);
6779 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6780
6781 if (value != const1_rtx)
6782 {
6783 error ("%<__builtin_longjmp%> second argument must be 1");
6784 return const0_rtx;
6785 }
6786
6787 expand_builtin_longjmp (buf_addr, value);
6788 return const0_rtx;
6789 }
6790 break;
6791
6792 case BUILT_IN_NONLOCAL_GOTO:
6793 target = expand_builtin_nonlocal_goto (exp);
6794 if (target)
6795 return target;
6796 break;
6797
6798 /* This updates the setjmp buffer that is its argument with the value
6799 of the current stack pointer. */
6800 case BUILT_IN_UPDATE_SETJMP_BUF:
6801 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6802 {
6803 rtx buf_addr
6804 = expand_normal (CALL_EXPR_ARG (exp, 0));
6805
6806 expand_builtin_update_setjmp_buf (buf_addr);
6807 return const0_rtx;
6808 }
6809 break;
6810
6811 case BUILT_IN_TRAP:
6812 expand_builtin_trap ();
6813 return const0_rtx;
6814
6815 case BUILT_IN_UNREACHABLE:
6816 expand_builtin_unreachable ();
6817 return const0_rtx;
6818
6819 case BUILT_IN_PRINTF:
6820 target = expand_builtin_printf (exp, target, mode, false);
6821 if (target)
6822 return target;
6823 break;
6824
6825 case BUILT_IN_PRINTF_UNLOCKED:
6826 target = expand_builtin_printf (exp, target, mode, true);
6827 if (target)
6828 return target;
6829 break;
6830
6831 case BUILT_IN_FPUTS:
6832 target = expand_builtin_fputs (exp, target, false);
6833 if (target)
6834 return target;
6835 break;
6836 case BUILT_IN_FPUTS_UNLOCKED:
6837 target = expand_builtin_fputs (exp, target, true);
6838 if (target)
6839 return target;
6840 break;
6841
6842 case BUILT_IN_FPRINTF:
6843 target = expand_builtin_fprintf (exp, target, mode, false);
6844 if (target)
6845 return target;
6846 break;
6847
6848 case BUILT_IN_FPRINTF_UNLOCKED:
6849 target = expand_builtin_fprintf (exp, target, mode, true);
6850 if (target)
6851 return target;
6852 break;
6853
6854 case BUILT_IN_SPRINTF:
6855 target = expand_builtin_sprintf (exp, target, mode);
6856 if (target)
6857 return target;
6858 break;
6859
6860 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6861 case BUILT_IN_SIGNBITD32:
6862 case BUILT_IN_SIGNBITD64:
6863 case BUILT_IN_SIGNBITD128:
6864 target = expand_builtin_signbit (exp, target);
6865 if (target)
6866 return target;
6867 break;
6868
6869 /* Various hooks for the DWARF 2 __throw routine. */
6870 case BUILT_IN_UNWIND_INIT:
6871 expand_builtin_unwind_init ();
6872 return const0_rtx;
6873 case BUILT_IN_DWARF_CFA:
6874 return virtual_cfa_rtx;
6875 #ifdef DWARF2_UNWIND_INFO
6876 case BUILT_IN_DWARF_SP_COLUMN:
6877 return expand_builtin_dwarf_sp_column ();
6878 case BUILT_IN_INIT_DWARF_REG_SIZES:
6879 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6880 return const0_rtx;
6881 #endif
6882 case BUILT_IN_FROB_RETURN_ADDR:
6883 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6884 case BUILT_IN_EXTRACT_RETURN_ADDR:
6885 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6886 case BUILT_IN_EH_RETURN:
6887 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6888 CALL_EXPR_ARG (exp, 1));
6889 return const0_rtx;
6890 #ifdef EH_RETURN_DATA_REGNO
6891 case BUILT_IN_EH_RETURN_DATA_REGNO:
6892 return expand_builtin_eh_return_data_regno (exp);
6893 #endif
6894 case BUILT_IN_EXTEND_POINTER:
6895 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6896
6897 case BUILT_IN_VA_START:
6898 return expand_builtin_va_start (exp);
6899 case BUILT_IN_VA_END:
6900 return expand_builtin_va_end (exp);
6901 case BUILT_IN_VA_COPY:
6902 return expand_builtin_va_copy (exp);
6903 case BUILT_IN_EXPECT:
6904 return expand_builtin_expect (exp, target);
6905 case BUILT_IN_PREFETCH:
6906 expand_builtin_prefetch (exp);
6907 return const0_rtx;
6908
6909 case BUILT_IN_PROFILE_FUNC_ENTER:
6910 return expand_builtin_profile_func (false);
6911 case BUILT_IN_PROFILE_FUNC_EXIT:
6912 return expand_builtin_profile_func (true);
6913
6914 case BUILT_IN_INIT_TRAMPOLINE:
6915 return expand_builtin_init_trampoline (exp);
6916 case BUILT_IN_ADJUST_TRAMPOLINE:
6917 return expand_builtin_adjust_trampoline (exp);
6918
6919 case BUILT_IN_FORK:
6920 case BUILT_IN_EXECL:
6921 case BUILT_IN_EXECV:
6922 case BUILT_IN_EXECLP:
6923 case BUILT_IN_EXECLE:
6924 case BUILT_IN_EXECVP:
6925 case BUILT_IN_EXECVE:
6926 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6927 if (target)
6928 return target;
6929 break;
6930
6931 case BUILT_IN_FETCH_AND_ADD_1:
6932 case BUILT_IN_FETCH_AND_ADD_2:
6933 case BUILT_IN_FETCH_AND_ADD_4:
6934 case BUILT_IN_FETCH_AND_ADD_8:
6935 case BUILT_IN_FETCH_AND_ADD_16:
6936 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6937 target = expand_builtin_sync_operation (mode, exp, PLUS,
6938 false, target, ignore);
6939 if (target)
6940 return target;
6941 break;
6942
6943 case BUILT_IN_FETCH_AND_SUB_1:
6944 case BUILT_IN_FETCH_AND_SUB_2:
6945 case BUILT_IN_FETCH_AND_SUB_4:
6946 case BUILT_IN_FETCH_AND_SUB_8:
6947 case BUILT_IN_FETCH_AND_SUB_16:
6948 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6949 target = expand_builtin_sync_operation (mode, exp, MINUS,
6950 false, target, ignore);
6951 if (target)
6952 return target;
6953 break;
6954
6955 case BUILT_IN_FETCH_AND_OR_1:
6956 case BUILT_IN_FETCH_AND_OR_2:
6957 case BUILT_IN_FETCH_AND_OR_4:
6958 case BUILT_IN_FETCH_AND_OR_8:
6959 case BUILT_IN_FETCH_AND_OR_16:
6960 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6961 target = expand_builtin_sync_operation (mode, exp, IOR,
6962 false, target, ignore);
6963 if (target)
6964 return target;
6965 break;
6966
6967 case BUILT_IN_FETCH_AND_AND_1:
6968 case BUILT_IN_FETCH_AND_AND_2:
6969 case BUILT_IN_FETCH_AND_AND_4:
6970 case BUILT_IN_FETCH_AND_AND_8:
6971 case BUILT_IN_FETCH_AND_AND_16:
6972 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6973 target = expand_builtin_sync_operation (mode, exp, AND,
6974 false, target, ignore);
6975 if (target)
6976 return target;
6977 break;
6978
6979 case BUILT_IN_FETCH_AND_XOR_1:
6980 case BUILT_IN_FETCH_AND_XOR_2:
6981 case BUILT_IN_FETCH_AND_XOR_4:
6982 case BUILT_IN_FETCH_AND_XOR_8:
6983 case BUILT_IN_FETCH_AND_XOR_16:
6984 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6985 target = expand_builtin_sync_operation (mode, exp, XOR,
6986 false, target, ignore);
6987 if (target)
6988 return target;
6989 break;
6990
6991 case BUILT_IN_FETCH_AND_NAND_1:
6992 case BUILT_IN_FETCH_AND_NAND_2:
6993 case BUILT_IN_FETCH_AND_NAND_4:
6994 case BUILT_IN_FETCH_AND_NAND_8:
6995 case BUILT_IN_FETCH_AND_NAND_16:
6996 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6997 target = expand_builtin_sync_operation (mode, exp, NOT,
6998 false, target, ignore);
6999 if (target)
7000 return target;
7001 break;
7002
7003 case BUILT_IN_ADD_AND_FETCH_1:
7004 case BUILT_IN_ADD_AND_FETCH_2:
7005 case BUILT_IN_ADD_AND_FETCH_4:
7006 case BUILT_IN_ADD_AND_FETCH_8:
7007 case BUILT_IN_ADD_AND_FETCH_16:
7008 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
7009 target = expand_builtin_sync_operation (mode, exp, PLUS,
7010 true, target, ignore);
7011 if (target)
7012 return target;
7013 break;
7014
7015 case BUILT_IN_SUB_AND_FETCH_1:
7016 case BUILT_IN_SUB_AND_FETCH_2:
7017 case BUILT_IN_SUB_AND_FETCH_4:
7018 case BUILT_IN_SUB_AND_FETCH_8:
7019 case BUILT_IN_SUB_AND_FETCH_16:
7020 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7021 target = expand_builtin_sync_operation (mode, exp, MINUS,
7022 true, target, ignore);
7023 if (target)
7024 return target;
7025 break;
7026
7027 case BUILT_IN_OR_AND_FETCH_1:
7028 case BUILT_IN_OR_AND_FETCH_2:
7029 case BUILT_IN_OR_AND_FETCH_4:
7030 case BUILT_IN_OR_AND_FETCH_8:
7031 case BUILT_IN_OR_AND_FETCH_16:
7032 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7033 target = expand_builtin_sync_operation (mode, exp, IOR,
7034 true, target, ignore);
7035 if (target)
7036 return target;
7037 break;
7038
7039 case BUILT_IN_AND_AND_FETCH_1:
7040 case BUILT_IN_AND_AND_FETCH_2:
7041 case BUILT_IN_AND_AND_FETCH_4:
7042 case BUILT_IN_AND_AND_FETCH_8:
7043 case BUILT_IN_AND_AND_FETCH_16:
7044 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7045 target = expand_builtin_sync_operation (mode, exp, AND,
7046 true, target, ignore);
7047 if (target)
7048 return target;
7049 break;
7050
7051 case BUILT_IN_XOR_AND_FETCH_1:
7052 case BUILT_IN_XOR_AND_FETCH_2:
7053 case BUILT_IN_XOR_AND_FETCH_4:
7054 case BUILT_IN_XOR_AND_FETCH_8:
7055 case BUILT_IN_XOR_AND_FETCH_16:
7056 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7057 target = expand_builtin_sync_operation (mode, exp, XOR,
7058 true, target, ignore);
7059 if (target)
7060 return target;
7061 break;
7062
7063 case BUILT_IN_NAND_AND_FETCH_1:
7064 case BUILT_IN_NAND_AND_FETCH_2:
7065 case BUILT_IN_NAND_AND_FETCH_4:
7066 case BUILT_IN_NAND_AND_FETCH_8:
7067 case BUILT_IN_NAND_AND_FETCH_16:
7068 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7069 target = expand_builtin_sync_operation (mode, exp, NOT,
7070 true, target, ignore);
7071 if (target)
7072 return target;
7073 break;
7074
7075 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7076 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7077 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7078 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7079 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7080 if (mode == VOIDmode)
7081 mode = TYPE_MODE (boolean_type_node);
7082 if (!target || !register_operand (target, mode))
7083 target = gen_reg_rtx (mode);
7084
7085 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7086 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7087 if (target)
7088 return target;
7089 break;
7090
7091 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7092 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7093 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7094 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7095 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7096 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7097 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7098 if (target)
7099 return target;
7100 break;
7101
7102 case BUILT_IN_LOCK_TEST_AND_SET_1:
7103 case BUILT_IN_LOCK_TEST_AND_SET_2:
7104 case BUILT_IN_LOCK_TEST_AND_SET_4:
7105 case BUILT_IN_LOCK_TEST_AND_SET_8:
7106 case BUILT_IN_LOCK_TEST_AND_SET_16:
7107 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7108 target = expand_builtin_lock_test_and_set (mode, exp, target);
7109 if (target)
7110 return target;
7111 break;
7112
7113 case BUILT_IN_LOCK_RELEASE_1:
7114 case BUILT_IN_LOCK_RELEASE_2:
7115 case BUILT_IN_LOCK_RELEASE_4:
7116 case BUILT_IN_LOCK_RELEASE_8:
7117 case BUILT_IN_LOCK_RELEASE_16:
7118 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7119 expand_builtin_lock_release (mode, exp);
7120 return const0_rtx;
7121
7122 case BUILT_IN_SYNCHRONIZE:
7123 expand_builtin_synchronize ();
7124 return const0_rtx;
7125
7126 case BUILT_IN_OBJECT_SIZE:
7127 return expand_builtin_object_size (exp);
7128
7129 case BUILT_IN_MEMCPY_CHK:
7130 case BUILT_IN_MEMPCPY_CHK:
7131 case BUILT_IN_MEMMOVE_CHK:
7132 case BUILT_IN_MEMSET_CHK:
7133 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7134 if (target)
7135 return target;
7136 break;
7137
7138 case BUILT_IN_STRCPY_CHK:
7139 case BUILT_IN_STPCPY_CHK:
7140 case BUILT_IN_STRNCPY_CHK:
7141 case BUILT_IN_STRCAT_CHK:
7142 case BUILT_IN_STRNCAT_CHK:
7143 case BUILT_IN_SNPRINTF_CHK:
7144 case BUILT_IN_VSNPRINTF_CHK:
7145 maybe_emit_chk_warning (exp, fcode);
7146 break;
7147
7148 case BUILT_IN_SPRINTF_CHK:
7149 case BUILT_IN_VSPRINTF_CHK:
7150 maybe_emit_sprintf_chk_warning (exp, fcode);
7151 break;
7152
7153 case BUILT_IN_FREE:
7154 maybe_emit_free_warning (exp);
7155 break;
7156
7157 default: /* just do library call, if unknown builtin */
7158 break;
7159 }
7160
7161 /* The switch statement above can drop through to cause the function
7162 to be called normally. */
7163 return expand_call (exp, target, ignore);
7164 }
7165
7166 /* Determine whether a tree node represents a call to a built-in
7167 function. If the tree T is a call to a built-in function with
7168 the right number of arguments of the appropriate types, return
7169 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7170 Otherwise the return value is END_BUILTINS. */
7171
7172 enum built_in_function
7173 builtin_mathfn_code (const_tree t)
7174 {
7175 const_tree fndecl, arg, parmlist;
7176 const_tree argtype, parmtype;
7177 const_call_expr_arg_iterator iter;
7178
7179 if (TREE_CODE (t) != CALL_EXPR
7180 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7181 return END_BUILTINS;
7182
7183 fndecl = get_callee_fndecl (t);
7184 if (fndecl == NULL_TREE
7185 || TREE_CODE (fndecl) != FUNCTION_DECL
7186 || ! DECL_BUILT_IN (fndecl)
7187 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7188 return END_BUILTINS;
7189
7190 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7191 init_const_call_expr_arg_iterator (t, &iter);
7192 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7193 {
7194 /* If a function doesn't take a variable number of arguments,
7195 the last element in the list will have type `void'. */
7196 parmtype = TREE_VALUE (parmlist);
7197 if (VOID_TYPE_P (parmtype))
7198 {
7199 if (more_const_call_expr_args_p (&iter))
7200 return END_BUILTINS;
7201 return DECL_FUNCTION_CODE (fndecl);
7202 }
7203
7204 if (! more_const_call_expr_args_p (&iter))
7205 return END_BUILTINS;
7206
7207 arg = next_const_call_expr_arg (&iter);
7208 argtype = TREE_TYPE (arg);
7209
7210 if (SCALAR_FLOAT_TYPE_P (parmtype))
7211 {
7212 if (! SCALAR_FLOAT_TYPE_P (argtype))
7213 return END_BUILTINS;
7214 }
7215 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7216 {
7217 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7218 return END_BUILTINS;
7219 }
7220 else if (POINTER_TYPE_P (parmtype))
7221 {
7222 if (! POINTER_TYPE_P (argtype))
7223 return END_BUILTINS;
7224 }
7225 else if (INTEGRAL_TYPE_P (parmtype))
7226 {
7227 if (! INTEGRAL_TYPE_P (argtype))
7228 return END_BUILTINS;
7229 }
7230 else
7231 return END_BUILTINS;
7232 }
7233
7234 /* Variable-length argument list. */
7235 return DECL_FUNCTION_CODE (fndecl);
7236 }
7237
7238 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7239 evaluate to a constant. */
7240
7241 static tree
7242 fold_builtin_constant_p (tree arg)
7243 {
7244 /* We return 1 for a numeric type that's known to be a constant
7245 value at compile-time or for an aggregate type that's a
7246 literal constant. */
7247 STRIP_NOPS (arg);
7248
7249 /* If we know this is a constant, emit the constant of one. */
7250 if (CONSTANT_CLASS_P (arg)
7251 || (TREE_CODE (arg) == CONSTRUCTOR
7252 && TREE_CONSTANT (arg)))
7253 return integer_one_node;
7254 if (TREE_CODE (arg) == ADDR_EXPR)
7255 {
7256 tree op = TREE_OPERAND (arg, 0);
7257 if (TREE_CODE (op) == STRING_CST
7258 || (TREE_CODE (op) == ARRAY_REF
7259 && integer_zerop (TREE_OPERAND (op, 1))
7260 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7261 return integer_one_node;
7262 }
7263
7264 /* If this expression has side effects, show we don't know it to be a
7265 constant. Likewise if it's a pointer or aggregate type since in
7266 those case we only want literals, since those are only optimized
7267 when generating RTL, not later.
7268 And finally, if we are compiling an initializer, not code, we
7269 need to return a definite result now; there's not going to be any
7270 more optimization done. */
7271 if (TREE_SIDE_EFFECTS (arg)
7272 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7273 || POINTER_TYPE_P (TREE_TYPE (arg))
7274 || cfun == 0
7275 || folding_initializer)
7276 return integer_zero_node;
7277
7278 return NULL_TREE;
7279 }
7280
7281 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7282 return it as a truthvalue. */
7283
7284 static tree
7285 build_builtin_expect_predicate (tree pred, tree expected)
7286 {
7287 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7288
7289 fn = built_in_decls[BUILT_IN_EXPECT];
7290 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7291 ret_type = TREE_TYPE (TREE_TYPE (fn));
7292 pred_type = TREE_VALUE (arg_types);
7293 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7294
7295 pred = fold_convert (pred_type, pred);
7296 expected = fold_convert (expected_type, expected);
7297 call_expr = build_call_expr (fn, 2, pred, expected);
7298
7299 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7300 build_int_cst (ret_type, 0));
7301 }
7302
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  /* Return ARG0 unchanged: the nested __builtin_expect already
     carries the prediction.  */
  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      /* Wrap each operand in its own __builtin_expect so the
	 prediction survives the short-circuit lowering.  */
      op0 = build_builtin_expect_predicate (op0, arg1);
      op1 = build_builtin_expect_predicate (op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert (TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip down to the underlying decl; a weak symbol's address
	 may still be zero at run time, so it is not a usable
	 compile-time constant.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
7373
7374 /* Fold a call to __builtin_classify_type with argument ARG. */
7375
7376 static tree
7377 fold_builtin_classify_type (tree arg)
7378 {
7379 if (arg == 0)
7380 return build_int_cst (NULL_TREE, no_type_class);
7381
7382 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7383 }
7384
7385 /* Fold a call to __builtin_strlen with argument ARG. */
7386
7387 static tree
7388 fold_builtin_strlen (tree arg)
7389 {
7390 if (!validate_arg (arg, POINTER_TYPE))
7391 return NULL_TREE;
7392 else
7393 {
7394 tree len = c_strlen (arg, 0);
7395
7396 if (len)
7397 {
7398 /* Convert from the internal "sizetype" type to "size_t". */
7399 if (size_type_node)
7400 len = fold_convert (size_type_node, len);
7401 return len;
7402 }
7403
7404 return NULL_TREE;
7405 }
7406 }
7407
7408 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7409
7410 static tree
7411 fold_builtin_inf (tree type, int warn)
7412 {
7413 REAL_VALUE_TYPE real;
7414
7415 /* __builtin_inff is intended to be usable to define INFINITY on all
7416 targets. If an infinity is not available, INFINITY expands "to a
7417 positive constant of type float that overflows at translation
7418 time", footnote "In this case, using INFINITY will violate the
7419 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7420 Thus we pedwarn to ensure this constraint violation is
7421 diagnosed. */
7422 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7423 pedwarn (input_location, 0, "target format does not support infinity");
7424
7425 real_inf (&real);
7426 return build_real (type, real);
7427 }
7428
7429 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7430
7431 static tree
7432 fold_builtin_nan (tree arg, tree type, int quiet)
7433 {
7434 REAL_VALUE_TYPE real;
7435 const char *str;
7436
7437 if (!validate_arg (arg, POINTER_TYPE))
7438 return NULL_TREE;
7439 str = c_getstr (arg);
7440 if (!str)
7441 return NULL_TREE;
7442
7443 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7444 return NULL_TREE;
7445
7446 return build_real (type, real);
7447 }
7448
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* A conversion from an integer type is integral by construction.  */
    case FLOAT_EXPR:
      return true;

    /* Absolute value and SAVE_EXPR preserve integrality of the operand.  */
    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* The value of these nodes is their second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    /* These arithmetic operations yield an integer when both
       operands are integers.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* A conditional is integral when both selectable arms are.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    /* Ask the real-constant machinery directly.  */
    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* A widening (or same-mode) conversion from an integer type is
	   integral; a conversion from another real type preserves the
	   operand's integrality.  Other sources fall through to false.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      /* Rounding builtins always produce integral values; fmin/fmax
	 do when both of their arguments are integral.  */
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Conservative default: not known to be integral.  */
  return false;
}
7520
7521 /* FNDECL is assumed to be a builtin where truncation can be propagated
7522 across (for instance floor((double)f) == (double)floorf (f).
7523 Do the transformation for a call with argument ARG. */
7524
7525 static tree
7526 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7527 {
7528 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7529
7530 if (!validate_arg (arg, REAL_TYPE))
7531 return NULL_TREE;
7532
7533 /* Integer rounding functions are idempotent. */
7534 if (fcode == builtin_mathfn_code (arg))
7535 return arg;
7536
7537 /* If argument is already integer valued, and we don't need to worry
7538 about setting errno, there's no need to perform rounding. */
7539 if (! flag_errno_math && integer_valued_real_p (arg))
7540 return arg;
7541
7542 if (optimize)
7543 {
7544 tree arg0 = strip_float_extensions (arg);
7545 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7546 tree newtype = TREE_TYPE (arg0);
7547 tree decl;
7548
7549 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7550 && (decl = mathfn_built_in (newtype, fcode)))
7551 return fold_convert (ftype,
7552 build_call_expr (decl, 1,
7553 fold_convert (newtype, arg0)));
7554 }
7555 return NULL_TREE;
7556 }
7557
7558 /* FNDECL is assumed to be builtin which can narrow the FP type of
7559 the argument, for instance lround((double)f) -> lroundf (f).
7560 Do the transformation for a call with argument ARG. */
7561
7562 static tree
7563 fold_fixed_mathfn (tree fndecl, tree arg)
7564 {
7565 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7566
7567 if (!validate_arg (arg, REAL_TYPE))
7568 return NULL_TREE;
7569
7570 /* If argument is already integer valued, and we don't need to worry
7571 about setting errno, there's no need to perform rounding. */
7572 if (! flag_errno_math && integer_valued_real_p (arg))
7573 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7574
7575 if (optimize)
7576 {
7577 tree ftype = TREE_TYPE (arg);
7578 tree arg0 = strip_float_extensions (arg);
7579 tree newtype = TREE_TYPE (arg0);
7580 tree decl;
7581
7582 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7583 && (decl = mathfn_built_in (newtype, fcode)))
7584 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7585 }
7586
7587 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7588 sizeof (long long) == sizeof (long). */
7589 if (TYPE_PRECISION (long_long_integer_type_node)
7590 == TYPE_PRECISION (long_integer_type_node))
7591 {
7592 tree newfn = NULL_TREE;
7593 switch (fcode)
7594 {
7595 CASE_FLT_FN (BUILT_IN_LLCEIL):
7596 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7597 break;
7598
7599 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7600 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7601 break;
7602
7603 CASE_FLT_FN (BUILT_IN_LLROUND):
7604 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7605 break;
7606
7607 CASE_FLT_FN (BUILT_IN_LLRINT):
7608 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7609 break;
7610
7611 default:
7612 break;
7613 }
7614
7615 if (newfn)
7616 {
7617 tree newcall = build_call_expr(newfn, 1, arg);
7618 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7619 }
7620 }
7621
7622 return NULL_TREE;
7623 }
7624
7625 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7626 return type. Return NULL_TREE if no simplification can be made. */
7627
7628 static tree
7629 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7630 {
7631 tree res;
7632
7633 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7634 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7635 return NULL_TREE;
7636
7637 /* Calculate the result when the argument is a constant. */
7638 if (TREE_CODE (arg) == COMPLEX_CST
7639 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7640 type, mpfr_hypot)))
7641 return res;
7642
7643 if (TREE_CODE (arg) == COMPLEX_EXPR)
7644 {
7645 tree real = TREE_OPERAND (arg, 0);
7646 tree imag = TREE_OPERAND (arg, 1);
7647
7648 /* If either part is zero, cabs is fabs of the other. */
7649 if (real_zerop (real))
7650 return fold_build1 (ABS_EXPR, type, imag);
7651 if (real_zerop (imag))
7652 return fold_build1 (ABS_EXPR, type, real);
7653
7654 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7655 if (flag_unsafe_math_optimizations
7656 && operand_equal_p (real, imag, OEP_PURE_SAME))
7657 {
7658 const REAL_VALUE_TYPE sqrt2_trunc
7659 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7660 STRIP_NOPS (real);
7661 return fold_build2 (MULT_EXPR, type,
7662 fold_build1 (ABS_EXPR, type, real),
7663 build_real (type, sqrt2_trunc));
7664 }
7665 }
7666
7667 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7668 if (TREE_CODE (arg) == NEGATE_EXPR
7669 || TREE_CODE (arg) == CONJ_EXPR)
7670 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7671
7672 /* Don't do this when optimizing for size. */
7673 if (flag_unsafe_math_optimizations
7674 && optimize && optimize_function_for_speed_p (cfun))
7675 {
7676 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7677
7678 if (sqrtfn != NULL_TREE)
7679 {
7680 tree rpart, ipart, result;
7681
7682 arg = builtin_save_expr (arg);
7683
7684 rpart = fold_build1 (REALPART_EXPR, type, arg);
7685 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7686
7687 rpart = builtin_save_expr (rpart);
7688 ipart = builtin_save_expr (ipart);
7689
7690 result = fold_build2 (PLUS_EXPR, type,
7691 fold_build2 (MULT_EXPR, type,
7692 rpart, rpart),
7693 fold_build2 (MULT_EXPR, type,
7694 ipart, ipart));
7695
7696 return build_call_expr (sqrtfn, 1, result);
7697 }
7698 }
7699
7700 return NULL_TREE;
7701 }
7702
7703 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7704 Return NULL_TREE if no simplification can be made. */
7705
static tree
fold_builtin_sqrt (tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  MPFR folds
     only for arguments >= 0 (inclusive lower bound dconst0).  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2 (MULT_EXPR, type,
			 CALL_EXPR_ARG (arg, 0),
			 build_real (type, dconsthalf));
      return build_call_expr (expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root.  Decrementing the binary
	     exponent halves the value, turning 1/N into 1/(2*N).  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr (powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  The absolute value is
     required because pow with a fractional exponent of even
     denominator is only defined for nonnegative bases.  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2 (MULT_EXPR, type, arg1,
			   build_real (type, dconsthalf));
      return build_call_expr (powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7776
7777 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7778 Return NULL_TREE if no simplification can be made. */
7779
static tree
fold_builtin_cbrt (tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2 (MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, third_trunc));
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      /* 1/6 is 1/3 with its binary exponent decremented.  */
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr (powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr (powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
					 build_real (type, dconstroot));
	      return build_call_expr (powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7867
7868 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7869 TYPE is the type of the return value. Return NULL_TREE if no
7870 simplification can be made. */
7871
7872 static tree
7873 fold_builtin_cos (tree arg, tree type, tree fndecl)
7874 {
7875 tree res, narg;
7876
7877 if (!validate_arg (arg, REAL_TYPE))
7878 return NULL_TREE;
7879
7880 /* Calculate the result when the argument is a constant. */
7881 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7882 return res;
7883
7884 /* Optimize cos(-x) into cos (x). */
7885 if ((narg = fold_strip_sign_ops (arg)))
7886 return build_call_expr (fndecl, 1, narg);
7887
7888 return NULL_TREE;
7889 }
7890
7891 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7892 Return NULL_TREE if no simplification can be made. */
7893
7894 static tree
7895 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7896 {
7897 if (validate_arg (arg, REAL_TYPE))
7898 {
7899 tree res, narg;
7900
7901 /* Calculate the result when the argument is a constant. */
7902 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7903 return res;
7904
7905 /* Optimize cosh(-x) into cosh (x). */
7906 if ((narg = fold_strip_sign_ops (arg)))
7907 return build_call_expr (fndecl, 1, narg);
7908 }
7909
7910 return NULL_TREE;
7911 }
7912
7913 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7914 argument ARG. TYPE is the type of the return value. Return
7915 NULL_TREE if no simplification can be made. */
7916
static tree
fold_builtin_ccos (tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
		   bool hyper ATTRIBUTE_UNUSED)
{
  /* TYPE and HYPER are only referenced when MPC is available, hence
     the ATTRIBUTE_UNUSED markers.  */
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

#ifdef HAVE_mpc
      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;
#endif

      /* Optimize fn(-x) into fn(x): both ccos and ccosh are even.  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr (fndecl, 1, tmp);
    }

  return NULL_TREE;
}
7939
7940 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7941 Return NULL_TREE if no simplification can be made. */
7942
7943 static tree
7944 fold_builtin_tan (tree arg, tree type)
7945 {
7946 enum built_in_function fcode;
7947 tree res;
7948
7949 if (!validate_arg (arg, REAL_TYPE))
7950 return NULL_TREE;
7951
7952 /* Calculate the result when the argument is a constant. */
7953 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7954 return res;
7955
7956 /* Optimize tan(atan(x)) = x. */
7957 fcode = builtin_mathfn_code (arg);
7958 if (flag_unsafe_math_optimizations
7959 && (fcode == BUILT_IN_ATAN
7960 || fcode == BUILT_IN_ATANF
7961 || fcode == BUILT_IN_ATANL))
7962 return CALL_EXPR_ARG (arg, 0);
7963
7964 return NULL_TREE;
7965 }
7966
7967 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7968 NULL_TREE if no simplification can be made. */
7969
static tree
fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Wrap the cexpi call in a save_expr so it is evaluated only once,
     then store its two parts through the pointer arguments: the
     imaginary part (sin) through ARG1, the real part (cos) through
     ARG2.  */
  call = build_call_expr (fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg2),
			 build1 (REALPART_EXPR, type, call)));
}
8005
8006 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8007 NULL_TREE if no simplification can be made. */
8008
8009 static tree
8010 fold_builtin_cexp (tree arg0, tree type)
8011 {
8012 tree rtype;
8013 tree realp, imagp, ifn;
8014 #ifdef HAVE_mpc
8015 tree res;
8016 #endif
8017
8018 if (!validate_arg (arg0, COMPLEX_TYPE)
8019 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8020 return NULL_TREE;
8021
8022 #ifdef HAVE_mpc
8023 /* Calculate the result when the argument is a constant. */
8024 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8025 return res;
8026 #endif
8027
8028 rtype = TREE_TYPE (TREE_TYPE (arg0));
8029
8030 /* In case we can figure out the real part of arg0 and it is constant zero
8031 fold to cexpi. */
8032 if (!TARGET_C99_FUNCTIONS)
8033 return NULL_TREE;
8034 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8035 if (!ifn)
8036 return NULL_TREE;
8037
8038 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
8039 && real_zerop (realp))
8040 {
8041 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
8042 return build_call_expr (ifn, 1, narg);
8043 }
8044
8045 /* In case we can easily decompose real and imaginary parts split cexp
8046 to exp (r) * cexpi (i). */
8047 if (flag_unsafe_math_optimizations
8048 && realp)
8049 {
8050 tree rfn, rcall, icall;
8051
8052 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8053 if (!rfn)
8054 return NULL_TREE;
8055
8056 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
8057 if (!imagp)
8058 return NULL_TREE;
8059
8060 icall = build_call_expr (ifn, 1, imagp);
8061 icall = builtin_save_expr (icall);
8062 rcall = build_call_expr (rfn, 1, realp);
8063 rcall = builtin_save_expr (rcall);
8064 return fold_build2 (COMPLEX_EXPR, type,
8065 fold_build2 (MULT_EXPR, rtype,
8066 rcall,
8067 fold_build1 (REALPART_EXPR, rtype, icall)),
8068 fold_build2 (MULT_EXPR, rtype,
8069 rcall,
8070 fold_build1 (IMAGPART_EXPR, rtype, icall)));
8071 }
8072
8073 return NULL_TREE;
8074 }
8075
8076 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8077 Return NULL_TREE if no simplification can be made. */
8078
8079 static tree
8080 fold_builtin_trunc (tree fndecl, tree arg)
8081 {
8082 if (!validate_arg (arg, REAL_TYPE))
8083 return NULL_TREE;
8084
8085 /* Optimize trunc of constant value. */
8086 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8087 {
8088 REAL_VALUE_TYPE r, x;
8089 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8090
8091 x = TREE_REAL_CST (arg);
8092 real_trunc (&r, TYPE_MODE (type), &x);
8093 return build_real (type, r);
8094 }
8095
8096 return fold_trunc_transparent_mathfn (fndecl, arg);
8097 }
8098
8099 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8100 Return NULL_TREE if no simplification can be made. */
8101
8102 static tree
8103 fold_builtin_floor (tree fndecl, tree arg)
8104 {
8105 if (!validate_arg (arg, REAL_TYPE))
8106 return NULL_TREE;
8107
8108 /* Optimize floor of constant value. */
8109 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8110 {
8111 REAL_VALUE_TYPE x;
8112
8113 x = TREE_REAL_CST (arg);
8114 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8115 {
8116 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8117 REAL_VALUE_TYPE r;
8118
8119 real_floor (&r, TYPE_MODE (type), &x);
8120 return build_real (type, r);
8121 }
8122 }
8123
8124 /* Fold floor (x) where x is nonnegative to trunc (x). */
8125 if (tree_expr_nonnegative_p (arg))
8126 {
8127 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8128 if (truncfn)
8129 return build_call_expr (truncfn, 1, arg);
8130 }
8131
8132 return fold_trunc_transparent_mathfn (fndecl, arg);
8133 }
8134
8135 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8136 Return NULL_TREE if no simplification can be made. */
8137
8138 static tree
8139 fold_builtin_ceil (tree fndecl, tree arg)
8140 {
8141 if (!validate_arg (arg, REAL_TYPE))
8142 return NULL_TREE;
8143
8144 /* Optimize ceil of constant value. */
8145 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8146 {
8147 REAL_VALUE_TYPE x;
8148
8149 x = TREE_REAL_CST (arg);
8150 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8151 {
8152 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8153 REAL_VALUE_TYPE r;
8154
8155 real_ceil (&r, TYPE_MODE (type), &x);
8156 return build_real (type, r);
8157 }
8158 }
8159
8160 return fold_trunc_transparent_mathfn (fndecl, arg);
8161 }
8162
8163 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8164 Return NULL_TREE if no simplification can be made. */
8165
8166 static tree
8167 fold_builtin_round (tree fndecl, tree arg)
8168 {
8169 if (!validate_arg (arg, REAL_TYPE))
8170 return NULL_TREE;
8171
8172 /* Optimize round of constant value. */
8173 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8174 {
8175 REAL_VALUE_TYPE x;
8176
8177 x = TREE_REAL_CST (arg);
8178 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8179 {
8180 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8181 REAL_VALUE_TYPE r;
8182
8183 real_round (&r, TYPE_MODE (type), &x);
8184 return build_real (type, r);
8185 }
8186 }
8187
8188 return fold_trunc_transparent_mathfn (fndecl, arg);
8189 }
8190
8191 /* Fold function call to builtin lround, lroundf or lroundl (or the
8192 corresponding long long versions) and other rounding functions. ARG
8193 is the argument to the call. Return NULL_TREE if no simplification
8194 can be made. */
8195
static tree
fold_builtin_int_roundingfn (tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  unsigned HOST_WIDE_INT lo2;
	  HOST_WIDE_INT hi, lo;
	  REAL_VALUE_TYPE r;

	  /* Round the constant according to which builtin this is.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Only fold when the rounded value fits the integer result
	     type (fit_double_type returns nonzero on overflow).  */
	  REAL_VALUE_TO_INT (&lo, &hi, r);
	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
	    return build_int_cst_wide (itype, lo2, hi);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
			    arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (fndecl, arg);
}
8256
8257 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8258 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8259 the argument to the call. Return NULL_TREE if no simplification can
8260 be made. */
8261
static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: one plus the index of the least significant set bit,
	     or zero when the argument is zero.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo) + 1;
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: number of leading zero bits.  For a zero argument the
	     target macro may set RESULT itself; otherwise default to
	     WIDTH.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: number of trailing zero bits, with the same zero-input
	     convention as clz.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: count set bits by repeatedly clearing the lowest
	     set bit (x &= x - 1).  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount reduced modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
8348
8349 /* Fold function call to builtin_bswap and the long and long long
8350 variants. Return NULL_TREE if no simplification can be made. */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Move each byte at bit offset S to the mirrored offset D,
	       reading from / writing to the low or high host word as
	       needed (the value may span two HOST_WIDE_INTs).  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
8407
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function.  */
8411
static tree
fold_builtin_logarithm (tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  The
	 &dconst0/false bounds presumably restrict folding to strictly
	 positive arguments (cf. the inclusive bound used for sqrt) —
	 TODO confirm against do_mpfr_arg1.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  FUNC identifies
	 which logarithm this is, so match the inverse builtin.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert (type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr (fndecl, 1, x);
	      return fold_build2 (MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8500
8501 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8502 NULL_TREE if no simplification can be made. */
8503
static tree
fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  hypot only depends on the
     magnitudes of its arguments.  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
			      narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1 (ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1 (ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2 (MULT_EXPR, type,
			  fold_build1 (ABS_EXPR, type, arg0),
			  build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
8546
8547
8548 /* Fold a builtin function call to pow, powf, or powl. Return
8549 NULL_TREE if no simplification can be made. */
static tree
fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  /* Folds that apply when the exponent is a real constant.  */
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand (type, build_real (type, dconst1),
				 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2 (RDIV_EXPR, type,
			    build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr (sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr (cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result is only acceptable with unsafe math
		 optimizations enabled.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr (fndecl, 2, narg0, arg1);
	    }
	}
    }

  /* Folds that inspect the form of the base expression.  */
  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2 (MULT_EXPR, type, arg, arg1);
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
				    build_real (type, dconsthalf));
	  return build_call_expr (fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
					build_real (type, dconstroot));
	      return build_call_expr (fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  tree arg01 = CALL_EXPR_ARG (arg0, 1);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
	  return build_call_expr (fndecl, 2, arg00, narg1);
	}
    }

  return NULL_TREE;
}
8696
8697 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8698 Return NULL_TREE if no simplification can be made. */
8699 static tree
8700 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8701 tree arg0, tree arg1, tree type)
8702 {
8703 if (!validate_arg (arg0, REAL_TYPE)
8704 || !validate_arg (arg1, INTEGER_TYPE))
8705 return NULL_TREE;
8706
8707 /* Optimize pow(1.0,y) = 1.0. */
8708 if (real_onep (arg0))
8709 return omit_one_operand (type, build_real (type, dconst1), arg1);
8710
8711 if (host_integerp (arg1, 0))
8712 {
8713 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8714
8715 /* Evaluate powi at compile-time. */
8716 if (TREE_CODE (arg0) == REAL_CST
8717 && !TREE_OVERFLOW (arg0))
8718 {
8719 REAL_VALUE_TYPE x;
8720 x = TREE_REAL_CST (arg0);
8721 real_powi (&x, TYPE_MODE (type), &x, c);
8722 return build_real (type, x);
8723 }
8724
8725 /* Optimize pow(x,0) = 1.0. */
8726 if (c == 0)
8727 return omit_one_operand (type, build_real (type, dconst1),
8728 arg0);
8729
8730 /* Optimize pow(x,1) = x. */
8731 if (c == 1)
8732 return arg0;
8733
8734 /* Optimize pow(x,-1) = 1.0/x. */
8735 if (c == -1)
8736 return fold_build2 (RDIV_EXPR, type,
8737 build_real (type, dconst1), arg0);
8738 }
8739
8740 return NULL_TREE;
8741 }
8742
8743 /* A subroutine of fold_builtin to fold the various exponent
8744 functions. Return NULL_TREE if no simplification can be made.
8745 FUNC is the corresponding MPFR exponent function. */
8746
static tree
fold_builtin_exponent (tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  FUNC identifies which exponential
	 is being folded, so pair it with the matching log builtin.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert (type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8783
8784 /* Return true if VAR is a VAR_DECL or a component thereof. */
8785
8786 static bool
8787 var_decl_component_p (tree var)
8788 {
8789 tree inner = var;
8790 while (handled_component_p (inner))
8791 inner = TREE_OPERAND (inner, 0);
8792 return SSA_VAR_P (inner);
8793 }
8794
8795 /* Fold function call to builtin memset. Return
8796 NULL_TREE if no simplification can be made. */
8797
static tree
fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, c);

  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* The destination must be the address of a non-volatile variable
     (or component thereof) of integral or pointer type.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The set length must cover the whole object exactly and the
     destination must be aligned enough for a single store.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < (int) length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the fill byte into every byte of CVAL.  The final
	 shift is split as (cval << 31) << 1 so that it stays well
	 defined when HOST_WIDE_INT is only 32 bits wide.  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Emit a single assignment *(etype *)DEST = CVAL.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref (fold_convert (build_pointer_type (etype),
					       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand (type, dest, ret);
}
8871
8872 /* Fold function call to builtin memset. Return
8873 NULL_TREE if no simplification can be made. */
8874
8875 static tree
8876 fold_builtin_bzero (tree dest, tree size, bool ignore)
8877 {
8878 if (! validate_arg (dest, POINTER_TYPE)
8879 || ! validate_arg (size, INTEGER_TYPE))
8880 return NULL_TREE;
8881
8882 if (!ignore)
8883 return NULL_TREE;
8884
8885 /* New argument list transforming bzero(ptr x, int y) to
8886 memset(ptr x, int 0, size_t y). This is done this way
8887 so that if it isn't expanded inline, we fallback to
8888 calling bzero instead of memset. */
8889
8890 return fold_builtin_memset (dest, integer_zero_node,
8891 fold_convert (sizetype, size),
8892 void_type_node, ignore);
8893 }
8894
/* Fold function call to builtin mem{{,p}cpy,move}.  DEST, SRC and LEN
   are the call's arguments, TYPE its return type and IGNORE is true
   when the return value is unused.  Return NULL_TREE if no
   simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      int src_align, dest_align;

      if (endp == 3)
	{
	  /* memmove: try to prove the regions cannot overlap so the
	     call can be turned into memcpy.  */
	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr (fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  srcvar = build_fold_indirect_ref (src);
	  destvar = build_fold_indirect_ref (dest);
	  if (srcvar
	      && !TREE_THIS_VOLATILE (srcvar)
	      && destvar
	      && !TREE_THIS_VOLATILE (destvar))
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      /* Find the base objects and bit offsets of both accesses.  */
	      src_base = srcvar;
	      if (handled_component_p (src_base))
		src_base = get_ref_base_and_extent (src_base, &src_offset,
						    &size, &maxsize);
	      dest_base = destvar;
	      if (handled_component_p (dest_base))
		dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
						     &size, &maxsize);
	      /* Use LEN (in bits) as the access size when it is a known
	         constant that does not overflow; otherwise unknown.  */
	      if (host_integerp (len, 1))
		{
		  maxsize = tree_low_cst (len, 1);
		  if (maxsize
		      > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
		    maxsize = -1;
		  else
		    maxsize *= BITS_PER_UNIT;
		}
	      else
		maxsize = -1;
	      /* Two decls: they overlap only if they are the same decl
		 with overlapping ranges.  */
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      /* Two indirect accesses: provably disjoint only when they
		 go through the same pointer with non-overlapping ranges.  */
	      else if (TREE_CODE (src_base) == INDIRECT_REF
		       && TREE_CODE (dest_base) == INDIRECT_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0)
		      || ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr (fn, 3, dest, src, len);
	    }
	  return NULL_TREE;
	}

      /* memcpy/mempcpy: try to fold into a single load/store pair.
	 This needs a constant length.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
	 This logic lose for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* For a pointer-to-array operand whose total size differs from
	 LEN, use the element type instead.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (srctype
	  && TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (desttype
	  && TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      /* Both pointed-to types must have a constant size and must not
	 be volatile for a scalar copy to be valid.  */
      if (!srctype || !desttype
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
	  || TYPE_VOLATILE (srctype)
	  || TYPE_VOLATILE (desttype))
	return NULL_TREE;

      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      if (dest_align < (int) TYPE_ALIGN (desttype)
	  || src_align < (int) TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is evaluated twice below (store and returned value);
	 save it unless the value is unused.  */
      if (!ignore)
	dest = builtin_save_expr (dest);

      srcvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srcvar = build_fold_indirect_ref (src);
	  if (TREE_THIS_VOLATILE (srcvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
	    srcvar = NULL_TREE;
	  /* With memcpy, it is possible to bypass aliasing rules, so without
	     this check i.e. execute/20060930-2.c would be misoptimized,
	     because it use conflicting alias set to hold argument for the
	     memcpy call.  This check is probably unnecessary with
	     -fno-strict-aliasing.  Similarly for destvar.  See also
	     PR29286.  */
	  else if (!var_decl_component_p (srcvar))
	    srcvar = NULL_TREE;
	}

      destvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  destvar = build_fold_indirect_ref (dest);
	  if (TREE_THIS_VOLATILE (destvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
	    destvar = NULL_TREE;
	  else if (!var_decl_component_p (destvar))
	    destvar = NULL_TREE;
	}

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* When only one side gave a usable variable, re-access the other
	 side through a (possibly under-aligned variant of) that side's
	 type.  */
      if (srcvar == NULL_TREE)
	{
	  tree srcptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
	    return NULL_TREE;

	  srctype = build_qualified_type (desttype, 0);
	  if (src_align < (int) TYPE_ALIGN (srctype))
	    {
	      if (AGGREGATE_TYPE_P (srctype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
		return NULL_TREE;

	      srctype = build_variant_type_copy (srctype);
	      TYPE_ALIGN (srctype) = src_align;
	      TYPE_USER_ALIGN (srctype) = 1;
	      TYPE_PACKED (srctype) = 1;
	    }
	  srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
	  src = fold_convert (srcptype, src);
	  srcvar = build_fold_indirect_ref (src);
	}
      else if (destvar == NULL_TREE)
	{
	  tree destptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
	    return NULL_TREE;

	  desttype = build_qualified_type (srctype, 0);
	  if (dest_align < (int) TYPE_ALIGN (desttype))
	    {
	      if (AGGREGATE_TYPE_P (desttype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
		return NULL_TREE;

	      desttype = build_variant_type_copy (desttype);
	      TYPE_ALIGN (desttype) = dest_align;
	      TYPE_USER_ALIGN (desttype) = 1;
	      TYPE_PACKED (desttype) = 1;
	    }
	  destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
	  dest = fold_convert (destptype, dest);
	  destvar = build_fold_indirect_ref (dest);
	}

      /* Build the replacement store, converting the loaded value to
	 the destination type when the types differ.  */
      if (srctype == desttype
	  || (gimple_in_ssa_p (cfun)
	      && useless_type_conversion_p (desttype, srctype)))
	expr = srcvar;
      else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
	   || POINTER_TYPE_P (TREE_TYPE (srcvar)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
	      || POINTER_TYPE_P (TREE_TYPE (destvar))))
	expr = fold_convert (TREE_TYPE (destvar), srcvar);
      else
	expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
    }

  if (ignore)
    return expr;

  if (endp == 0 || endp == 3)
    return omit_one_operand (type, dest, expr);

  /* EXPR == LEN means the src == dest case above: nothing to emit
     besides the returned pointer.  */
  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy-style: return DEST + LEN - 1.  */
  if (endp == 2)
    len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
		       ssize_int (1));

  len = fold_convert (sizetype, len);
  dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert (type, dest);
  if (expr)
    dest = omit_one_operand (type, dest, expr);
  return dest;
}
9180
9181 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9182 If LEN is not NULL, it represents the length of the string to be
9183 copied. Return NULL_TREE if no simplification can be made. */
9184
9185 tree
9186 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9187 {
9188 tree fn;
9189
9190 if (!validate_arg (dest, POINTER_TYPE)
9191 || !validate_arg (src, POINTER_TYPE))
9192 return NULL_TREE;
9193
9194 /* If SRC and DEST are the same (and not volatile), return DEST. */
9195 if (operand_equal_p (src, dest, 0))
9196 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9197
9198 if (optimize_function_for_size_p (cfun))
9199 return NULL_TREE;
9200
9201 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9202 if (!fn)
9203 return NULL_TREE;
9204
9205 if (!len)
9206 {
9207 len = c_strlen (src, 1);
9208 if (! len || TREE_SIDE_EFFECTS (len))
9209 return NULL_TREE;
9210 }
9211
9212 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9213 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9214 build_call_expr (fn, 3, dest, src, len));
9215 }
9216
9217 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9218 If SLEN is not NULL, it represents the length of the source string.
9219 Return NULL_TREE if no simplification can be made. */
9220
9221 tree
9222 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9223 {
9224 tree fn;
9225
9226 if (!validate_arg (dest, POINTER_TYPE)
9227 || !validate_arg (src, POINTER_TYPE)
9228 || !validate_arg (len, INTEGER_TYPE))
9229 return NULL_TREE;
9230
9231 /* If the LEN parameter is zero, return DEST. */
9232 if (integer_zerop (len))
9233 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9234
9235 /* We can't compare slen with len as constants below if len is not a
9236 constant. */
9237 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9238 return NULL_TREE;
9239
9240 if (!slen)
9241 slen = c_strlen (src, 1);
9242
9243 /* Now, we must be passed a constant src ptr parameter. */
9244 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9245 return NULL_TREE;
9246
9247 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9248
9249 /* We do not support simplification of this case, though we do
9250 support it when expanding trees into RTL. */
9251 /* FIXME: generate a call to __builtin_memset. */
9252 if (tree_int_cst_lt (slen, len))
9253 return NULL_TREE;
9254
9255 /* OK transform into builtin memcpy. */
9256 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9257 if (!fn)
9258 return NULL_TREE;
9259 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9260 build_call_expr (fn, 3, dest, src, len));
9261 }
9262
9263 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9264 arguments to the call, and TYPE is its return type.
9265 Return NULL_TREE if no simplification can be made. */
9266
9267 static tree
9268 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9269 {
9270 if (!validate_arg (arg1, POINTER_TYPE)
9271 || !validate_arg (arg2, INTEGER_TYPE)
9272 || !validate_arg (len, INTEGER_TYPE))
9273 return NULL_TREE;
9274 else
9275 {
9276 const char *p1;
9277
9278 if (TREE_CODE (arg2) != INTEGER_CST
9279 || !host_integerp (len, 1))
9280 return NULL_TREE;
9281
9282 p1 = c_getstr (arg1);
9283 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9284 {
9285 char c;
9286 const char *r;
9287 tree tem;
9288
9289 if (target_char_cast (arg2, &c))
9290 return NULL_TREE;
9291
9292 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9293
9294 if (r == NULL)
9295 return build_int_cst (TREE_TYPE (arg1), 0);
9296
9297 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9298 size_int (r - p1));
9299 return fold_convert (type, tem);
9300 }
9301 return NULL_TREE;
9302 }
9303 }
9304
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2
   and length LEN.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_memcmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  /* Constant string contents, or NULL when unknown.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize the host memcmp result to {-1, 0, 1}.  */
      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* memcmp compares bytes as unsigned chars.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9367
9368 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9369 Return NULL_TREE if no simplification can be made. */
9370
9371 static tree
9372 fold_builtin_strcmp (tree arg1, tree arg2)
9373 {
9374 const char *p1, *p2;
9375
9376 if (!validate_arg (arg1, POINTER_TYPE)
9377 || !validate_arg (arg2, POINTER_TYPE))
9378 return NULL_TREE;
9379
9380 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9381 if (operand_equal_p (arg1, arg2, 0))
9382 return integer_zero_node;
9383
9384 p1 = c_getstr (arg1);
9385 p2 = c_getstr (arg2);
9386
9387 if (p1 && p2)
9388 {
9389 const int i = strcmp (p1, p2);
9390 if (i < 0)
9391 return integer_minus_one_node;
9392 else if (i > 0)
9393 return integer_one_node;
9394 else
9395 return integer_zero_node;
9396 }
9397
9398 /* If the second arg is "", return *(const unsigned char*)arg1. */
9399 if (p2 && *p2 == '\0')
9400 {
9401 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9402 tree cst_uchar_ptr_node
9403 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9404
9405 return fold_convert (integer_type_node,
9406 build1 (INDIRECT_REF, cst_uchar_node,
9407 fold_convert (cst_uchar_ptr_node,
9408 arg1)));
9409 }
9410
9411 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9412 if (p1 && *p1 == '\0')
9413 {
9414 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9415 tree cst_uchar_ptr_node
9416 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9417
9418 tree temp = fold_convert (integer_type_node,
9419 build1 (INDIRECT_REF, cst_uchar_node,
9420 fold_convert (cst_uchar_ptr_node,
9421 arg2)));
9422 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9423 }
9424
9425 return NULL_TREE;
9426 }
9427
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  /* Constant string contents, or NULL when unknown.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Everything constant: evaluate at compile time, normalizing the
     host strncmp result to {-1, 0, 1}.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      /* strncmp compares bytes as unsigned chars.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert (integer_type_node,
			   build1 (INDIRECT_REF, cst_uchar_node,
				   fold_convert (cst_uchar_ptr_node,
						 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9518
9519 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9520 ARG. Return NULL_TREE if no simplification can be made. */
9521
9522 static tree
9523 fold_builtin_signbit (tree arg, tree type)
9524 {
9525 tree temp;
9526
9527 if (!validate_arg (arg, REAL_TYPE))
9528 return NULL_TREE;
9529
9530 /* If ARG is a compile-time constant, determine the result. */
9531 if (TREE_CODE (arg) == REAL_CST
9532 && !TREE_OVERFLOW (arg))
9533 {
9534 REAL_VALUE_TYPE c;
9535
9536 c = TREE_REAL_CST (arg);
9537 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9538 return fold_convert (type, temp);
9539 }
9540
9541 /* If ARG is non-negative, the result is always zero. */
9542 if (tree_expr_nonnegative_p (arg))
9543 return omit_one_operand (type, integer_zero_node, arg);
9544
9545 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9546 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9547 return fold_build2 (LT_EXPR, type, arg,
9548 build_real (TREE_TYPE (arg), dconst0));
9549
9550 return NULL_TREE;
9551 }
9552
9553 /* Fold function call to builtin copysign, copysignf or copysignl with
9554 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9555 be made. */
9556
9557 static tree
9558 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9559 {
9560 tree tem;
9561
9562 if (!validate_arg (arg1, REAL_TYPE)
9563 || !validate_arg (arg2, REAL_TYPE))
9564 return NULL_TREE;
9565
9566 /* copysign(X,X) is X. */
9567 if (operand_equal_p (arg1, arg2, 0))
9568 return fold_convert (type, arg1);
9569
9570 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9571 if (TREE_CODE (arg1) == REAL_CST
9572 && TREE_CODE (arg2) == REAL_CST
9573 && !TREE_OVERFLOW (arg1)
9574 && !TREE_OVERFLOW (arg2))
9575 {
9576 REAL_VALUE_TYPE c1, c2;
9577
9578 c1 = TREE_REAL_CST (arg1);
9579 c2 = TREE_REAL_CST (arg2);
9580 /* c1.sign := c2.sign. */
9581 real_copysign (&c1, &c2);
9582 return build_real (type, c1);
9583 }
9584
9585 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9586 Remember to evaluate Y for side-effects. */
9587 if (tree_expr_nonnegative_p (arg2))
9588 return omit_one_operand (type,
9589 fold_build1 (ABS_EXPR, type, arg1),
9590 arg2);
9591
9592 /* Strip sign changing operations for the first argument. */
9593 tem = fold_strip_sign_ops (arg1);
9594 if (tem)
9595 return build_call_expr (fndecl, 2, tem, arg2);
9596
9597 return NULL_TREE;
9598 }
9599
9600 /* Fold a call to builtin isascii with argument ARG. */
9601
9602 static tree
9603 fold_builtin_isascii (tree arg)
9604 {
9605 if (!validate_arg (arg, INTEGER_TYPE))
9606 return NULL_TREE;
9607 else
9608 {
9609 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9610 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9611 build_int_cst (NULL_TREE,
9612 ~ (unsigned HOST_WIDE_INT) 0x7f));
9613 return fold_build2 (EQ_EXPR, integer_type_node,
9614 arg, integer_zero_node);
9615 }
9616 }
9617
9618 /* Fold a call to builtin toascii with argument ARG. */
9619
9620 static tree
9621 fold_builtin_toascii (tree arg)
9622 {
9623 if (!validate_arg (arg, INTEGER_TYPE))
9624 return NULL_TREE;
9625
9626 /* Transform toascii(c) -> (c & 0x7f). */
9627 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9628 build_int_cst (NULL_TREE, 0x7f));
9629 }
9630
9631 /* Fold a call to builtin isdigit with argument ARG. */
9632
9633 static tree
9634 fold_builtin_isdigit (tree arg)
9635 {
9636 if (!validate_arg (arg, INTEGER_TYPE))
9637 return NULL_TREE;
9638 else
9639 {
9640 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9641 /* According to the C standard, isdigit is unaffected by locale.
9642 However, it definitely is affected by the target character set. */
9643 unsigned HOST_WIDE_INT target_digit0
9644 = lang_hooks.to_target_charset ('0');
9645
9646 if (target_digit0 == 0)
9647 return NULL_TREE;
9648
9649 arg = fold_convert (unsigned_type_node, arg);
9650 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9651 build_int_cst (unsigned_type_node, target_digit0));
9652 return fold_build2 (LE_EXPR, integer_type_node, arg,
9653 build_int_cst (unsigned_type_node, 9));
9654 }
9655 }
9656
9657 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9658
9659 static tree
9660 fold_builtin_fabs (tree arg, tree type)
9661 {
9662 if (!validate_arg (arg, REAL_TYPE))
9663 return NULL_TREE;
9664
9665 arg = fold_convert (type, arg);
9666 if (TREE_CODE (arg) == REAL_CST)
9667 return fold_abs_const (arg, type);
9668 return fold_build1 (ABS_EXPR, type, arg);
9669 }
9670
9671 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9672
9673 static tree
9674 fold_builtin_abs (tree arg, tree type)
9675 {
9676 if (!validate_arg (arg, INTEGER_TYPE))
9677 return NULL_TREE;
9678
9679 arg = fold_convert (type, arg);
9680 if (TREE_CODE (arg) == INTEGER_CST)
9681 return fold_abs_const (arg, type);
9682 return fold_build1 (ABS_EXPR, type, arg);
9683 }
9684
/* Fold a call to builtin fmin or fmax with arguments ARG0 and ARG1.
   TYPE is the call's return type; MAX selects fmax over fmin.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand (type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand (type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand (type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert (type, arg0),
			    fold_convert (type, arg1));
    }
  return NULL_TREE;
}
9728
9729 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9730
9731 static tree
9732 fold_builtin_carg (tree arg, tree type)
9733 {
9734 if (validate_arg (arg, COMPLEX_TYPE)
9735 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9736 {
9737 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9738
9739 if (atan2_fn)
9740 {
9741 tree new_arg = builtin_save_expr (arg);
9742 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9743 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9744 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9745 }
9746 }
9747
9748 return NULL_TREE;
9749 }
9750
/* Fold a call to builtin logb/ilogb with argument ARG.  RETTYPE is the
   call's return type (a real type for logb, an integer type for
   ilogb).  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only a well-formed compile-time constant can be folded.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert (rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert (rettype, build_int_cst (NULL_TREE,
							 REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9791
/* Fold a call to builtin significand with argument ARG, if radix == 2.
   RETTYPE is the call's return type.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_significand (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only a well-formed compile-time constant can be folded.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert (rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9830
/* Fold a call to builtin frexp with arguments ARG0 (the value) and
   ARG1 (pointer to int receiving the exponent); we can assume the base
   is 2.  RETTYPE is the call's return type.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a well-formed compile-time constant can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand (rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9886
9887 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9888 then we can assume the base is two. If it's false, then we have to
9889 check the mode of the TYPE parameter in certain cases. */
9890
9891 static tree
9892 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9893 {
9894 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9895 {
9896 STRIP_NOPS (arg0);
9897 STRIP_NOPS (arg1);
9898
9899 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9900 if (real_zerop (arg0) || integer_zerop (arg1)
9901 || (TREE_CODE (arg0) == REAL_CST
9902 && !real_isfinite (&TREE_REAL_CST (arg0))))
9903 return omit_one_operand (type, arg0, arg1);
9904
9905 /* If both arguments are constant, then try to evaluate it. */
9906 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9907 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9908 && host_integerp (arg1, 0))
9909 {
9910 /* Bound the maximum adjustment to twice the range of the
9911 mode's valid exponents. Use abs to ensure the range is
9912 positive as a sanity check. */
9913 const long max_exp_adj = 2 *
9914 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9915 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9916
9917 /* Get the user-requested adjustment. */
9918 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9919
9920 /* The requested adjustment must be inside this range. This
9921 is a preliminary cap to avoid things like overflow, we
9922 may still fail to compute the result for other reasons. */
9923 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9924 {
9925 REAL_VALUE_TYPE initial_result;
9926
9927 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9928
9929 /* Ensure we didn't overflow. */
9930 if (! real_isinf (&initial_result))
9931 {
9932 const REAL_VALUE_TYPE trunc_result
9933 = real_value_truncate (TYPE_MODE (type), initial_result);
9934
9935 /* Only proceed if the target mode can hold the
9936 resulting value. */
9937 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9938 return build_real (type, trunc_result);
9939 }
9940 }
9941 }
9942 }
9943
9944 return NULL_TREE;
9945 }
9946
/* Fold a call to builtin modf.  ARG0 is the value, ARG1 the pointer
   through which the integral part is stored, RETTYPE the function's
   return type.  Returns the folded tree or NULL_TREE when no
   simplification is possible.  */

static tree
fold_builtin_modf (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold when the value argument is a real constant we can
     inspect at compile time.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in: modf stores the
     integral part through a pointer to the same real type.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac); the
	 MODIFY_EXPR must be marked as having side effects so it
	 isn't dropped.  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
			  build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1,
			  build_real (rettype, frac));
    }

  return NULL_TREE;
}
10002
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects which
   classification is requested (one of BUILT_IN_ISINF,
   BUILT_IN_ISINF_SIGN, BUILT_IN_ISFINITE, BUILT_IN_ISNAN).
   Returns the folded tree or NULL_TREE.  */

static tree
fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* If the mode has no infinities, isinf is identically zero.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  /* NOTE(review): the returned nodes here are plain int-typed
	     constants, not converted to TYPE — confirm callers accept
	     that.  */
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
	      ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* ARG is used twice below; save it to avoid duplicating
	   side effects.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr (signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr (isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 truth values.  */
	    signbit_call = fold_build2 (NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2 (NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* With neither NaNs nor infinities, every value is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* If the mode has no NaNs, isnan is identically zero.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* Non-constant: isnan(x) is x != x, i.e. an unordered compare
	 of ARG with itself; save ARG since it is used twice.  */
      arg = builtin_save_expr (arg);
      return fold_build2 (UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
10094
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Classify on |x|; save the expression since it is used several
     times below.  */
  arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).

     Built inside-out: each step wraps the previous RES in another
     COND_EXPR.  */

  tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest normalized value of MODE, i.e. the
     DBL_MIN analogue for the argument's type.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
  res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* The Inf and NaN tests are only emitted when the mode honors
     those values at all.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
10159
10160 /* Fold a call to an unordered comparison function such as
10161 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10162 being called and ARG0 and ARG1 are the arguments for the call.
10163 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10164 the opposite of the desired result. UNORDERED_CODE is used
10165 for modes that can hold NaNs and ORDERED_CODE is used for
10166 the rest. */
10167
10168 static tree
10169 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10170 enum tree_code unordered_code,
10171 enum tree_code ordered_code)
10172 {
10173 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10174 enum tree_code code;
10175 tree type0, type1;
10176 enum tree_code code0, code1;
10177 tree cmp_type = NULL_TREE;
10178
10179 type0 = TREE_TYPE (arg0);
10180 type1 = TREE_TYPE (arg1);
10181
10182 code0 = TREE_CODE (type0);
10183 code1 = TREE_CODE (type1);
10184
10185 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10186 /* Choose the wider of two real types. */
10187 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10188 ? type0 : type1;
10189 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10190 cmp_type = type0;
10191 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10192 cmp_type = type1;
10193
10194 arg0 = fold_convert (cmp_type, arg0);
10195 arg1 = fold_convert (cmp_type, arg1);
10196
10197 if (unordered_code == UNORDERED_EXPR)
10198 {
10199 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10200 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10201 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10202 }
10203
10204 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10205 : ordered_code;
10206 return fold_build1 (TRUTH_NOT_EXPR, type,
10207 fold_build2 (code, type, arg0, arg1));
10208 }
10209
10210 /* Fold a call to built-in function FNDECL with 0 arguments.
10211 IGNORE is true if the result of the function call is ignored. This
10212 function returns NULL_TREE if no simplification was possible. */
10213
10214 static tree
10215 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10216 {
10217 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10218 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10219 switch (fcode)
10220 {
10221 CASE_FLT_FN (BUILT_IN_INF):
10222 case BUILT_IN_INFD32:
10223 case BUILT_IN_INFD64:
10224 case BUILT_IN_INFD128:
10225 return fold_builtin_inf (type, true);
10226
10227 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10228 return fold_builtin_inf (type, false);
10229
10230 case BUILT_IN_CLASSIFY_TYPE:
10231 return fold_builtin_classify_type (NULL_TREE);
10232
10233 default:
10234 break;
10235 }
10236 return NULL_TREE;
10237 }
10238
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.
   Each case either delegates to a specialized fold_builtin_* helper
   or constant-folds via MPFR/MPC when the argument is constant.  */

static tree
fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {

    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
      return fold_builtin_fabs (arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (arg0, type);

    /* Complex-argument builtins: only fold when the argument really
       is a complex of a real type.  */
    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1 (CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE))
	return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos(arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos(arg0, type, fndecl, /*hyper=*/ true);

#ifdef HAVE_mpc
    /* Constant folding of complex math functions via the MPC
       library, only available when GCC was built with MPC.  */
    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
    break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
    break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
    break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
    break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
    break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
    break;
#endif

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (arg0, type);

    /* Real math functions constant-folded via MPFR; the optional
       REAL_VALUE_TYPE bounds restrict the valid input domain and the
       final flag selects inclusive/exclusive bounds.  */
    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
    break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
    break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_LOG):
      return fold_builtin_logarithm (fndecl, arg0, mpfr_log);

    CASE_FLT_FN (BUILT_IN_LOG2):
      return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);

    CASE_FLT_FN (BUILT_IN_LOG10):
      return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
    break;

    /* Bessel functions of the first and second kind.  */
    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    /* Rounding to integral values, in the real and integer domains.  */
    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (fndecl, arg0);

    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    /* Integer bit-twiddling builtins.  */
    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (arg0);

    /* Floating point classification builtins, dispatched through
       fold_builtin_classify with a canonical builtin index.  */
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);

    default:
      break;
    }

  return NULL_TREE;

}
10588
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.
   Each case either delegates to a specialized fold_builtin_* helper
   or constant-folds via MPFR when the arguments are constant.  */

static tree
fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Bessel functions of integral order.  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (fndecl, arg0, arg1, type);

    /* ldexp always has radix 2; scalbn/scalbln use the type's radix,
       which the helper must check.  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy is equivalent to strcpy;
	 substitute the call if strcpy is available.  */
      if (ignore)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
	  if (!fn)
	    break;

	  return build_call_expr (fn, 2, arg0, arg1);
	}
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);

    /* Unordered comparisons: the helper receives the comparison codes
       that give the OPPOSITE of the desired result.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The first argument of the _chk variants is the checking flag;
	 only fold when it is a side-effect-free integer.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10771
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.  IGNORE is true if the result of the function call is ignored.
   This function returns NULL_TREE if no simplification was possible.
   Each case either delegates to a specialized fold_builtin_* helper
   or constant-folds via MPFR when the arguments are constant.  */

static tree
fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE)
	  && validate_arg(arg2, REAL_TYPE))
	return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
    break;

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE)
	  && validate_arg(arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
    break;

    case BUILT_IN_MEMSET:
      return fold_builtin_memset (arg0, arg1, arg2, type, ignore);

    case BUILT_IN_BCOPY:
      /* bcopy(src, dst, n): note the swapped argument order relative
	 to memmove, hence arg1 before arg0 below.  */
      return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);

    case BUILT_IN_MEMCPY:
      return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);

    case BUILT_IN_MEMPCPY:
      return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);

    case BUILT_IN_MEMMOVE:
      return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);

    case BUILT_IN_STRNCAT:
      return fold_builtin_strncat (arg0, arg1, arg2);

    case BUILT_IN_STRNCPY:
      return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (arg0, arg1, arg2);;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (arg0, arg1, arg2, ignore);

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
				      ignore, fcode);

    case BUILT_IN_STRCAT_CHK:
      return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The first argument of the _chk variants is the checking flag;
	 only fold when it is a side-effect-free integer.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* Here the checking flag is the second argument (after the
	 stream).  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
				     ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10871
10872 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10873 ARG2, and ARG3. IGNORE is true if the result of the function call is
10874 ignored. This function returns NULL_TREE if no simplification was
10875 possible. */
10876
10877 static tree
10878 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10879 bool ignore)
10880 {
10881 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10882
10883 switch (fcode)
10884 {
10885 case BUILT_IN_MEMCPY_CHK:
10886 case BUILT_IN_MEMPCPY_CHK:
10887 case BUILT_IN_MEMMOVE_CHK:
10888 case BUILT_IN_MEMSET_CHK:
10889 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10890 NULL_TREE, ignore,
10891 DECL_FUNCTION_CODE (fndecl));
10892
10893 case BUILT_IN_STRNCPY_CHK:
10894 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10895
10896 case BUILT_IN_STRNCAT_CHK:
10897 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10898
10899 case BUILT_IN_FPRINTF_CHK:
10900 case BUILT_IN_VFPRINTF_CHK:
10901 if (!validate_arg (arg1, INTEGER_TYPE)
10902 || TREE_SIDE_EFFECTS (arg1))
10903 return NULL_TREE;
10904 else
10905 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10906 ignore, fcode);
10907 break;
10908
10909 default:
10910 break;
10911 }
10912 return NULL_TREE;
10913 }
10914
10915 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10916 arguments, where NARGS <= 4. IGNORE is true if the result of the
10917 function call is ignored. This function returns NULL_TREE if no
10918 simplification was possible. Note that this only folds builtins with
10919 fixed argument patterns. Foldings that do varargs-to-varargs
10920 transformations, or that match calls with more than 4 arguments,
10921 need to be handled with fold_builtin_varargs instead. */
10922
10923 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10924
10925 static tree
10926 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10927 {
10928 tree ret = NULL_TREE;
10929
10930 switch (nargs)
10931 {
10932 case 0:
10933 ret = fold_builtin_0 (fndecl, ignore);
10934 break;
10935 case 1:
10936 ret = fold_builtin_1 (fndecl, args[0], ignore);
10937 break;
10938 case 2:
10939 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10940 break;
10941 case 3:
10942 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10943 break;
10944 case 4:
10945 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10946 ignore);
10947 break;
10948 default:
10949 break;
10950 }
10951 if (ret)
10952 {
10953 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10954 TREE_NO_WARNING (ret) = 1;
10955 return ret;
10956 }
10957 return NULL_TREE;
10958 }
10959
10960 /* Builtins with folding operations that operate on "..." arguments
10961 need special handling; we need to store the arguments in a convenient
10962 data structure before attempting any folding. Fortunately there are
10963 only a few builtins that fall into this category. FNDECL is the
10964 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10965 result of the function call is ignored. */
10966
10967 static tree
10968 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10969 {
10970 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10971 tree ret = NULL_TREE;
10972
10973 switch (fcode)
10974 {
10975 case BUILT_IN_SPRINTF_CHK:
10976 case BUILT_IN_VSPRINTF_CHK:
10977 ret = fold_builtin_sprintf_chk (exp, fcode);
10978 break;
10979
10980 case BUILT_IN_SNPRINTF_CHK:
10981 case BUILT_IN_VSNPRINTF_CHK:
10982 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10983 break;
10984
10985 case BUILT_IN_FPCLASSIFY:
10986 ret = fold_builtin_fpclassify (exp);
10987 break;
10988
10989 default:
10990 break;
10991 }
10992 if (ret)
10993 {
10994 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10995 TREE_NO_WARNING (ret) = 1;
10996 return ret;
10997 }
10998 return NULL_TREE;
10999 }
11000
11001 /* Return true if FNDECL shouldn't be folded right now.
11002 If a built-in function has an inline attribute always_inline
11003 wrapper, defer folding it after always_inline functions have
11004 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11005 might not be performed. */
11006
11007 static bool
11008 avoid_folding_inline_builtin (tree fndecl)
11009 {
11010 return (DECL_DECLARED_INLINE_P (fndecl)
11011 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11012 && cfun
11013 && !cfun->always_inline_functions_inlined
11014 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11015 }
11016
11017 /* A wrapper function for builtin folding that prevents warnings for
11018 "statement without effect" and the like, caused by removing the
11019 call node earlier than the warning is generated. */
11020
11021 tree
11022 fold_call_expr (tree exp, bool ignore)
11023 {
11024 tree ret = NULL_TREE;
11025 tree fndecl = get_callee_fndecl (exp);
11026 if (fndecl
11027 && TREE_CODE (fndecl) == FUNCTION_DECL
11028 && DECL_BUILT_IN (fndecl)
11029 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11030 yet. Defer folding until we see all the arguments
11031 (after inlining). */
11032 && !CALL_EXPR_VA_ARG_PACK (exp))
11033 {
11034 int nargs = call_expr_nargs (exp);
11035
11036 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11037 instead last argument is __builtin_va_arg_pack (). Defer folding
11038 even in that case, until arguments are finalized. */
11039 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11040 {
11041 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11042 if (fndecl2
11043 && TREE_CODE (fndecl2) == FUNCTION_DECL
11044 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11045 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11046 return NULL_TREE;
11047 }
11048
11049 if (avoid_folding_inline_builtin (fndecl))
11050 return NULL_TREE;
11051
11052 /* FIXME: Don't use a list in this interface. */
11053 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11054 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
11055 else
11056 {
11057 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11058 {
11059 tree *args = CALL_EXPR_ARGP (exp);
11060 ret = fold_builtin_n (fndecl, args, nargs, ignore);
11061 }
11062 if (!ret)
11063 ret = fold_builtin_varargs (fndecl, exp, ignore);
11064 if (ret)
11065 {
11066 /* Propagate location information from original call to
11067 expansion of builtin. Otherwise things like
11068 maybe_emit_chk_warning, that operate on the expansion
11069 of a builtin, will use the wrong location information. */
11070 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
11071 {
11072 tree realret = ret;
11073 if (TREE_CODE (ret) == NOP_EXPR)
11074 realret = TREE_OPERAND (ret, 0);
11075 if (CAN_HAVE_LOCATION_P (realret)
11076 && !EXPR_HAS_LOCATION (realret))
11077 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
11078 }
11079 return ret;
11080 }
11081 }
11082 }
11083 return NULL_TREE;
11084 }
11085
11086 /* Conveniently construct a function call expression. FNDECL names the
11087 function to be called and ARGLIST is a TREE_LIST of arguments. */
11088
11089 tree
11090 build_function_call_expr (tree fndecl, tree arglist)
11091 {
11092 tree fntype = TREE_TYPE (fndecl);
11093 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11094 int n = list_length (arglist);
11095 tree *argarray = (tree *) alloca (n * sizeof (tree));
11096 int i;
11097
11098 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11099 argarray[i] = TREE_VALUE (arglist);
11100 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
11101 }
11102
11103 /* Conveniently construct a function call expression. FNDECL names the
11104 function to be called, N is the number of arguments, and the "..."
11105 parameters are the argument expressions. */
11106
11107 tree
11108 build_call_expr (tree fndecl, int n, ...)
11109 {
11110 va_list ap;
11111 tree fntype = TREE_TYPE (fndecl);
11112 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11113 tree *argarray = (tree *) alloca (n * sizeof (tree));
11114 int i;
11115
11116 va_start (ap, n);
11117 for (i = 0; i < n; i++)
11118 argarray[i] = va_arg (ap, tree);
11119 va_end (ap);
11120 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
11121 }
11122
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  If FN is the address of
   a built-in function, try to fold the call first; otherwise (or when no
   folding applies) return a plain CALL_EXPR.  */

tree
fold_builtin_call_array (tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array (type, fn, n, argarray);
	    }
	  /* always_inline wrappers (e.g. _FORTIFY_SOURCE) must not be
	     folded before they are inlined; build the call unfolded.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array (type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* The target folding hook takes a TREE_LIST, so cons the
		 array up into one (in reverse to preserve order).  */
	      tree arglist = NULL_TREE;
	      for (i = n - 1; i >= 0; i--)
		arglist = tree_cons (NULL_TREE, argarray[i], arglist);
	      ret = targetm.fold_builtin (fndecl, arglist, false);
	      if (ret)
		return ret;
	      return build_call_array (type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array (type, fn, n, argarray);
	  ret = fold_builtin_varargs (fndecl, exp, false);
	  /* Return the folded form if any, otherwise the call itself.  */
	  return ret ? ret : exp;
	}
    }

  return build_call_array (type, fn, n, argarray);
}
11183
11184 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11185 along with N new arguments specified as the "..." parameters. SKIP
11186 is the number of arguments in EXP to be omitted. This function is used
11187 to do varargs-to-varargs transformations. */
11188
11189 static tree
11190 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11191 {
11192 int oldnargs = call_expr_nargs (exp);
11193 int nargs = oldnargs - skip + n;
11194 tree fntype = TREE_TYPE (fndecl);
11195 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11196 tree *buffer;
11197
11198 if (n > 0)
11199 {
11200 int i, j;
11201 va_list ap;
11202
11203 buffer = XALLOCAVEC (tree, nargs);
11204 va_start (ap, n);
11205 for (i = 0; i < n; i++)
11206 buffer[i] = va_arg (ap, tree);
11207 va_end (ap);
11208 for (j = skip; j < oldnargs; j++, i++)
11209 buffer[i] = CALL_EXPR_ARG (exp, j);
11210 }
11211 else
11212 buffer = CALL_EXPR_ARGP (exp) + skip;
11213
11214 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
11215 }
11216
11217 /* Validate a single argument ARG against a tree code CODE representing
11218 a type. */
11219
11220 static bool
11221 validate_arg (const_tree arg, enum tree_code code)
11222 {
11223 if (!arg)
11224 return false;
11225 else if (code == POINTER_TYPE)
11226 return POINTER_TYPE_P (TREE_TYPE (arg));
11227 else if (code == INTEGER_TYPE)
11228 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11229 return code == TREE_CODE (TREE_TYPE (arg));
11230 }
11231
11232 /* This function validates the types of a function call argument list
11233 against a specified list of tree_codes. If the last specifier is a 0,
11234 that represents an ellipses, otherwise the last specifier must be a
11235 VOID_TYPE.
11236
11237 This is the GIMPLE version of validate_arglist. Eventually we want to
11238 completely convert builtins.c to work from GIMPLEs and the tree based
11239 validate_arglist will then be removed. */
11240
11241 bool
11242 validate_gimple_arglist (const_gimple call, ...)
11243 {
11244 enum tree_code code;
11245 bool res = 0;
11246 va_list ap;
11247 const_tree arg;
11248 size_t i;
11249
11250 va_start (ap, call);
11251 i = 0;
11252
11253 do
11254 {
11255 code = (enum tree_code) va_arg (ap, int);
11256 switch (code)
11257 {
11258 case 0:
11259 /* This signifies an ellipses, any further arguments are all ok. */
11260 res = true;
11261 goto end;
11262 case VOID_TYPE:
11263 /* This signifies an endlink, if no arguments remain, return
11264 true, otherwise return false. */
11265 res = (i == gimple_call_num_args (call));
11266 goto end;
11267 default:
11268 /* If no parameters remain or the parameter's code does not
11269 match the specified code, return false. Otherwise continue
11270 checking any remaining arguments. */
11271 arg = gimple_call_arg (call, i++);
11272 if (!validate_arg (arg, code))
11273 goto end;
11274 break;
11275 }
11276 }
11277 while (1);
11278
11279 /* We need gotos here since we can only have one VA_CLOSE in a
11280 function. */
11281 end: ;
11282 va_end (ap);
11283
11284 return res;
11285 }
11286
11287 /* This function validates the types of a function call argument list
11288 against a specified list of tree_codes. If the last specifier is a 0,
11289 that represents an ellipses, otherwise the last specifier must be a
11290 VOID_TYPE. */
11291
11292 bool
11293 validate_arglist (const_tree callexpr, ...)
11294 {
11295 enum tree_code code;
11296 bool res = 0;
11297 va_list ap;
11298 const_call_expr_arg_iterator iter;
11299 const_tree arg;
11300
11301 va_start (ap, callexpr);
11302 init_const_call_expr_arg_iterator (callexpr, &iter);
11303
11304 do
11305 {
11306 code = (enum tree_code) va_arg (ap, int);
11307 switch (code)
11308 {
11309 case 0:
11310 /* This signifies an ellipses, any further arguments are all ok. */
11311 res = true;
11312 goto end;
11313 case VOID_TYPE:
11314 /* This signifies an endlink, if no arguments remain, return
11315 true, otherwise return false. */
11316 res = !more_const_call_expr_args_p (&iter);
11317 goto end;
11318 default:
11319 /* If no parameters remain or the parameter's code does not
11320 match the specified code, return false. Otherwise continue
11321 checking any remaining arguments. */
11322 arg = next_const_call_expr_arg (&iter);
11323 if (!validate_arg (arg, code))
11324 goto end;
11325 break;
11326 }
11327 }
11328 while (1);
11329
11330 /* We need gotos here since we can only have one VA_CLOSE in a
11331 function. */
11332 end: ;
11333 va_end (ap);
11334
11335 return res;
11336 }
11337
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* Returning NULL_RTX signals the caller that no expansion was
     produced; presumably targets that define their own builtins
     override this hook.  */
  return NULL_RTX;
}
11349
11350 /* Returns true is EXP represents data that would potentially reside
11351 in a readonly section. */
11352
11353 static bool
11354 readonly_data_expr (tree exp)
11355 {
11356 STRIP_NOPS (exp);
11357
11358 if (TREE_CODE (exp) != ADDR_EXPR)
11359 return false;
11360
11361 exp = get_base_address (TREE_OPERAND (exp, 0));
11362 if (!exp)
11363 return false;
11364
11365 /* Make sure we call decl_readonly_section only for trees it
11366 can handle (since it returns true for everything it doesn't
11367 understand). */
11368 if (TREE_CODE (exp) == STRING_CST
11369 || TREE_CODE (exp) == CONSTRUCTOR
11370 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11371 return decl_readonly_section (exp, 0);
11372 else
11373 return false;
11374 }
11375
11376 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11377 to the call, and TYPE is its return type.
11378
11379 Return NULL_TREE if no simplification was possible, otherwise return the
11380 simplified form of the call as a tree.
11381
11382 The simplified form may be a constant or other expression which
11383 computes the same value, but in a more efficient manner (including
11384 calls to other builtin functions).
11385
11386 The call may contain arguments which need to be evaluated, but
11387 which are not useful to determine the result of the call. In
11388 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11389 COMPOUND_EXPR will be an argument which must be evaluated.
11390 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11391 COMPOUND_EXPR in the chain will contain the tree for the simplified
11392 form of the builtin function call. */
11393
11394 static tree
11395 fold_builtin_strstr (tree s1, tree s2, tree type)
11396 {
11397 if (!validate_arg (s1, POINTER_TYPE)
11398 || !validate_arg (s2, POINTER_TYPE))
11399 return NULL_TREE;
11400 else
11401 {
11402 tree fn;
11403 const char *p1, *p2;
11404
11405 p2 = c_getstr (s2);
11406 if (p2 == NULL)
11407 return NULL_TREE;
11408
11409 p1 = c_getstr (s1);
11410 if (p1 != NULL)
11411 {
11412 const char *r = strstr (p1, p2);
11413 tree tem;
11414
11415 if (r == NULL)
11416 return build_int_cst (TREE_TYPE (s1), 0);
11417
11418 /* Return an offset into the constant string argument. */
11419 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11420 s1, size_int (r - p1));
11421 return fold_convert (type, tem);
11422 }
11423
11424 /* The argument is const char *, and the result is char *, so we need
11425 a type conversion here to avoid a warning. */
11426 if (p2[0] == '\0')
11427 return fold_convert (type, s1);
11428
11429 if (p2[1] != '\0')
11430 return NULL_TREE;
11431
11432 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11433 if (!fn)
11434 return NULL_TREE;
11435
11436 /* New argument list transforming strstr(s1, s2) to
11437 strchr(s1, s2[0]). */
11438 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11439 }
11440 }
11441
11442 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11443 the call, and TYPE is its return type.
11444
11445 Return NULL_TREE if no simplification was possible, otherwise return the
11446 simplified form of the call as a tree.
11447
11448 The simplified form may be a constant or other expression which
11449 computes the same value, but in a more efficient manner (including
11450 calls to other builtin functions).
11451
11452 The call may contain arguments which need to be evaluated, but
11453 which are not useful to determine the result of the call. In
11454 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11455 COMPOUND_EXPR will be an argument which must be evaluated.
11456 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11457 COMPOUND_EXPR in the chain will contain the tree for the simplified
11458 form of the builtin function call. */
11459
11460 static tree
11461 fold_builtin_strchr (tree s1, tree s2, tree type)
11462 {
11463 if (!validate_arg (s1, POINTER_TYPE)
11464 || !validate_arg (s2, INTEGER_TYPE))
11465 return NULL_TREE;
11466 else
11467 {
11468 const char *p1;
11469
11470 if (TREE_CODE (s2) != INTEGER_CST)
11471 return NULL_TREE;
11472
11473 p1 = c_getstr (s1);
11474 if (p1 != NULL)
11475 {
11476 char c;
11477 const char *r;
11478 tree tem;
11479
11480 if (target_char_cast (s2, &c))
11481 return NULL_TREE;
11482
11483 r = strchr (p1, c);
11484
11485 if (r == NULL)
11486 return build_int_cst (TREE_TYPE (s1), 0);
11487
11488 /* Return an offset into the constant string argument. */
11489 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11490 s1, size_int (r - p1));
11491 return fold_convert (type, tem);
11492 }
11493 return NULL_TREE;
11494 }
11495 }
11496
11497 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11498 the call, and TYPE is its return type.
11499
11500 Return NULL_TREE if no simplification was possible, otherwise return the
11501 simplified form of the call as a tree.
11502
11503 The simplified form may be a constant or other expression which
11504 computes the same value, but in a more efficient manner (including
11505 calls to other builtin functions).
11506
11507 The call may contain arguments which need to be evaluated, but
11508 which are not useful to determine the result of the call. In
11509 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11510 COMPOUND_EXPR will be an argument which must be evaluated.
11511 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11512 COMPOUND_EXPR in the chain will contain the tree for the simplified
11513 form of the builtin function call. */
11514
11515 static tree
11516 fold_builtin_strrchr (tree s1, tree s2, tree type)
11517 {
11518 if (!validate_arg (s1, POINTER_TYPE)
11519 || !validate_arg (s2, INTEGER_TYPE))
11520 return NULL_TREE;
11521 else
11522 {
11523 tree fn;
11524 const char *p1;
11525
11526 if (TREE_CODE (s2) != INTEGER_CST)
11527 return NULL_TREE;
11528
11529 p1 = c_getstr (s1);
11530 if (p1 != NULL)
11531 {
11532 char c;
11533 const char *r;
11534 tree tem;
11535
11536 if (target_char_cast (s2, &c))
11537 return NULL_TREE;
11538
11539 r = strrchr (p1, c);
11540
11541 if (r == NULL)
11542 return build_int_cst (TREE_TYPE (s1), 0);
11543
11544 /* Return an offset into the constant string argument. */
11545 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11546 s1, size_int (r - p1));
11547 return fold_convert (type, tem);
11548 }
11549
11550 if (! integer_zerop (s2))
11551 return NULL_TREE;
11552
11553 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11554 if (!fn)
11555 return NULL_TREE;
11556
11557 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11558 return build_call_expr (fn, 2, s1, s2);
11559 }
11560 }
11561
11562 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11563 to the call, and TYPE is its return type.
11564
11565 Return NULL_TREE if no simplification was possible, otherwise return the
11566 simplified form of the call as a tree.
11567
11568 The simplified form may be a constant or other expression which
11569 computes the same value, but in a more efficient manner (including
11570 calls to other builtin functions).
11571
11572 The call may contain arguments which need to be evaluated, but
11573 which are not useful to determine the result of the call. In
11574 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11575 COMPOUND_EXPR will be an argument which must be evaluated.
11576 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11577 COMPOUND_EXPR in the chain will contain the tree for the simplified
11578 form of the builtin function call. */
11579
11580 static tree
11581 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11582 {
11583 if (!validate_arg (s1, POINTER_TYPE)
11584 || !validate_arg (s2, POINTER_TYPE))
11585 return NULL_TREE;
11586 else
11587 {
11588 tree fn;
11589 const char *p1, *p2;
11590
11591 p2 = c_getstr (s2);
11592 if (p2 == NULL)
11593 return NULL_TREE;
11594
11595 p1 = c_getstr (s1);
11596 if (p1 != NULL)
11597 {
11598 const char *r = strpbrk (p1, p2);
11599 tree tem;
11600
11601 if (r == NULL)
11602 return build_int_cst (TREE_TYPE (s1), 0);
11603
11604 /* Return an offset into the constant string argument. */
11605 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11606 s1, size_int (r - p1));
11607 return fold_convert (type, tem);
11608 }
11609
11610 if (p2[0] == '\0')
11611 /* strpbrk(x, "") == NULL.
11612 Evaluate and ignore s1 in case it had side-effects. */
11613 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11614
11615 if (p2[1] != '\0')
11616 return NULL_TREE; /* Really call strpbrk. */
11617
11618 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11619 if (!fn)
11620 return NULL_TREE;
11621
11622 /* New argument list transforming strpbrk(s1, s2) to
11623 strchr(s1, s2[0]). */
11624 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11625 }
11626 }
11627
11628 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11629 to the call.
11630
11631 Return NULL_TREE if no simplification was possible, otherwise return the
11632 simplified form of the call as a tree.
11633
11634 The simplified form may be a constant or other expression which
11635 computes the same value, but in a more efficient manner (including
11636 calls to other builtin functions).
11637
11638 The call may contain arguments which need to be evaluated, but
11639 which are not useful to determine the result of the call. In
11640 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11641 COMPOUND_EXPR will be an argument which must be evaluated.
11642 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11643 COMPOUND_EXPR in the chain will contain the tree for the simplified
11644 form of the builtin function call. */
11645
11646 static tree
11647 fold_builtin_strcat (tree dst, tree src)
11648 {
11649 if (!validate_arg (dst, POINTER_TYPE)
11650 || !validate_arg (src, POINTER_TYPE))
11651 return NULL_TREE;
11652 else
11653 {
11654 const char *p = c_getstr (src);
11655
11656 /* If the string length is zero, return the dst parameter. */
11657 if (p && *p == '\0')
11658 return dst;
11659
11660 return NULL_TREE;
11661 }
11662 }
11663
11664 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11665 arguments to the call.
11666
11667 Return NULL_TREE if no simplification was possible, otherwise return the
11668 simplified form of the call as a tree.
11669
11670 The simplified form may be a constant or other expression which
11671 computes the same value, but in a more efficient manner (including
11672 calls to other builtin functions).
11673
11674 The call may contain arguments which need to be evaluated, but
11675 which are not useful to determine the result of the call. In
11676 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11677 COMPOUND_EXPR will be an argument which must be evaluated.
11678 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11679 COMPOUND_EXPR in the chain will contain the tree for the simplified
11680 form of the builtin function call. */
11681
11682 static tree
11683 fold_builtin_strncat (tree dst, tree src, tree len)
11684 {
11685 if (!validate_arg (dst, POINTER_TYPE)
11686 || !validate_arg (src, POINTER_TYPE)
11687 || !validate_arg (len, INTEGER_TYPE))
11688 return NULL_TREE;
11689 else
11690 {
11691 const char *p = c_getstr (src);
11692
11693 /* If the requested length is zero, or the src parameter string
11694 length is zero, return the dst parameter. */
11695 if (integer_zerop (len) || (p && *p == '\0'))
11696 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11697
11698 /* If the requested len is greater than or equal to the string
11699 length, call strcat. */
11700 if (TREE_CODE (len) == INTEGER_CST && p
11701 && compare_tree_int (len, strlen (p)) >= 0)
11702 {
11703 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11704
11705 /* If the replacement _DECL isn't initialized, don't do the
11706 transformation. */
11707 if (!fn)
11708 return NULL_TREE;
11709
11710 return build_call_expr (fn, 2, dst, src);
11711 }
11712 return NULL_TREE;
11713 }
11714 }
11715
11716 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11717 to the call.
11718
11719 Return NULL_TREE if no simplification was possible, otherwise return the
11720 simplified form of the call as a tree.
11721
11722 The simplified form may be a constant or other expression which
11723 computes the same value, but in a more efficient manner (including
11724 calls to other builtin functions).
11725
11726 The call may contain arguments which need to be evaluated, but
11727 which are not useful to determine the result of the call. In
11728 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11729 COMPOUND_EXPR will be an argument which must be evaluated.
11730 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11731 COMPOUND_EXPR in the chain will contain the tree for the simplified
11732 form of the builtin function call. */
11733
11734 static tree
11735 fold_builtin_strspn (tree s1, tree s2)
11736 {
11737 if (!validate_arg (s1, POINTER_TYPE)
11738 || !validate_arg (s2, POINTER_TYPE))
11739 return NULL_TREE;
11740 else
11741 {
11742 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11743
11744 /* If both arguments are constants, evaluate at compile-time. */
11745 if (p1 && p2)
11746 {
11747 const size_t r = strspn (p1, p2);
11748 return size_int (r);
11749 }
11750
11751 /* If either argument is "", return NULL_TREE. */
11752 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11753 /* Evaluate and ignore both arguments in case either one has
11754 side-effects. */
11755 return omit_two_operands (size_type_node, size_zero_node,
11756 s1, s2);
11757 return NULL_TREE;
11758 }
11759 }
11760
11761 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11762 to the call.
11763
11764 Return NULL_TREE if no simplification was possible, otherwise return the
11765 simplified form of the call as a tree.
11766
11767 The simplified form may be a constant or other expression which
11768 computes the same value, but in a more efficient manner (including
11769 calls to other builtin functions).
11770
11771 The call may contain arguments which need to be evaluated, but
11772 which are not useful to determine the result of the call. In
11773 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11774 COMPOUND_EXPR will be an argument which must be evaluated.
11775 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11776 COMPOUND_EXPR in the chain will contain the tree for the simplified
11777 form of the builtin function call. */
11778
11779 static tree
11780 fold_builtin_strcspn (tree s1, tree s2)
11781 {
11782 if (!validate_arg (s1, POINTER_TYPE)
11783 || !validate_arg (s2, POINTER_TYPE))
11784 return NULL_TREE;
11785 else
11786 {
11787 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11788
11789 /* If both arguments are constants, evaluate at compile-time. */
11790 if (p1 && p2)
11791 {
11792 const size_t r = strcspn (p1, p2);
11793 return size_int (r);
11794 }
11795
11796 /* If the first argument is "", return NULL_TREE. */
11797 if (p1 && *p1 == '\0')
11798 {
11799 /* Evaluate and ignore argument s2 in case it has
11800 side-effects. */
11801 return omit_one_operand (size_type_node,
11802 size_zero_node, s2);
11803 }
11804
11805 /* If the second argument is "", return __builtin_strlen(s1). */
11806 if (p2 && *p2 == '\0')
11807 {
11808 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11809
11810 /* If the replacement _DECL isn't initialized, don't do the
11811 transformation. */
11812 if (!fn)
11813 return NULL_TREE;
11814
11815 return build_call_expr (fn, 1, s1);
11816 }
11817 return NULL_TREE;
11818 }
11819 }
11820
11821 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11822 to the call. IGNORE is true if the value returned
11823 by the builtin will be ignored. UNLOCKED is true is true if this
11824 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11825 the known length of the string. Return NULL_TREE if no simplification
11826 was possible. */
11827
11828 tree
11829 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11830 {
11831 /* If we're using an unlocked function, assume the other unlocked
11832 functions exist explicitly. */
11833 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11834 : implicit_built_in_decls[BUILT_IN_FPUTC];
11835 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11836 : implicit_built_in_decls[BUILT_IN_FWRITE];
11837
11838 /* If the return value is used, don't do the transformation. */
11839 if (!ignore)
11840 return NULL_TREE;
11841
11842 /* Verify the arguments in the original call. */
11843 if (!validate_arg (arg0, POINTER_TYPE)
11844 || !validate_arg (arg1, POINTER_TYPE))
11845 return NULL_TREE;
11846
11847 if (! len)
11848 len = c_strlen (arg0, 0);
11849
11850 /* Get the length of the string passed to fputs. If the length
11851 can't be determined, punt. */
11852 if (!len
11853 || TREE_CODE (len) != INTEGER_CST)
11854 return NULL_TREE;
11855
11856 switch (compare_tree_int (len, 1))
11857 {
11858 case -1: /* length is 0, delete the call entirely . */
11859 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11860
11861 case 0: /* length is 1, call fputc. */
11862 {
11863 const char *p = c_getstr (arg0);
11864
11865 if (p != NULL)
11866 {
11867 if (fn_fputc)
11868 return build_call_expr (fn_fputc, 2,
11869 build_int_cst (NULL_TREE, p[0]), arg1);
11870 else
11871 return NULL_TREE;
11872 }
11873 }
11874 /* FALLTHROUGH */
11875 case 1: /* length is greater than 1, call fwrite. */
11876 {
11877 /* If optimizing for size keep fputs. */
11878 if (optimize_function_for_size_p (cfun))
11879 return NULL_TREE;
11880 /* New argument list transforming fputs(string, stream) to
11881 fwrite(string, 1, len, stream). */
11882 if (fn_fwrite)
11883 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11884 else
11885 return NULL_TREE;
11886 }
11887 default:
11888 gcc_unreachable ();
11889 }
11890 return NULL_TREE;
11891 }
11892
11893 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11894 produced. False otherwise. This is done so that we don't output the error
11895 or warning twice or three times. */
11896
11897 bool
11898 fold_builtin_next_arg (tree exp, bool va_start_p)
11899 {
11900 tree fntype = TREE_TYPE (current_function_decl);
11901 int nargs = call_expr_nargs (exp);
11902 tree arg;
11903
11904 if (TYPE_ARG_TYPES (fntype) == 0
11905 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11906 == void_type_node))
11907 {
11908 error ("%<va_start%> used in function with fixed args");
11909 return true;
11910 }
11911
11912 if (va_start_p)
11913 {
11914 if (va_start_p && (nargs != 2))
11915 {
11916 error ("wrong number of arguments to function %<va_start%>");
11917 return true;
11918 }
11919 arg = CALL_EXPR_ARG (exp, 1);
11920 }
11921 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11922 when we checked the arguments and if needed issued a warning. */
11923 else
11924 {
11925 if (nargs == 0)
11926 {
11927 /* Evidently an out of date version of <stdarg.h>; can't validate
11928 va_start's second argument, but can still work as intended. */
11929 warning (0, "%<__builtin_next_arg%> called without an argument");
11930 return true;
11931 }
11932 else if (nargs > 1)
11933 {
11934 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11935 return true;
11936 }
11937 arg = CALL_EXPR_ARG (exp, 0);
11938 }
11939
11940 if (TREE_CODE (arg) == SSA_NAME)
11941 arg = SSA_NAME_VAR (arg);
11942
11943 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11944 or __builtin_next_arg (0) the first time we see it, after checking
11945 the arguments and if needed issuing a warning. */
11946 if (!integer_zerop (arg))
11947 {
11948 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11949
11950 /* Strip off all nops for the sake of the comparison. This
11951 is not quite the same as STRIP_NOPS. It does more.
11952 We must also strip off INDIRECT_EXPR for C++ reference
11953 parameters. */
11954 while (CONVERT_EXPR_P (arg)
11955 || TREE_CODE (arg) == INDIRECT_REF)
11956 arg = TREE_OPERAND (arg, 0);
11957 if (arg != last_parm)
11958 {
11959 /* FIXME: Sometimes with the tree optimizers we can get the
11960 not the last argument even though the user used the last
11961 argument. We just warn and set the arg to be the last
11962 argument so that we will get wrong-code because of
11963 it. */
11964 warning (0, "second parameter of %<va_start%> not last named argument");
11965 }
11966
11967 /* Undefined by C99 7.15.1.4p4 (va_start):
11968 "If the parameter parmN is declared with the register storage
11969 class, with a function or array type, or with a type that is
11970 not compatible with the type that results after application of
11971 the default argument promotions, the behavior is undefined."
11972 */
11973 else if (DECL_REGISTER (arg))
11974 warning (0, "undefined behaviour when second parameter of "
11975 "%<va_start%> is declared with %<register%> storage");
11976
11977 /* We want to verify the second parameter just once before the tree
11978 optimizers are run and then avoid keeping it in the tree,
11979 as otherwise we could warn even for correct code like:
11980 void foo (int i, ...)
11981 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11982 if (va_start_p)
11983 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11984 else
11985 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11986 }
11987 return false;
11988 }
11989
11990
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* Make sure the target's format characters (e.g. '%', "%s") are known
     before comparing against FMT_STR.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
        return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      call = build_call_expr (fn, 2, dest, fmt);
      /* The sprintf return value is then just the length of the constant
         format string; only build it when the caller uses it.  */
      if (!ignored)
        retval = build_int_cst (NULL_TREE, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
        return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
        {
          /* The return value is strlen (ORIG); punt unless it folds to
             an integer constant.  */
          retval = c_strlen (orig, 1);
          if (!retval || TREE_CODE (retval) != INTEGER_CST)
            return NULL_TREE;
        }
      call = build_call_expr (fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* Pair the strcpy call with the known return value, converted to
         sprintf's declared return type.  */
      retval = fold_convert
        (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
         retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
12077
12078 /* Expand a call EXP to __builtin_object_size. */
12079
12080 rtx
12081 expand_builtin_object_size (tree exp)
12082 {
12083 tree ost;
12084 int object_size_type;
12085 tree fndecl = get_callee_fndecl (exp);
12086
12087 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12088 {
12089 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12090 exp, fndecl);
12091 expand_builtin_trap ();
12092 return const0_rtx;
12093 }
12094
12095 ost = CALL_EXPR_ARG (exp, 1);
12096 STRIP_NOPS (ost);
12097
12098 if (TREE_CODE (ost) != INTEGER_CST
12099 || tree_int_cst_sgn (ost) < 0
12100 || compare_tree_int (ost, 3) > 0)
12101 {
12102 error ("%Klast argument of %D is not integer constant between 0 and 3",
12103 exp, fndecl);
12104 expand_builtin_trap ();
12105 return const0_rtx;
12106 }
12107
12108 object_size_type = tree_low_cst (ost, 0);
12109
12110 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12111 }
12112
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
                           enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is an integer for memset, a pointer otherwise.  */
  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* SIZE is the object size; without a constant we cannot reason about
     overflow here.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* SIZE of all-ones means "unknown"; otherwise a constant LEN
         exceeding SIZE is a guaranteed overflow: warn and keep the
         checking call.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
        {
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %D will always overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return NULL_RTX;
        }

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = built_in_decls[BUILT_IN_MEMCPY];
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = built_in_decls[BUILT_IN_MEMPCPY];
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = built_in_decls[BUILT_IN_MEMMOVE];
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = built_in_decls[BUILT_IN_MEMSET];
          break;
        default:
          break;
        }

      if (! fn)
        return NULL_RTX;

      /* Build the non-checking call.  Folding may wrap it in
         COMPOUND_EXPRs; expand those prefixes for side-effects only,
         then expand the call itself, propagating the tail-call flag.  */
      fn = build_call_expr (fn, 3, dest, src, len);
      STRIP_TYPE_NOPS (fn);
      while (TREE_CODE (fn) == COMPOUND_EXPR)
        {
          expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
                       EXPAND_NORMAL);
          fn = TREE_OPERAND (fn, 1);
        }
      if (TREE_CODE (fn) == CALL_EXPR)
        CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
        = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          /* mempcpy returns DEST + LEN.  */
          expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
              if (!fn)
                return NULL_RTX;
              fn = build_call_expr (fn, 4, dest, src, len, size);
              STRIP_TYPE_NOPS (fn);
              /* Same COMPOUND_EXPR unwrapping as above.  */
              while (TREE_CODE (fn) == COMPOUND_EXPR)
                {
                  expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
                               EXPAND_NORMAL);
                  fn = TREE_OPERAND (fn, 1);
                }
              if (TREE_CODE (fn) == CALL_EXPR)
                CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
12247
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the _chk builtin call; FCODE identifies which one, and thereby
   which argument positions hold the length and the object size.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out the length-like and object-size arguments for each
     builtin's argument layout.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* A non-constant size, or the all-ones "unknown" marker, proves
     nothing.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* Here LEN is really the source string: the overflow is certain
         only when its constant length is >= SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
        return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      /* Only interesting when the bound LEN itself is >= SIZE.  */
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
        return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
        {
          /* The bound would allow an overflow but the actual source
             length is unknown, so only "might overflow" is justified.  */
          warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return;
        }
      else if (tree_int_cst_lt (src, size))
        return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
              exp, get_callee_fndecl (exp));
}
12315
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  FCODE distinguishes the two.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */

  if (nargs < 4)
    return;
  dest = CALL_EXPR_ARG (exp, 0);
  flag = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* Non-constant size, or the all-ones "unknown" marker: nothing to
     check.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
        return;
    }
  else
    return;

  /* LEN characters plus the terminating NUL are written, so LEN >= SIZE
     guarantees an overflow.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
                0, "%Kcall to %D will always overflow destination buffer",
                exp, get_callee_fndecl (exp));
}
12374
12375 /* Emit warning if a free is called with address of a variable. */
12376
12377 static void
12378 maybe_emit_free_warning (tree exp)
12379 {
12380 tree arg = CALL_EXPR_ARG (exp, 0);
12381
12382 STRIP_NOPS (arg);
12383 if (TREE_CODE (arg) != ADDR_EXPR)
12384 return;
12385
12386 arg = get_base_address (TREE_OPERAND (arg, 0));
12387 if (arg == NULL || INDIRECT_REF_P (arg))
12388 return;
12389
12390 if (SSA_VAR_P (arg))
12391 warning_at (tree_nonartificial_location (exp),
12392 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12393 else
12394 warning_at (tree_nonartificial_location (exp),
12395 0, "%Kattempt to free a non-heap object", exp);
12396 }
12397
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  Returns the size as a size_type_node constant, or
   NULL_TREE when it cannot (yet) be determined.  */

tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be an integer constant between 0 and 3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
                          compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
                                             ? -1 : 0))
        ret = build_int_cstu (size_type_node, bytes);
    }

  if (ret)
    {
      /* Give up when the computed value doesn't fit the result type
         (NOTE(review): this relies on fit_double_type returning nonzero
         on overflow — confirm against double-int.c).  */
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
        ret = NULL_TREE;
    }

  return ret;
}
12453
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

tree
fold_builtin_memory_chk (tree fndecl,
                         tree dest, tree src, tree len, tree size,
                         tree maxlen, bool ignore,
                         enum built_in_function fcode)
{
  tree fn;

  /* The second argument is an integer for memset, a pointer otherwise.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
                        (fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
      else
        {
          tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
          return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
        }
    }

  /* A non-constant object size gives us nothing to check against.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of all-ones means "unknown", so the check can be dropped
     unconditionally.  Otherwise prove that LEN (or MAXLEN) fits.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
                  if (!fn)
                    return NULL_TREE;

                  return build_call_expr (fn, 4, dest, src, len, size);
                }
              return NULL_TREE;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 3, dest, src, len);
}
12547
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

tree
fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
                         tree maxlen, bool ignore,
                         enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* A non-constant object size gives us nothing to check against.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of all-ones means "unknown": the check can be dropped.
     Otherwise try to prove the copy fits.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
            {
              if (fcode == BUILT_IN_STPCPY_CHK)
                {
                  if (! ignore)
                    return NULL_TREE;

                  /* If return value of __stpcpy_chk is ignored,
                     optimize into __strcpy_chk.  */
                  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
                  if (!fn)
                    return NULL_TREE;

                  return build_call_expr (fn, 3, dest, src, size);
                }

              if (! len || TREE_SIDE_EFFECTS (len))
                return NULL_TREE;

              /* If c_strlen returned something, but not a constant,
                 transform __strcpy_chk into __memcpy_chk.  */
              fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
              if (!fn)
                return NULL_TREE;

              /* Copy strlen (SRC) + 1 bytes, including the NUL.  */
              len = size_binop (PLUS_EXPR, len, ssize_int (1));
              return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
                                   build_call_expr (fn, 4,
                                                    dest, src, len, size));
            }
        }
      else
        maxlen = len;

      /* The copy writes MAXLEN + 1 bytes, so require MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
        return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
                      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 2, dest, src);
}
12627
12628 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12629 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12630 length passed as third argument. */
12631
12632 tree
12633 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12634 tree maxlen)
12635 {
12636 tree fn;
12637
12638 if (!validate_arg (dest, POINTER_TYPE)
12639 || !validate_arg (src, POINTER_TYPE)
12640 || !validate_arg (len, INTEGER_TYPE)
12641 || !validate_arg (size, INTEGER_TYPE))
12642 return NULL_TREE;
12643
12644 if (! host_integerp (size, 1))
12645 return NULL_TREE;
12646
12647 if (! integer_all_onesp (size))
12648 {
12649 if (! host_integerp (len, 1))
12650 {
12651 /* If LEN is not constant, try MAXLEN too.
12652 For MAXLEN only allow optimizing into non-_ocs function
12653 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12654 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12655 return NULL_TREE;
12656 }
12657 else
12658 maxlen = len;
12659
12660 if (tree_int_cst_lt (size, maxlen))
12661 return NULL_TREE;
12662 }
12663
12664 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12665 fn = built_in_decls[BUILT_IN_STRNCPY];
12666 if (!fn)
12667 return NULL_TREE;
12668
12669 return build_call_expr (fn, 3, dest, src, len);
12670 }
12671
12672 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12673 are the arguments to the call. */
12674
12675 static tree
12676 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12677 {
12678 tree fn;
12679 const char *p;
12680
12681 if (!validate_arg (dest, POINTER_TYPE)
12682 || !validate_arg (src, POINTER_TYPE)
12683 || !validate_arg (size, INTEGER_TYPE))
12684 return NULL_TREE;
12685
12686 p = c_getstr (src);
12687 /* If the SRC parameter is "", return DEST. */
12688 if (p && *p == '\0')
12689 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12690
12691 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12692 return NULL_TREE;
12693
12694 /* If __builtin_strcat_chk is used, assume strcat is available. */
12695 fn = built_in_decls[BUILT_IN_STRCAT];
12696 if (!fn)
12697 return NULL_TREE;
12698
12699 return build_call_expr (fn, 2, dest, src);
12700 }
12701
12702 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12703 LEN, and SIZE. */
12704
12705 static tree
12706 fold_builtin_strncat_chk (tree fndecl,
12707 tree dest, tree src, tree len, tree size)
12708 {
12709 tree fn;
12710 const char *p;
12711
12712 if (!validate_arg (dest, POINTER_TYPE)
12713 || !validate_arg (src, POINTER_TYPE)
12714 || !validate_arg (size, INTEGER_TYPE)
12715 || !validate_arg (size, INTEGER_TYPE))
12716 return NULL_TREE;
12717
12718 p = c_getstr (src);
12719 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12720 if (p && *p == '\0')
12721 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12722 else if (integer_zerop (len))
12723 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12724
12725 if (! host_integerp (size, 1))
12726 return NULL_TREE;
12727
12728 if (! integer_all_onesp (size))
12729 {
12730 tree src_len = c_strlen (src, 1);
12731 if (src_len
12732 && host_integerp (src_len, 1)
12733 && host_integerp (len, 1)
12734 && ! tree_int_cst_lt (len, src_len))
12735 {
12736 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12737 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12738 if (!fn)
12739 return NULL_TREE;
12740
12741 return build_call_expr (fn, 3, dest, src, size);
12742 }
12743 return NULL_TREE;
12744 }
12745
12746 /* If __builtin_strncat_chk is used, assume strncat is available. */
12747 fn = built_in_decls[BUILT_IN_STRNCAT];
12748 if (!fn)
12749 return NULL_TREE;
12750
12751 return build_call_expr (fn, 3, dest, src, len);
12752 }
12753
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* A non-constant object size gives us nothing to check against.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          /* For sprintf only trust a %-free format when there are no
             variadic arguments beyond the format.  */
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = CALL_EXPR_ARG (exp, 4);
              if (validate_arg (arg, POINTER_TYPE))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! host_integerp (len, 1))
                    len = NULL_TREE;
                }
            }
        }
    }

  /* With a known (non-"unknown") object size, only fold when the output
     length is known and strictly smaller.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
                      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call dropping the flag and size arguments.  */
  return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
}
12844
/* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
fold_builtin_snprintf_chk (tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     __snprintf_chk (dest, len, flag, size, fmt, ...).  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The destination object size must be a compile-time constant for
     any reasoning below to be possible.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means the object size is unknown, in which case
     no buffer check is required.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* If the buffer could be smaller than the requested length,
	 the runtime check must stay.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the FLAG and SIZE arguments, keeping DEST, LEN and FMT
     (plus any varargs), and call the unchecked variant.  */
  return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
}
12921
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation, since
     putchar/puts do not return the same value printf would.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle a format that is exactly "%s" or contains no '%' at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* The va_list variants receive their arguments indirectly,
	     so the "%s" shortcut cannot be applied to them.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  /* We also need the printed string itself to be a literal.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr (fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr (fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr (fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr (fn_putchar, 1, arg);
    }

  /* CALL stays null when the matching putchar/puts decl was not
     available; emit the original call in that case.  */
  if (!call)
    return NULL_TREE;

  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13056
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation, since
     fputc/fputs do not return the same value fprintf would.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, transform to fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A %-free format with a trailing argument cannot be folded
	 safely (the argument is never consumed); leave it alone.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr (fn_fputc, 2, arg, fp);
    }

  /* CALL stays null when the matching fputc/fputs decl was not
     available; emit the original call in that case.  */
  if (!call)
    return NULL_TREE;
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13154
13155 /* Initialize format string characters in the target charset. */
13156
13157 static bool
13158 init_target_chars (void)
13159 {
13160 static bool init;
13161 if (!init)
13162 {
13163 target_newline = lang_hooks.to_target_charset ('\n');
13164 target_percent = lang_hooks.to_target_charset ('%');
13165 target_c = lang_hooks.to_target_charset ('c');
13166 target_s = lang_hooks.to_target_charset ('s');
13167 if (target_newline == 0 || target_percent == 0 || target_c == 0
13168 || target_s == 0)
13169 return false;
13170
13171 target_percent_c[0] = target_percent;
13172 target_percent_c[1] = target_c;
13173 target_percent_c[2] = '\0';
13174
13175 target_percent_s[0] = target_percent;
13176 target_percent_s[1] = target_s;
13177 target_percent_s[2] = '\0';
13178
13179 target_percent_s_newline[0] = target_percent;
13180 target_percent_s_newline[1] = target_s;
13181 target_percent_s_newline[2] = target_newline;
13182 target_percent_s_newline[3] = '\0';
13183
13184 init = true;
13185 }
13186 return true;
13187 }
13188
13189 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13190 and no overflow/underflow occurred. INEXACT is true if M was not
13191 exactly calculated. TYPE is the tree type for the result. This
13192 function assumes that you cleared the MPFR flags and then
13193 calculated M to see if anything subsequently set a flag prior to
13194 entering this function. Return NULL_TREE if any checks fail. */
13195
13196 static tree
13197 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13198 {
13199 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13200 overflow/underflow occurred. If -frounding-math, proceed iff the
13201 result of calling FUNC was exact. */
13202 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13203 && (!flag_rounding_math || !inexact))
13204 {
13205 REAL_VALUE_TYPE rr;
13206
13207 real_from_mpfr (&rr, m, type, GMP_RNDN);
13208 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13209 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13210 but the mpft_t is not, then we underflowed in the
13211 conversion. */
13212 if (real_isfinite (&rr)
13213 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13214 {
13215 REAL_VALUE_TYPE rmode;
13216
13217 real_convert (&rmode, TYPE_MODE (type), &rr);
13218 /* Proceed iff the specified mode can hold the value. */
13219 if (real_identical (&rmode, &rr))
13220 return build_real (type, rmode);
13221 }
13222 }
13223 return NULL_TREE;
13224 }
13225
#ifdef HAVE_mpc
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result,
   expected to be a COMPLEX_TYPE whose element type is TREE_TYPE (type).
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
      && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert the real and imaginary parts separately.  */
      real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&re) && real_isfinite (&im)
	  && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	  && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  /* Both components must also survive rounding into the
	     element mode of TYPE unchanged.  */
	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&re_mode, &re) && real_identical (&im_mode, &im))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
#endif /* HAVE_mpc */
13269
13270 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13271 FUNC on it and return the resulting value as a tree with type TYPE.
13272 If MIN and/or MAX are not NULL, then the supplied ARG must be
13273 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13274 acceptable values, otherwise they are not. The mpfr precision is
13275 set to the precision of TYPE. We assume that function FUNC returns
13276 zero if the result could be calculated exactly within the requested
13277 precision. */
13278
13279 static tree
13280 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13281 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13282 bool inclusive)
13283 {
13284 tree result = NULL_TREE;
13285
13286 STRIP_NOPS (arg);
13287
13288 /* To proceed, MPFR must exactly represent the target floating point
13289 format, which only happens when the target base equals two. */
13290 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13291 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13292 {
13293 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13294
13295 if (real_isfinite (ra)
13296 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13297 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13298 {
13299 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13300 const int prec = fmt->p;
13301 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13302 int inexact;
13303 mpfr_t m;
13304
13305 mpfr_init2 (m, prec);
13306 mpfr_from_real (m, ra, GMP_RNDN);
13307 mpfr_clear_flags ();
13308 inexact = func (m, m, rnd);
13309 result = do_mpfr_ckconv (m, type, inexact);
13310 mpfr_clear (m);
13311 }
13312 }
13313
13314 return result;
13315 }
13316
/* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
   function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      /* NaN and Inf arguments are left for the runtime library.  */
      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Honor a round-towards-zero target format, else round to
	     nearest.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  /* Clear the sticky flags so do_mpfr_ckconv only sees
	     exceptions raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
13361
/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the
   three-argument mpfr function FUNC on them and return the resulting
   value as a tree with type TYPE.  The mpfr precision is set to the
   precision of TYPE.  We assume that function FUNC returns zero if
   the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      /* NaN and Inf arguments are left for the runtime library.  */
      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Honor a round-towards-zero target format, else round to
	     nearest.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  /* Clear the sticky flags so do_mpfr_ckconv only sees
	     exceptions raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
13410
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value (cosine in the real part, sine in the
   imaginary part, i.e. cexpi semantics).
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* NaN and Inf arguments are left for the runtime library.  */
      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear the sticky flags so do_mpfr_ckconv only sees
	     exceptions raised by mpfr_sin_cos itself.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  /* Both results must convert cleanly for the fold to apply.  */
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13480
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  If MIN is not NULL,
   ARG2 must additionally be within that bound (inclusively iff
   INCLUSIVE).  The mpfr precision is set to the precision of TYPE.
   We assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  The
     order must also be a host-representable signed integer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* MPFR takes the order as a long, so the value must fit in one;
	 the argument must be finite and satisfy the optional bound.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Honor a round-towards-zero target format, else round to
	     nearest.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear the sticky flags so do_mpfr_ckconv only sees
	     exceptions raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13527
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      /* NaN and Inf arguments are left for the runtime library.  */
      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Honor a round-towards-zero target format, else round to
	     nearest.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13600
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer (lgamma poles).  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Honor a round-towards-zero target format, else round to
	     nearest.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear the sticky flags so do_mpfr_ckconv only sees
	     exceptions raised by mpfr_lgamma itself.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13665
#ifdef HAVE_mpc
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE
   (more precisely, of its element type).  We assume that function
   FUNC returns zero if the result could be calculated exactly within
   the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  The
     argument must be a complex constant with real element type.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* NaN and Inf components are left for the runtime library.  */
      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  /* Honor a round-towards-zero target format, else round to
	     nearest, in both the MPFR and MPC rounding modes.  */
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref(m), re, rnd);
	  mpfr_from_real (mpc_imagref(m), im, rnd);
	  /* Clear the sticky flags so do_mpc_ckconv only sees
	     exceptions raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact);
	  mpc_clear (m);
	}
    }

  return result;
}
#endif /* HAVE_mpc */
13712
13713 /* FIXME tuples.
13714 The functions below provide an alternate interface for folding
13715 builtin function calls presented as GIMPLE_CALL statements rather
13716 than as CALL_EXPRs. The folded result is still expressed as a
13717 tree. There is too much code duplication in the handling of
13718 varargs functions, and a more intrusive re-factoring would permit
13719 better sharing of code between the tree and statement-based
13720 versions of these functions. */
13721
13722 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13723 along with N new arguments specified as the "..." parameters. SKIP
13724 is the number of arguments in STMT to be omitted. This function is used
13725 to do varargs-to-varargs transformations. */
13726
13727 static tree
13728 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13729 {
13730 int oldnargs = gimple_call_num_args (stmt);
13731 int nargs = oldnargs - skip + n;
13732 tree fntype = TREE_TYPE (fndecl);
13733 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13734 tree *buffer;
13735 int i, j;
13736 va_list ap;
13737
13738 buffer = XALLOCAVEC (tree, nargs);
13739 va_start (ap, n);
13740 for (i = 0; i < n; i++)
13741 buffer[i] = va_arg (ap, tree);
13742 va_end (ap);
13743 for (j = skip; j < oldnargs; j++, i++)
13744 buffer[i] = gimple_call_arg (stmt, j);
13745
13746 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13747 }
13748
13749 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13750 a normal call should be emitted rather than expanding the function
13751 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13752
13753 static tree
13754 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13755 {
13756 tree dest, size, len, fn, fmt, flag;
13757 const char *fmt_str;
13758 int nargs = gimple_call_num_args (stmt);
13759
13760 /* Verify the required arguments in the original call. */
13761 if (nargs < 4)
13762 return NULL_TREE;
13763 dest = gimple_call_arg (stmt, 0);
13764 if (!validate_arg (dest, POINTER_TYPE))
13765 return NULL_TREE;
13766 flag = gimple_call_arg (stmt, 1);
13767 if (!validate_arg (flag, INTEGER_TYPE))
13768 return NULL_TREE;
13769 size = gimple_call_arg (stmt, 2);
13770 if (!validate_arg (size, INTEGER_TYPE))
13771 return NULL_TREE;
13772 fmt = gimple_call_arg (stmt, 3);
13773 if (!validate_arg (fmt, POINTER_TYPE))
13774 return NULL_TREE;
13775
13776 if (! host_integerp (size, 1))
13777 return NULL_TREE;
13778
13779 len = NULL_TREE;
13780
13781 if (!init_target_chars ())
13782 return NULL_TREE;
13783
13784 /* Check whether the format is a literal string constant. */
13785 fmt_str = c_getstr (fmt);
13786 if (fmt_str != NULL)
13787 {
13788 /* If the format doesn't contain % args or %%, we know the size. */
13789 if (strchr (fmt_str, target_percent) == 0)
13790 {
13791 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13792 len = build_int_cstu (size_type_node, strlen (fmt_str));
13793 }
13794 /* If the format is "%s" and first ... argument is a string literal,
13795 we know the size too. */
13796 else if (fcode == BUILT_IN_SPRINTF_CHK
13797 && strcmp (fmt_str, target_percent_s) == 0)
13798 {
13799 tree arg;
13800
13801 if (nargs == 5)
13802 {
13803 arg = gimple_call_arg (stmt, 4);
13804 if (validate_arg (arg, POINTER_TYPE))
13805 {
13806 len = c_strlen (arg, 1);
13807 if (! len || ! host_integerp (len, 1))
13808 len = NULL_TREE;
13809 }
13810 }
13811 }
13812 }
13813
13814 if (! integer_all_onesp (size))
13815 {
13816 if (! len || ! tree_int_cst_lt (len, size))
13817 return NULL_TREE;
13818 }
13819
13820 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13821 or if format doesn't contain % chars or is "%s". */
13822 if (! integer_zerop (flag))
13823 {
13824 if (fmt_str == NULL)
13825 return NULL_TREE;
13826 if (strchr (fmt_str, target_percent) != NULL
13827 && strcmp (fmt_str, target_percent_s))
13828 return NULL_TREE;
13829 }
13830
13831 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13832 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13833 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13834 if (!fn)
13835 return NULL_TREE;
13836
13837 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13838 }
13839
13840 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13841 a normal call should be emitted rather than expanding the function
13842 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13843 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13844 passed as second argument. */
13845
13846 tree
13847 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13848 enum built_in_function fcode)
13849 {
13850 tree dest, size, len, fn, fmt, flag;
13851 const char *fmt_str;
13852
13853 /* Verify the required arguments in the original call. */
13854 if (gimple_call_num_args (stmt) < 5)
13855 return NULL_TREE;
13856 dest = gimple_call_arg (stmt, 0);
13857 if (!validate_arg (dest, POINTER_TYPE))
13858 return NULL_TREE;
13859 len = gimple_call_arg (stmt, 1);
13860 if (!validate_arg (len, INTEGER_TYPE))
13861 return NULL_TREE;
13862 flag = gimple_call_arg (stmt, 2);
13863 if (!validate_arg (flag, INTEGER_TYPE))
13864 return NULL_TREE;
13865 size = gimple_call_arg (stmt, 3);
13866 if (!validate_arg (size, INTEGER_TYPE))
13867 return NULL_TREE;
13868 fmt = gimple_call_arg (stmt, 4);
13869 if (!validate_arg (fmt, POINTER_TYPE))
13870 return NULL_TREE;
13871
13872 if (! host_integerp (size, 1))
13873 return NULL_TREE;
13874
13875 if (! integer_all_onesp (size))
13876 {
13877 if (! host_integerp (len, 1))
13878 {
13879 /* If LEN is not constant, try MAXLEN too.
13880 For MAXLEN only allow optimizing into non-_ocs function
13881 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13882 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13883 return NULL_TREE;
13884 }
13885 else
13886 maxlen = len;
13887
13888 if (tree_int_cst_lt (size, maxlen))
13889 return NULL_TREE;
13890 }
13891
13892 if (!init_target_chars ())
13893 return NULL_TREE;
13894
13895 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13896 or if format doesn't contain % chars or is "%s". */
13897 if (! integer_zerop (flag))
13898 {
13899 fmt_str = c_getstr (fmt);
13900 if (fmt_str == NULL)
13901 return NULL_TREE;
13902 if (strchr (fmt_str, target_percent) != NULL
13903 && strcmp (fmt_str, target_percent_s))
13904 return NULL_TREE;
13905 }
13906
13907 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13908 available. */
13909 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13910 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13911 if (!fn)
13912 return NULL_TREE;
13913
13914 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13915 }
13916
13917 /* Builtins with folding operations that operate on "..." arguments
13918 need special handling; we need to store the arguments in a convenient
13919 data structure before attempting any folding. Fortunately there are
13920 only a few builtins that fall into this category. FNDECL is the
13921 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13922 result of the function call is ignored. */
13923
13924 static tree
13925 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13926 {
13927 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13928 tree ret = NULL_TREE;
13929
13930 switch (fcode)
13931 {
13932 case BUILT_IN_SPRINTF_CHK:
13933 case BUILT_IN_VSPRINTF_CHK:
13934 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13935 break;
13936
13937 case BUILT_IN_SNPRINTF_CHK:
13938 case BUILT_IN_VSNPRINTF_CHK:
13939 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13940
13941 default:
13942 break;
13943 }
13944 if (ret)
13945 {
13946 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13947 TREE_NO_WARNING (ret) = 1;
13948 return ret;
13949 }
13950 return NULL_TREE;
13951 }
13952
13953 /* A wrapper function for builtin folding that prevents warnings for
13954 "statement without effect" and the like, caused by removing the
13955 call node earlier than the warning is generated. */
13956
13957 tree
13958 fold_call_stmt (gimple stmt, bool ignore)
13959 {
13960 tree ret = NULL_TREE;
13961 tree fndecl = gimple_call_fndecl (stmt);
13962 if (fndecl
13963 && TREE_CODE (fndecl) == FUNCTION_DECL
13964 && DECL_BUILT_IN (fndecl)
13965 && !gimple_call_va_arg_pack_p (stmt))
13966 {
13967 int nargs = gimple_call_num_args (stmt);
13968
13969 if (avoid_folding_inline_builtin (fndecl))
13970 return NULL_TREE;
13971 /* FIXME: Don't use a list in this interface. */
13972 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13973 {
13974 tree arglist = NULL_TREE;
13975 int i;
13976 for (i = nargs - 1; i >= 0; i--)
13977 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13978 return targetm.fold_builtin (fndecl, arglist, ignore);
13979 }
13980 else
13981 {
13982 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13983 {
13984 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13985 int i;
13986 for (i = 0; i < nargs; i++)
13987 args[i] = gimple_call_arg (stmt, i);
13988 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13989 }
13990 if (!ret)
13991 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13992 if (ret)
13993 {
13994 /* Propagate location information from original call to
13995 expansion of builtin. Otherwise things like
13996 maybe_emit_chk_warning, that operate on the expansion
13997 of a builtin, will use the wrong location information. */
13998 if (gimple_has_location (stmt))
13999 {
14000 tree realret = ret;
14001 if (TREE_CODE (ret) == NOP_EXPR)
14002 realret = TREE_OPERAND (ret, 0);
14003 if (CAN_HAVE_LOCATION_P (realret)
14004 && !EXPR_HAS_LOCATION (realret))
14005 SET_EXPR_LOCATION (realret, gimple_location (stmt));
14006 return realret;
14007 }
14008 return ret;
14009 }
14010 }
14011 }
14012 return NULL_TREE;
14013 }