]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/builtins.c
Merge dataflow branch into mainline
[thirdparty/gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tree-gimple.h"
32 #include "flags.h"
33 #include "regs.h"
34 #include "hard-reg-set.h"
35 #include "except.h"
36 #include "function.h"
37 #include "insn-config.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "predict.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
53
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
57
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
61
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
64 {
65 #include "builtins.def"
66 };
67 #undef DEF_BUILTIN
68
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
76
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_alloca (tree, rtx);
140 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
141 static rtx expand_builtin_frame_address (tree, tree);
142 static rtx expand_builtin_fputs (tree, rtx, bool);
143 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
146 static tree stabilize_va_list (tree, int);
147 static rtx expand_builtin_expect (tree, rtx);
148 static tree fold_builtin_constant_p (tree);
149 static tree fold_builtin_expect (tree);
150 static tree fold_builtin_classify_type (tree);
151 static tree fold_builtin_strlen (tree);
152 static tree fold_builtin_inf (tree, int);
153 static tree fold_builtin_nan (tree, tree, int);
154 static tree rewrite_call_expr (tree, int, tree, int, ...);
155 static bool validate_arg (tree, enum tree_code code);
156 static bool integer_valued_real_p (tree);
157 static tree fold_trunc_transparent_mathfn (tree, tree);
158 static bool readonly_data_expr (tree);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_sqrt (tree, tree);
162 static tree fold_builtin_cbrt (tree, tree);
163 static tree fold_builtin_pow (tree, tree, tree, tree);
164 static tree fold_builtin_powi (tree, tree, tree, tree);
165 static tree fold_builtin_cos (tree, tree, tree);
166 static tree fold_builtin_cosh (tree, tree, tree);
167 static tree fold_builtin_tan (tree, tree);
168 static tree fold_builtin_trunc (tree, tree);
169 static tree fold_builtin_floor (tree, tree);
170 static tree fold_builtin_ceil (tree, tree);
171 static tree fold_builtin_round (tree, tree);
172 static tree fold_builtin_int_roundingfn (tree, tree);
173 static tree fold_builtin_bitop (tree, tree);
174 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
175 static tree fold_builtin_strchr (tree, tree, tree);
176 static tree fold_builtin_memchr (tree, tree, tree, tree);
177 static tree fold_builtin_memcmp (tree, tree, tree);
178 static tree fold_builtin_strcmp (tree, tree);
179 static tree fold_builtin_strncmp (tree, tree, tree);
180 static tree fold_builtin_signbit (tree, tree);
181 static tree fold_builtin_copysign (tree, tree, tree, tree);
182 static tree fold_builtin_isascii (tree);
183 static tree fold_builtin_toascii (tree);
184 static tree fold_builtin_isdigit (tree);
185 static tree fold_builtin_fabs (tree, tree);
186 static tree fold_builtin_abs (tree, tree);
187 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 enum tree_code);
189 static tree fold_builtin_n (tree, tree *, int, bool);
190 static tree fold_builtin_0 (tree, bool);
191 static tree fold_builtin_1 (tree, tree, bool);
192 static tree fold_builtin_2 (tree, tree, tree, bool);
193 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
194 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
195 static tree fold_builtin_varargs (tree, tree, bool);
196
197 static tree fold_builtin_strpbrk (tree, tree, tree);
198 static tree fold_builtin_strstr (tree, tree, tree);
199 static tree fold_builtin_strrchr (tree, tree, tree);
200 static tree fold_builtin_strcat (tree, tree);
201 static tree fold_builtin_strncat (tree, tree, tree);
202 static tree fold_builtin_strspn (tree, tree);
203 static tree fold_builtin_strcspn (tree, tree);
204 static tree fold_builtin_sprintf (tree, tree, tree, int);
205
206 static rtx expand_builtin_object_size (tree);
207 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
208 enum built_in_function);
209 static void maybe_emit_chk_warning (tree, enum built_in_function);
210 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
211 static tree fold_builtin_object_size (tree, tree);
212 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
213 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
214 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
215 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
216 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
217 enum built_in_function);
218 static bool init_target_chars (void);
219
220 static unsigned HOST_WIDE_INT target_newline;
221 static unsigned HOST_WIDE_INT target_percent;
222 static unsigned HOST_WIDE_INT target_c;
223 static unsigned HOST_WIDE_INT target_s;
224 static char target_percent_c[3];
225 static char target_percent_s[3];
226 static char target_percent_s_newline[4];
227 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
228 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
229 static tree do_mpfr_arg2 (tree, tree, tree,
230 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
231 static tree do_mpfr_arg3 (tree, tree, tree, tree,
232 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
233 static tree do_mpfr_sincos (tree, tree, tree);
234 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
235 static tree do_mpfr_bessel_n (tree, tree, tree,
236 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
237 const REAL_VALUE_TYPE *, bool);
238 static tree do_mpfr_remquo (tree, tree, tree);
239 static tree do_mpfr_lgamma_r (tree, tree, tree);
240 #endif
241
242 /* Return true if NODE should be considered for inline expansion regardless
243 of the optimization level. This means whenever a function is invoked with
244 its "internal" name, which normally contains the prefix "__builtin". */
245
246 static bool called_as_built_in (tree node)
247 {
248 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
249 if (strncmp (name, "__builtin_", 10) == 0)
250 return true;
251 if (strncmp (name, "__sync_", 7) == 0)
252 return true;
253 return false;
254 }
255
256 /* Return the alignment in bits of EXP, a pointer valued expression.
257 But don't return more than MAX_ALIGN no matter what.
258 The alignment returned is, by default, the alignment of the thing that
259 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260
261 Otherwise, look at the expression to see if we can do better, i.e., if the
262 expression is actually pointing at an object whose alignment is tighter. */
263
int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  /* Only pointer-typed expressions carry pointee alignment information.  */
  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the declared alignment of the pointed-to type, capped at
     the caller-supplied limit.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Walk down through the expression looking for a tighter bound.  Each
     case either refines ALIGN/MAX_ALIGN and descends, or returns.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  /* A conversion may change the pointed-to type; use the inner
	     pointee's alignment if the operand is still a pointer.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of it,
	     i.e. until the addend cannot disturb that alignment.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  inner = max_align;
	  if (handled_component_p (exp))
	    {
	      HOST_WIDE_INT bitsize, bitpos;
	      tree offset;
	      enum machine_mode mode;
	      int unsignedp, volatilep;

	      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					 &mode, &unsignedp, &volatilep, true);
	      /* A constant bit position can only guarantee alignment up to
		 its lowest set bit.  */
	      if (bitpos)
		inner = MIN (inner, (unsigned) (bitpos & -bitpos));
	      if (offset && TREE_CODE (offset) == PLUS_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 1))
	        {
		  /* Any overflow in calculating offset_bits won't change
		     the alignment.  */
		  unsigned offset_bits
		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		       * BITS_PER_UNIT);

		  if (offset_bits)
		    inner = MIN (inner, (offset_bits & -offset_bits));
		  /* Strip the constant part and examine the remainder
		     below (it may be a MULT_EXPR stride).  */
		  offset = TREE_OPERAND (offset, 0);
		}
	      if (offset && TREE_CODE (offset) == MULT_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 1))
	        {
		  /* Any overflow in calculating offset_factor won't change
		     the alignment.  */
		  unsigned offset_factor
		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		       * BITS_PER_UNIT);

		  if (offset_factor)
		    inner = MIN (inner, (offset_factor & -offset_factor));
		}
	      else if (offset)
		/* A variable offset with unknown stride can still only
		   misalign by whole bytes.  */
		inner = MIN (inner, BITS_PER_UNIT);
	    }
	  /* Combine INNER with what the referenced object itself
	     guarantees.  */
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (DECL_P (exp))
	    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
	  else if (CONSTANT_CLASS_P (exp))
	    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
	  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
		   || TREE_CODE (exp) == INDIRECT_REF)
	    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
	  else
	    align = MIN (align, inner);
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
372
373 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
374 way, because it could contain a zero byte in the middle.
375 TREE_STRING_LENGTH is the size of the character array, not the string.
376
377 ONLY_VALUE should be nonzero if the result is not going to be emitted
378 into the instruction stream and zero if it is going to be expanded.
379 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
380 is returned, otherwise NULL, since
381 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
382 evaluate the side-effects.
383
384 The value returned is of type `ssizetype'.
385
386 Unfortunately, string_constant can't access the values of const char
387 arrays with initializers, so neither can we do so here. */
388
389 tree
390 c_strlen (tree src, int only_value)
391 {
392 tree offset_node;
393 HOST_WIDE_INT offset;
394 int max;
395 const char *ptr;
396
397 STRIP_NOPS (src);
398 if (TREE_CODE (src) == COND_EXPR
399 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
400 {
401 tree len1, len2;
402
403 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
404 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
405 if (tree_int_cst_equal (len1, len2))
406 return len1;
407 }
408
409 if (TREE_CODE (src) == COMPOUND_EXPR
410 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
411 return c_strlen (TREE_OPERAND (src, 1), only_value);
412
413 src = string_constant (src, &offset_node);
414 if (src == 0)
415 return NULL_TREE;
416
417 max = TREE_STRING_LENGTH (src) - 1;
418 ptr = TREE_STRING_POINTER (src);
419
420 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
421 {
422 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
423 compute the offset to the following null if we don't know where to
424 start searching for it. */
425 int i;
426
427 for (i = 0; i < max; i++)
428 if (ptr[i] == 0)
429 return NULL_TREE;
430
431 /* We don't know the starting offset, but we do know that the string
432 has no internal zero bytes. We can assume that the offset falls
433 within the bounds of the string; otherwise, the programmer deserves
434 what he gets. Subtract the offset from the length of the string,
435 and return that. This would perhaps not be valid if we were dealing
436 with named arrays in addition to literal string constants. */
437
438 return size_diffop (size_int (max), offset_node);
439 }
440
441 /* We have a known offset into the string. Start searching there for
442 a null character if we can represent it as a single HOST_WIDE_INT. */
443 if (offset_node == 0)
444 offset = 0;
445 else if (! host_integerp (offset_node, 0))
446 offset = -1;
447 else
448 offset = tree_low_cst (offset_node, 0);
449
450 /* If the offset is known to be out of bounds, warn, and call strlen at
451 runtime. */
452 if (offset < 0 || offset > max)
453 {
454 warning (0, "offset outside bounds of constant string");
455 return NULL_TREE;
456 }
457
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
461
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
465 }
466
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
469
470 static const char *
471 c_getstr (tree src)
472 {
473 tree offset_node;
474
475 src = string_constant (src, &offset_node);
476 if (src == 0)
477 return 0;
478
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
483 return 0;
484
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
486 }
487
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C holds the low and high HOST_WIDE_INT halves of the constant.  */
  HOST_WIDE_INT c[2];
  /* CH is the byte being copied; it doubles as a "still inside the
     string" flag — once it becomes 0 (the terminator), it stays 0 and
     the remaining target bytes are zero-filled.  */
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  /* Place each source byte at the bit position the target would read it
     from, honoring both byte order and word order.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* J is the target byte index for source byte I.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      /* When byte and word endianness differ on a multi-word mode,
	 reverse the byte position within its word.  */
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      /* Convert the byte index to a bit offset within C.  */
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
520
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
523 P. */
524
525 static int
526 target_char_cast (tree cst, char *p)
527 {
528 unsigned HOST_WIDE_INT val, hostval;
529
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
532 return 1;
533
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
537
538 hostval = val;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
541
542 if (val != hostval)
543 return 1;
544
545 *p = hostval;
546 return 0;
547 }
548
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
552
553 static tree
554 builtin_save_expr (tree exp)
555 {
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
559 return exp;
560
561 return save_expr (exp);
562 }
563
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
567
static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* TEM starts as the current frame's address; a target may override the
     default choice of frame pointer with its own expression.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      current_function_accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      /* Load into a pseudo so the next iteration chases the loaded
	 pointer rather than re-reading memory.  */
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address lives one word above the frame
     address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
647
648 /* Alias set used for setjmp buffer. */
649 static HOST_WIDE_INT setjmp_alias_set = -1;
650
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
654
655 void
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
657 {
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 rtx stack_save;
660 rtx mem;
661
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
664
665 buf_addr = convert_memory_address (Pmode, buf_addr);
666
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
668
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
672
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
676
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
679
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
682
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
688
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 #endif
694
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 current_function_calls_setjmp = 1;
698
699 /* We have a nonlocal label. */
700 current_function_has_nonlocal_label = 1;
701 }
702
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  When a nonlocal_goto pattern exists, the
     target's pattern has already done this; otherwise do it by hand.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the target can eliminate the argument pointer in favor of
	 the hard frame pointer, no explicit restore is needed; scan the
	 elimination table for that pair.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

  /* Give the target a chance to emit receiver-specific fixup code;
     fall back to the nonlocal-goto receiver pattern if present.  Note
     the if/else chains deliberately straddle the #ifdef boundaries.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
769
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* Share the alias set with the setjmp side so buffer accesses
     conflict with each other and nothing else.  */
  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Buffer layout matches expand_builtin_setjmp_setup: frame
	 pointer, receiver label, then the saved stack state.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  /* Clobber all of memory and the old frame before switching
	     frame pointers, so nothing is moved across the transition.  */
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					      REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
860
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* The save area layout is: word 0 holds the frame pointer, the next
     slot holds the saved stack pointer (in the nonlocal save mode).  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  current_function_has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Clobber all memory and the frame state so that nothing cached
	 in registers survives the control transfer.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       gen_rtx_SCRATCH (VOIDmode))));

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       hard_frame_pointer_rtx)));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
					      const0_rtx, REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
934
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;


  /* Determine the mode the target uses to save the stack pointer for
     nonlocal control transfers; default is Pmode.  STACK_SAVEAREA_MODE,
     when defined, takes precedence over the save_stack_nonlocal insn's
     operand mode.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The saved stack pointer lives in the third word of the buffer,
     i.e. at offset 2 * GET_MODE_SIZE (Pmode).  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
968
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.
     On a bad value we diagnose and then fall back to zero so that
     expansion can still proceed.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* If the address does not satisfy the prefetch pattern's operand
	 predicate, or is not in Pmode, force it into a Pmode register
	 so the generated insn is valid.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	     (op0,
	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1050
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
	  || TREE_CODE (exp) == NON_LVALUE_EXPR)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  /* OFFSET/LENGTH of -1 mean "unknown"; any unknown value forces
	     us to strip the COMPONENT_REF below.  */
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip wrappers until we reach the innermost COMPONENT_REF.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || TREE_CODE (inner) == NOP_EXPR
		 || TREE_CODE (inner) == CONVERT_EXPR
		 || TREE_CODE (inner) == NON_LVALUE_EXPR
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward over nested COMPONENT_REFs, accumulating the
	     offset, until we can prove the access fits in a field (keep
	     the COMPONENT_REF) or we run out (drop it).  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (! DECL_BIT_FIELD (field));
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      if (length >= 0
		  && TYPE_SIZE_UNIT (TREE_TYPE (inner))
		  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
		{
		  HOST_WIDE_INT size
		    = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
		  /* If we can prove the memory starting at XEXP (mem, 0)
		     and ending at XEXP (mem, 0) + LENGTH will fit into
		     this field, we can keep that COMPONENT_REF in MEM_EXPR.  */
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      /* Stringops may alias anything and may touch multiple elements,
	 so discard the alias set and size attributes.  */
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1163 \f
1164 /* Built-in functions to perform an untyped call and return. */
1165
/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  Initialized lazily by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1185
1186 /* Return the size required for the block returned by __builtin_apply_args,
1187 and initialize apply_args_mode. */
1188
1189 static int
1190 apply_args_size (void)
1191 {
1192 static int size = -1;
1193 int align;
1194 unsigned int regno;
1195 enum machine_mode mode;
1196
1197 /* The values computed by this function never change. */
1198 if (size < 0)
1199 {
1200 /* The first value is the incoming arg-pointer. */
1201 size = GET_MODE_SIZE (Pmode);
1202
1203 /* The second value is the structure value address unless this is
1204 passed as an "invisible" first argument. */
1205 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1206 size += GET_MODE_SIZE (Pmode);
1207
1208 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1209 if (FUNCTION_ARG_REGNO_P (regno))
1210 {
1211 mode = reg_raw_mode[regno];
1212
1213 gcc_assert (mode != VOIDmode);
1214
1215 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1216 if (size % align != 0)
1217 size = CEIL (size, align) * align;
1218 apply_args_reg_offset[regno] = size;
1219 size += GET_MODE_SIZE (mode);
1220 apply_args_mode[regno] = mode;
1221 }
1222 else
1223 {
1224 apply_args_mode[regno] = VOIDmode;
1225 apply_args_reg_offset[regno] = 0;
1226 }
1227 }
1228 return size;
1229 }
1230
1231 /* Return the size required for the block returned by __builtin_apply,
1232 and initialize apply_result_mode. */
1233
1234 static int
1235 apply_result_size (void)
1236 {
1237 static int size = -1;
1238 int align, regno;
1239 enum machine_mode mode;
1240
1241 /* The values computed by this function never change. */
1242 if (size < 0)
1243 {
1244 size = 0;
1245
1246 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1247 if (FUNCTION_VALUE_REGNO_P (regno))
1248 {
1249 mode = reg_raw_mode[regno];
1250
1251 gcc_assert (mode != VOIDmode);
1252
1253 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1254 if (size % align != 0)
1255 size = CEIL (size, align) * align;
1256 size += GET_MODE_SIZE (mode);
1257 apply_result_mode[regno] = mode;
1258 }
1259 else
1260 apply_result_mode[regno] = VOIDmode;
1261
1262 /* Allow targets that use untyped_call and untyped_return to override
1263 the size so that machine-specific information can be stored here. */
1264 #ifdef APPLY_RESULT_SIZE
1265 size = APPLY_RESULT_SIZE;
1266 #endif
1267 }
1268 return size;
1269 }
1270
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = 0;
  nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      mode = apply_result_mode[regno];
      if (mode == VOIDmode)
	continue;

      /* Align this register's slot within the block, mirroring the
	 layout computed by apply_result_size.  */
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (size % align != 0)
	size = CEIL (size, align) * align;

      reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
      mem = adjust_address (result, mode, size);

      /* Saving copies register -> memory; restoring is the reverse.  */
      if (savep)
	savevec[nelts++] = gen_rtx_SET (VOIDmode, mem, reg);
      else
	savevec[nelts++] = gen_rtx_SET (VOIDmode, reg, mem);

      size += GET_MODE_SIZE (mode);
    }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1301
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns the
   address of a stack block laid out as: arg pointer, optional
   structure value address, then the argument registers.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     Offsets here must match those computed in apply_args_size.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (virtual_incoming_args_rtx);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, current_function_pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1362
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Expand into a detached sequence so the insns can be relocated.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1401
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee address, ARGUMENTS the block produced by
   __builtin_apply_args, and ARGSIZE the number of bytes of arguments
   to push.  Returns the address of the saved-result block.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  (Called for its side effect of
     initializing apply_args_mode.)  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1557
/* Perform an untyped return.  RESULT is the address of the block of
   saved return registers produced by __builtin_apply.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Called for its side effect of initializing apply_result_mode.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.
     Offsets must match those computed in apply_result_size.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect the USEs in a separate sequence so they can all be
	   emitted just before the return, after every restore move.  */
	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1607
1608 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1609
1610 static enum type_class
1611 type_to_class (tree type)
1612 {
1613 switch (TREE_CODE (type))
1614 {
1615 case VOID_TYPE: return void_type_class;
1616 case INTEGER_TYPE: return integer_type_class;
1617 case ENUMERAL_TYPE: return enumeral_type_class;
1618 case BOOLEAN_TYPE: return boolean_type_class;
1619 case POINTER_TYPE: return pointer_type_class;
1620 case REFERENCE_TYPE: return reference_type_class;
1621 case OFFSET_TYPE: return offset_type_class;
1622 case REAL_TYPE: return real_type_class;
1623 case COMPLEX_TYPE: return complex_type_class;
1624 case FUNCTION_TYPE: return function_type_class;
1625 case METHOD_TYPE: return method_type_class;
1626 case RECORD_TYPE: return record_type_class;
1627 case UNION_TYPE:
1628 case QUAL_UNION_TYPE: return union_type_class;
1629 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1630 ? string_type_class : array_type_class);
1631 case LANG_TYPE: return lang_type_class;
1632 default: return no_type_class;
1633 }
1634 }
1635
1636 /* Expand a call EXP to __builtin_classify_type. */
1637
1638 static rtx
1639 expand_builtin_classify_type (tree exp)
1640 {
1641 if (call_expr_nargs (exp))
1642 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1643 return GEN_INT (no_type_class);
1644 }
1645
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It expands to the
   three case labels and assigns fcode/fcodef/fcodel, which the
   enclosing switch's variables must provide.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r/lgammaf_r/lgammal_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1659
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If we can't do the conversion, return zero.
   E.g. for TYPE == float_type_node and FN == BUILT_IN_SQRT this
   returns the decl for sqrtf.  */
tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expands to the double/float/long-double triple of
     case labels and fills in fcode/fcodef/fcodel accordingly.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Pick the variant matching TYPE; only implicitly-available decls
     are returned, so this can be NULL even for a known FN.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return implicit_built_in_decls[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return implicit_built_in_decls[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return implicit_built_in_decls[fcodel];
  else
    return NULL_TREE;
}
1765
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  (A NaN is the only value
     that compares unequal to itself, so EQ fails exactly for NaN.)  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			   0, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1803
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns, before_call;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab for this builtin.  ERRNO_SET records whether the
     library function may set errno, so that errno-checking code can be
     emitted around the inline expansion below.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets errno for negative arguments; skip the check
	 when the argument is provably nonnegative.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Without NaNs (or with -fno-math-errno) there is no EDOM case to
     detect, so skip the errno check entirely.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      narg = builtin_save_expr (arg);
      if (narg != arg)
	{
	  arg = narg;
	  exp = build_call_expr (fndecl, 1, arg);
	}

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  before_call = get_last_insn ();

  target = expand_call (exp, target, target == const0_rtx);

  /* If this is a sqrt operation and we don't care about errno, try to
     attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
     This allows the semantics of the libcall to be visible to the RTL
     optimizers.  */
  if (builtin_optab == sqrt_optab && !errno_set)
    {
      /* Search backwards through the insns emitted by expand_call looking
	 for the instruction with the REG_RETVAL note.  */
      rtx last = get_last_insn ();
      while (last != before_call)
	{
	  if (find_reg_note (last, REG_RETVAL, NULL))
	    {
	      rtx note = find_reg_note (last, REG_EQUAL, NULL);
	      /* Check that the REG_EQUAL note is an EXPR_LIST with
		 two elements, i.e. symbol_ref(sqrt) and the operand.  */
	      if (note
		  && GET_CODE (note) == EXPR_LIST
		  && GET_CODE (XEXP (note, 0)) == EXPR_LIST
		  && XEXP (XEXP (note, 0), 1) != NULL_RTX
		  && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
		{
		  rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
		  /* Check operand is a register with expected mode.  */
		  if (operand
		      && REG_P (operand)
		      && GET_MODE (operand) == mode)
		    {
		      /* Replace the REG_EQUAL note with a SQRT rtx.  */
		      rtx equiv = gen_rtx_SQRT (mode, operand);
		      set_unique_reg_note (last, REG_EQUAL, equiv);
		    }
		}
	      break;
	    }
	  last = PREV_INSN (last);
	}
    }

  return target;
}
1969
1970 /* Expand a call to the builtin binary math functions (pow and atan2).
1971 Return NULL_RTX if a normal call should be emitted rather than expanding the
1972 function in-line. EXP is the expression that is a call to the builtin
1973 function; if convenient, the result should be placed in TARGET.
1974 SUBTARGET may be used as the target for computing one of EXP's
1975 operands. */
1976
1977 static rtx
1978 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1979 {
1980 optab builtin_optab;
1981 rtx op0, op1, insns;
1982 int op1_type = REAL_TYPE;
1983 tree fndecl = get_callee_fndecl (exp);
1984 tree arg0, arg1, narg;
1985 enum machine_mode mode;
1986 bool errno_set = true;
1987 bool stable = true;
1988
1989 switch (DECL_FUNCTION_CODE (fndecl))
1990 {
1991 CASE_FLT_FN (BUILT_IN_SCALBN):
1992 CASE_FLT_FN (BUILT_IN_SCALBLN):
1993 CASE_FLT_FN (BUILT_IN_LDEXP):
1994 op1_type = INTEGER_TYPE;
1995 default:
1996 break;
1997 }
1998
1999 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2000 return NULL_RTX;
2001
2002 arg0 = CALL_EXPR_ARG (exp, 0);
2003 arg1 = CALL_EXPR_ARG (exp, 1);
2004
2005 switch (DECL_FUNCTION_CODE (fndecl))
2006 {
2007 CASE_FLT_FN (BUILT_IN_POW):
2008 builtin_optab = pow_optab; break;
2009 CASE_FLT_FN (BUILT_IN_ATAN2):
2010 builtin_optab = atan2_optab; break;
2011 CASE_FLT_FN (BUILT_IN_SCALB):
2012 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2013 return 0;
2014 builtin_optab = scalb_optab; break;
2015 CASE_FLT_FN (BUILT_IN_SCALBN):
2016 CASE_FLT_FN (BUILT_IN_SCALBLN):
2017 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2018 return 0;
2019 /* Fall through... */
2020 CASE_FLT_FN (BUILT_IN_LDEXP):
2021 builtin_optab = ldexp_optab; break;
2022 CASE_FLT_FN (BUILT_IN_FMOD):
2023 builtin_optab = fmod_optab; break;
2024 CASE_FLT_FN (BUILT_IN_REMAINDER):
2025 CASE_FLT_FN (BUILT_IN_DREM):
2026 builtin_optab = remainder_optab; break;
2027 default:
2028 gcc_unreachable ();
2029 }
2030
2031 /* Make a suitable register to place result in. */
2032 mode = TYPE_MODE (TREE_TYPE (exp));
2033
2034 /* Before working hard, check whether the instruction is available. */
2035 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2036 return NULL_RTX;
2037
2038 target = gen_reg_rtx (mode);
2039
2040 if (! flag_errno_math || ! HONOR_NANS (mode))
2041 errno_set = false;
2042
2043 /* Always stabilize the argument list. */
2044 narg = builtin_save_expr (arg1);
2045 if (narg != arg1)
2046 {
2047 arg1 = narg;
2048 stable = false;
2049 }
2050 narg = builtin_save_expr (arg0);
2051 if (narg != arg0)
2052 {
2053 arg0 = narg;
2054 stable = false;
2055 }
2056
2057 if (! stable)
2058 exp = build_call_expr (fndecl, 2, arg0, arg1);
2059
2060 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2061 op1 = expand_normal (arg1);
2062
2063 start_sequence ();
2064
2065 /* Compute into TARGET.
2066 Set TARGET to wherever the result comes back. */
2067 target = expand_binop (mode, builtin_optab, op0, op1,
2068 target, 0, OPTAB_DIRECT);
2069
2070 /* If we were unable to expand via the builtin, stop the sequence
2071 (without outputting the insns) and call to the library function
2072 with the stabilized argument list. */
2073 if (target == 0)
2074 {
2075 end_sequence ();
2076 return expand_call (exp, target, target == const0_rtx);
2077 }
2078
2079 if (errno_set)
2080 expand_errno_check (exp, target);
2081
2082 /* Output the entire sequence. */
2083 insns = get_insns ();
2084 end_sequence ();
2085 emit_insn (insns);
2086
2087 return target;
2088 }
2089
2090 /* Expand a call to the builtin sin and cos math functions.
2091 Return NULL_RTX if a normal call should be emitted rather than expanding the
2092 function in-line. EXP is the expression that is a call to the builtin
2093 function; if convenient, the result should be placed in TARGET.
2094 SUBTARGET may be used as the target for computing one of EXP's
2095 operands. */
2096
2097 static rtx
2098 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2099 {
2100 optab builtin_optab;
2101 rtx op0, insns;
2102 tree fndecl = get_callee_fndecl (exp);
2103 enum machine_mode mode;
2104 tree arg, narg;
2105
2106 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2107 return NULL_RTX;
2108
2109 arg = CALL_EXPR_ARG (exp, 0);
2110
2111 switch (DECL_FUNCTION_CODE (fndecl))
2112 {
2113 CASE_FLT_FN (BUILT_IN_SIN):
2114 CASE_FLT_FN (BUILT_IN_COS):
2115 builtin_optab = sincos_optab; break;
2116 default:
2117 gcc_unreachable ();
2118 }
2119
2120 /* Make a suitable register to place result in. */
2121 mode = TYPE_MODE (TREE_TYPE (exp));
2122
2123 /* Check if sincos insn is available, otherwise fallback
2124 to sin or cos insn. */
2125 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2126 switch (DECL_FUNCTION_CODE (fndecl))
2127 {
2128 CASE_FLT_FN (BUILT_IN_SIN):
2129 builtin_optab = sin_optab; break;
2130 CASE_FLT_FN (BUILT_IN_COS):
2131 builtin_optab = cos_optab; break;
2132 default:
2133 gcc_unreachable ();
2134 }
2135
2136 /* Before working hard, check whether the instruction is available. */
2137 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2138 {
2139 target = gen_reg_rtx (mode);
2140
2141 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2142 need to expand the argument again. This way, we will not perform
2143 side-effects more the once. */
2144 narg = save_expr (arg);
2145 if (narg != arg)
2146 {
2147 arg = narg;
2148 exp = build_call_expr (fndecl, 1, arg);
2149 }
2150
2151 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2152
2153 start_sequence ();
2154
2155 /* Compute into TARGET.
2156 Set TARGET to wherever the result comes back. */
2157 if (builtin_optab == sincos_optab)
2158 {
2159 int result;
2160
2161 switch (DECL_FUNCTION_CODE (fndecl))
2162 {
2163 CASE_FLT_FN (BUILT_IN_SIN):
2164 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2165 break;
2166 CASE_FLT_FN (BUILT_IN_COS):
2167 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2168 break;
2169 default:
2170 gcc_unreachable ();
2171 }
2172 gcc_assert (result);
2173 }
2174 else
2175 {
2176 target = expand_unop (mode, builtin_optab, op0, target, 0);
2177 }
2178
2179 if (target != 0)
2180 {
2181 /* Output the entire sequence. */
2182 insns = get_insns ();
2183 end_sequence ();
2184 emit_insn (insns);
2185 return target;
2186 }
2187
2188 /* If we were unable to expand via the builtin, stop the sequence
2189 (without outputting the insns) and call to the library function
2190 with the stabilized argument list. */
2191 end_sequence ();
2192 }
2193
2194 target = expand_call (exp, target, target == const0_rtx);
2195
2196 return target;
2197 }
2198
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  enum insn_code icode;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return NULL_RTX;

  /* Optab mode depends on the mode of the input argument
     (the result is an integer, not a float).  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  icode = builtin_optab->handlers[(int) mode].insn_code;

  /* Before working hard, check whether the instruction is available.  */
  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      narg = builtin_save_expr (arg);
      if (narg != arg)
	{
	  arg = narg;
	  exp = build_call_expr (fndecl, 1, arg);
	}

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  /* No insn available; emit an ordinary library call.  */
  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2278
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  On success the sin/cos results are stored through the two
   pointer arguments and const0_rtx is returned.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Expand the dereferenced output pointers to get the memory
     locations that receive sin and cos respectively.  */
  op1 = expand_normal (build_fold_indirect_ref (sinp));
  op2 = expand_normal (build_fold_indirect_ref (cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2326
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.

   Three strategies are tried, in order: the sincos optab, a libcall to
   sincos (when the target C library has it), and finally a libcall to
   cexp with a purely imaginary argument.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2 (op1 = sin, op2 = cos; combined into
	 a COMPLEX_EXPR at the end of the function).  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Allocate stack temporaries to receive sincos's two outputs and
	 build tree-level pointers to them for the call.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i); build the purely imaginary argument.  */
      narg = fold_build2 (COMPLEX_EXPR, ctype,
			  build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos (op2) is the real part,
     sin (op1) the imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2435
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  SUBTARGET may
   be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab and the float rounding builtin to fall
     back on if the optab has no insn for this mode.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  narg = builtin_save_expr (arg);
  if (narg != arg)
    {
      arg = narg;
      exp = build_call_expr (fndecl, 1, arg);
    }

  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Lower to (int)(floor (arg)) / (int)(ceil (arg)).  */
  exp = build_call_expr (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2569
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg, narg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  narg = builtin_save_expr (arg);
  if (narg != arg)
    {
      arg = narg;
      exp = build_call_expr (fndecl, 1, arg);
    }

  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2644
2645 /* To evaluate powi(x,n), the floating point value x raised to the
2646 constant integer exponent n, we use a hybrid algorithm that
2647 combines the "window method" with look-up tables. For an
2648 introduction to exponentiation algorithms and "addition chains",
2649 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2650 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2651 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2652 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2653
2654 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2655 multiplications to inline before calling the system library's pow
2656 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2657 so this default never requires calling pow, powf or powl. */
2658
2659 #ifndef POWI_MAX_MULTS
2660 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2661 #endif
2662
2663 /* The size of the "optimal power tree" lookup table. All
2664 exponents less than this value are simply looked up in the
2665 powi_table below. This threshold is also used to size the
2666 cache of pseudo registers that hold intermediate results. */
2667 #define POWI_TABLE_SIZE 256
2668
2669 /* The size, in bits of the window, used in the "window method"
2670 exponentiation algorithm. This is equivalent to a radix of
2671 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2672 #define POWI_WINDOW_SIZE 3
2673
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states than an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".
   Entries 100..255 extend the table heuristically; the recursion
   terminates because powi_table[i] < i for all i > 1.  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2716
2717
2718 /* Return the number of multiplications required to calculate
2719 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2720 subroutine of powi_cost. CACHE is an array indicating
2721 which exponents have already been calculated. */
2722
2723 static int
2724 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2725 {
2726 /* If we've already calculated this exponent, then this evaluation
2727 doesn't require any additional multiplications. */
2728 if (cache[n])
2729 return 0;
2730
2731 cache[n] = true;
2732 return powi_lookup_cost (n - powi_table[n], cache)
2733 + powi_lookup_cost (powi_table[n], cache) + 1;
2734 }
2735
2736 /* Return the number of multiplications required to calculate
2737 powi(x,n) for an arbitrary x, given the exponent N. This
2738 function needs to be kept in sync with expand_powi below. */
2739
2740 static int
2741 powi_cost (HOST_WIDE_INT n)
2742 {
2743 bool cache[POWI_TABLE_SIZE];
2744 unsigned HOST_WIDE_INT digit;
2745 unsigned HOST_WIDE_INT val;
2746 int result;
2747
2748 if (n == 0)
2749 return 0;
2750
2751 /* Ignore the reciprocal when calculating the cost. */
2752 val = (n < 0) ? -n : n;
2753
2754 /* Initialize the exponent cache. */
2755 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2756 cache[1] = true;
2757
2758 result = 0;
2759
2760 while (val >= POWI_TABLE_SIZE)
2761 {
2762 if (val & 1)
2763 {
2764 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2765 result += powi_lookup_cost (digit, cache)
2766 + POWI_WINDOW_SIZE + 1;
2767 val >>= POWI_WINDOW_SIZE;
2768 }
2769 else
2770 {
2771 val >>= 1;
2772 result++;
2773 }
2774 }
2775
2776 return result + powi_lookup_cost (val, cache);
2777 }
2778
2779 /* Recursive subroutine of expand_powi. This function takes the array,
2780 CACHE, of already calculated exponents and an exponent N and returns
2781 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2782
static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  /* Small exponents are memoized in CACHE and split according to the
     precomputed powi_table addition chain.  */
  if (n < POWI_TABLE_SIZE)
    {
      if (cache[n])
	return cache[n];

      /* Record the result register before recursing so shared
	 sub-exponents are computed only once.  */
      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Large odd exponent: peel off the low POWI_WINDOW_SIZE bits;
	 the peeled digit is < POWI_TABLE_SIZE and takes the cached
	 path above.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Large even exponent: x**n = (x**(n/2))**2, so square the
	 half-exponent result.  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  /* Combine the two sub-results with one multiplication, forcing the
     value into TARGET since cached entries refer to that register.  */
  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2820
2821 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2822 floating point operand in mode MODE, and N is the exponent. This
2823 function needs to be kept in sync with powi_cost above. */
2824
2825 static rtx
2826 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2827 {
2828 unsigned HOST_WIDE_INT val;
2829 rtx cache[POWI_TABLE_SIZE];
2830 rtx result;
2831
2832 if (n == 0)
2833 return CONST1_RTX (mode);
2834
2835 val = (n < 0) ? -n : n;
2836
2837 memset (cache, 0, sizeof (cache));
2838 cache[1] = x;
2839
2840 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2841
2842 /* If the original exponent was negative, reciprocate the result. */
2843 if (n < 0)
2844 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2845 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2846
2847 return result;
2848 }
2849
2850 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2851 a normal call should be emitted rather than expanding the function
2852 in-line. EXP is the expression that is a call to the builtin
2853 function; if convenient, the result should be placed in TARGET. */
2854
static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* A non-constant (or overflowed) exponent cannot be strength-reduced
     here; defer to the generic two-argument math expander.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  Exponents -1, 0, 1 and 2 are always exact;
     larger ones require unsafe-math and an acceptable multiply count.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && !optimize_size
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      /* pow (x, 1) is just x; no multiplication sequence needed.  */
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 may be evaluated twice below (once by the sqrt/cbrt call and
     once for the powi part), so protect it against re-evaluation.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* N = 2*C; if that is exactly an integer, C was n/2.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && !optimize_size
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1, narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  if (n != 1)
	    {
	      /* Multiply sqrt (x) by x**|n/2|, then reciprocate if the
		 exponent was negative.  */
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      /* Round 3*C to an integer N, then verify that N/3 converts back
	 to exactly C in this mode (i.e. C really is a third).  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((!optimize_size
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1,narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* |n| % 3 == 2 needs cbrt(x)**2; square the cbrt result.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      /* Multiply in the x**|n/3| factor, then reciprocate if
		 the exponent was negative.  */
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
2980
2981 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2982 a normal call should be emitted rather than expanding the function
2983 in-line. EXP is the expression that is a call to the builtin
2984 function; if convenient, the result should be placed in TARGET. */
2985
static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);	/* The real base.  */
  arg1 = CALL_EXPR_ARG (exp, 1);	/* The integer exponent.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The HIGH-word test ensures the constant actually fits in N
	 (HIGH == 0 for small non-negative values, -1 for small
	 negative ones).  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (! optimize_size
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Expand both arguments and convert them to the modes the libcall
     expects.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
				    target, LCT_CONST_MAKE_BLOCK, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3043
3044 /* Expand expression EXP which is a call to the strlen builtin. Return
3045 NULL_RTX if we failed the caller should emit a normal call, otherwise
3046 try to get the result in TARGET, if convenient. */
3047
static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  Try
	 successively wider modes until the target provides a strlen
	 pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  Reuse
	 TARGET only if it is already a pseudo of the right mode.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* The pattern's third operand is the character to search for
	 (always NUL here); force it into a register if the predicate
	 rejects const0_rtx.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in just before the
	 strlen insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3152
3153 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3154 caller should emit a normal call, otherwise try to get the result
3155 in TARGET, if convenient (and in mode MODE if that's convenient). */
3156
3157 static rtx
3158 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3159 {
3160 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3161 {
3162 tree type = TREE_TYPE (exp);
3163 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3164 CALL_EXPR_ARG (exp, 1), type);
3165 if (result)
3166 return expand_expr (result, target, mode, EXPAND_NORMAL);
3167 }
3168 return NULL_RTX;
3169 }
3170
3171 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3172 caller should emit a normal call, otherwise try to get the result
3173 in TARGET, if convenient (and in mode MODE if that's convenient). */
3174
3175 static rtx
3176 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3177 {
3178 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3179 {
3180 tree type = TREE_TYPE (exp);
3181 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3182 CALL_EXPR_ARG (exp, 1), type);
3183 if (result)
3184 return expand_expr (result, target, mode, EXPAND_NORMAL);
3185
3186 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3187 }
3188 return NULL_RTX;
3189 }
3190
3191 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3192 caller should emit a normal call, otherwise try to get the result
3193 in TARGET, if convenient (and in mode MODE if that's convenient). */
3194
3195 static rtx
3196 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3197 {
3198 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3199 {
3200 tree type = TREE_TYPE (exp);
3201 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3202 CALL_EXPR_ARG (exp, 1), type);
3203 if (result)
3204 return expand_expr (result, target, mode, EXPAND_NORMAL);
3205 }
3206 return NULL_RTX;
3207 }
3208
3209 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3210 caller should emit a normal call, otherwise try to get the result
3211 in TARGET, if convenient (and in mode MODE if that's convenient). */
3212
3213 static rtx
3214 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3215 {
3216 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3217 {
3218 tree type = TREE_TYPE (exp);
3219 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3220 CALL_EXPR_ARG (exp, 1), type);
3221 if (result)
3222 return expand_expr (result, target, mode, EXPAND_NORMAL);
3223 }
3224 return NULL_RTX;
3225 }
3226
3227 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3228 bytes from constant string DATA + OFFSET and return it as target
3229 constant. */
3230
3231 static rtx
3232 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3233 enum machine_mode mode)
3234 {
3235 const char *str = (const char *) data;
3236
3237 gcc_assert (offset >= 0
3238 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3239 <= strlen (str) + 1));
3240
3241 return c_readstr (str + offset, mode);
3242 }
3243
3244 /* Expand a call EXP to the memcpy builtin.
3245 Return NULL_RTX if we failed, the caller should emit a normal call,
3246 otherwise try to get the result in TARGET, if convenient (and in
3247 mode MODE if that's convenient). */
3248
static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len,
					    TREE_TYPE (TREE_TYPE (fndecl)),
					    false, /*endp=*/0);
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If the folder simplified the call, expand any COMPOUND_EXPR
	 wrappers for their side effects, then expand the value.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Profile-based hints about the likely block size and alignment;
	 never report less than what we can prove statically.  */
      stringop_block_profile (exp, &expected_align, &expected_size);
      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns DEST; synthesize the address if the block move
	 did not produce one.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3335
3336 /* Expand a call EXP to the mempcpy builtin.
3337 Return NULL_RTX if we failed; the caller should emit a normal call,
3338 otherwise try to get the result in TARGET, if convenient (and in
3339 mode MODE if that's convenient). If ENDP is 0 return the
3340 destination pointer, if ENDP is 1 return the end pointer ala
3341 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3342 stpcpy. */
3343
3344 static rtx
3345 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3346 {
3347 if (!validate_arglist (exp,
3348 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3349 return NULL_RTX;
3350 else
3351 {
3352 tree dest = CALL_EXPR_ARG (exp, 0);
3353 tree src = CALL_EXPR_ARG (exp, 1);
3354 tree len = CALL_EXPR_ARG (exp, 2);
3355 return expand_builtin_mempcpy_args (dest, src, len,
3356 TREE_TYPE (exp),
3357 target, mode, /*endp=*/ 1);
3358 }
3359 }
3360
3361 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3362 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3363 so that this can also be called without constructing an actual CALL_EXPR.
3364 TYPE is the return type of the call. The other arguments and return value
3365 are the same as for expand_builtin_mempcpy. */
3366
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];

      if (!fn)
	return NULL_RTX;

      return expand_expr (build_call_expr (fn, 3, dest, src, len),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);

      /* If the folder simplified the call, expand any COMPOUND_EXPR
	 wrappers for their side effects, then expand the value.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise try a piecewise move; ENDP selects which pointer
	 (start, end, or end-1) the move returns.  */
      if (GET_CODE (len_rtx) == CONST_INT
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3451
3452 /* Expand expression EXP, which is a call to the memmove builtin. Return
3453 NULL_RTX if we failed; the caller should emit a normal call. */
3454
3455 static rtx
3456 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3457 {
3458 if (!validate_arglist (exp,
3459 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3460 return NULL_RTX;
3461 else
3462 {
3463 tree dest = CALL_EXPR_ARG (exp, 0);
3464 tree src = CALL_EXPR_ARG (exp, 1);
3465 tree len = CALL_EXPR_ARG (exp, 2);
3466 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3467 target, mode, ignore);
3468 }
3469 }
3470
3471 /* Helper function to do the actual work for expand_builtin_memmove. The
3472 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3473 so that this can also be called without constructing an actual CALL_EXPR.
3474 TYPE is the return type of the call. The other arguments and return value
3475 are the same as for expand_builtin_memmove. */
3476
3477 static rtx
3478 expand_builtin_memmove_args (tree dest, tree src, tree len,
3479 tree type, rtx target, enum machine_mode mode,
3480 int ignore)
3481 {
3482 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3483
3484 if (result)
3485 {
3486 STRIP_TYPE_NOPS (result);
3487 while (TREE_CODE (result) == COMPOUND_EXPR)
3488 {
3489 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3490 EXPAND_NORMAL);
3491 result = TREE_OPERAND (result, 1);
3492 }
3493 return expand_expr (result, target, mode, EXPAND_NORMAL);
3494 }
3495
3496 /* Otherwise, call the normal function. */
3497 return NULL_RTX;
3498 }
3499
3500 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3501 NULL_RTX if we failed the caller should emit a normal call. */
3502
3503 static rtx
3504 expand_builtin_bcopy (tree exp, int ignore)
3505 {
3506 tree type = TREE_TYPE (exp);
3507 tree src, dest, size;
3508
3509 if (!validate_arglist (exp,
3510 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3511 return NULL_RTX;
3512
3513 src = CALL_EXPR_ARG (exp, 0);
3514 dest = CALL_EXPR_ARG (exp, 1);
3515 size = CALL_EXPR_ARG (exp, 2);
3516
3517 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3518 This is done this way so that if it isn't expanded inline, we fall
3519 back to calling bcopy instead of memmove. */
3520 return expand_builtin_memmove_args (dest, src,
3521 fold_convert (sizetype, size),
3522 type, const0_rtx, VOIDmode,
3523 ignore);
3524 }
3525
3526 #ifndef HAVE_movstr
3527 # define HAVE_movstr 0
3528 # define CODE_FOR_movstr CODE_FOR_nothing
3529 #endif
3530
3531 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3532 we failed, the caller should emit a normal call, otherwise try to
3533 get the result in TARGET, if convenient. If ENDP is 0 return the
3534 destination pointer, if ENDP is 1 return the end pointer ala
3535 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3536 stpcpy. */
3537
static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* Caller wants the destination pointer back: capture DEST's
	 address in TARGET before the insn, and let END be a scratch
	 register for the pattern's output.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* Caller wants the end pointer: reuse TARGET as the pattern's
	 output when possible, otherwise allocate a fresh register.  */
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  /* Adapt END to the mode the movstr pattern declares for its output
     operand.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3592
3593 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3594 NULL_RTX if we failed the caller should emit a normal call, otherwise
3595 try to get the result in TARGET, if convenient (and in mode MODE if that's
3596 convenient). */
3597
3598 static rtx
3599 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3600 {
3601 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3602 {
3603 tree dest = CALL_EXPR_ARG (exp, 0);
3604 tree src = CALL_EXPR_ARG (exp, 1);
3605 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3606 }
3607 return NULL_RTX;
3608 }
3609
3610 /* Helper function to do the actual work for expand_builtin_strcpy. The
3611 arguments to the builtin_strcpy call DEST and SRC are broken out
3612 so that this can also be called without constructing an actual CALL_EXPR.
3613 The other arguments and return value are the same as for
3614 expand_builtin_strcpy. */
3615
3616 static rtx
3617 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3618 rtx target, enum machine_mode mode)
3619 {
3620 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3621 if (result)
3622 return expand_expr (result, target, mode, EXPAND_NORMAL);
3623 return expand_movstr (dest, src, target, /*endp=*/0);
3624
3625 }
3626
3627 /* Expand a call EXP to the stpcpy builtin.
3628 Return NULL_RTX if we failed the caller should emit a normal call,
3629 otherwise try to get the result in TARGET, if convenient (and in
3630 mode MODE if that's convenient). */
3631
static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      if (!fn)
	return NULL_RTX;

      return expand_expr (build_call_expr (fn, 2, dst, src),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Known length: stpcpy (d, s) == mempcpy (d, s, strlen (s) + 1) - 1,
	 which ENDP == 2 requests.  */
      lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      /* mempcpy expansion failed; if the length is a compile-time
	 constant, expand as strcpy and add the length to the returned
	 destination pointer.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (GET_CODE (len_rtx) == CONST_INT)
	    {
	      ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
						dst, src, target, mode);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  /* TARGET = RET + strlen, i.e. the address of the
		     terminating NUL.  */
		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: a target movstr instruction, if available.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3705
3706 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3707 bytes from constant string DATA + OFFSET and return it as target
3708 constant. */
3709
3710 static rtx
3711 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3712 enum machine_mode mode)
3713 {
3714 const char *str = (const char *) data;
3715
3716 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3717 return const0_rtx;
3718
3719 return c_readstr (str + offset, mode);
3720 }
3721
3722 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3723 NULL_RTX if we failed the caller should emit a normal call. */
3724
static rtx
expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);
      tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);

      /* If the folder simplified the call, expand any COMPOUND_EXPR
	 wrappers for their side effects, then expand the value.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (src) + 1, the number of bytes the source
	 actually provides (including the NUL).  */
      slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       (void *) p, dest_align))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str supplies the zero padding past
	     the end of the source string.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   (void *) p, dest_align, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3783
3784 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3785 bytes from constant string DATA + OFFSET and return it as target
3786 constant. */
3787
3788 rtx
3789 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3790 enum machine_mode mode)
3791 {
3792 const char *c = (const char *) data;
3793 char *p = alloca (GET_MODE_SIZE (mode));
3794
3795 memset (p, *c, GET_MODE_SIZE (mode));
3796
3797 return c_readstr (p, mode);
3798 }
3799
3800 /* Callback routine for store_by_pieces. Return the RTL of a register
3801 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3802 char value given in the RTL register data. For example, if mode is
3803 4 bytes wide, return the RTL for 0x01010101*data. */
3804
3805 static rtx
3806 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3807 enum machine_mode mode)
3808 {
3809 rtx target, coeff;
3810 size_t size;
3811 char *p;
3812
3813 size = GET_MODE_SIZE (mode);
3814 if (size == 1)
3815 return (rtx) data;
3816
3817 p = alloca (size);
3818 memset (p, 1, size);
3819 coeff = c_readstr (p, mode);
3820
3821 target = convert_to_mode (mode, (rtx) data, 1);
3822 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3823 return force_reg (mode, target);
3824 }
3825
3826 /* Expand expression EXP, which is a call to the memset builtin. Return
3827 NULL_RTX if we failed the caller should emit a normal call, otherwise
3828 try to get the result in TARGET, if convenient (and in mode MODE if that's
3829 convenient). */
3830
3831 static rtx
3832 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3833 {
3834 if (!validate_arglist (exp,
3835 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3836 return NULL_RTX;
3837 else
3838 {
3839 tree dest = CALL_EXPR_ARG (exp, 0);
3840 tree val = CALL_EXPR_ARG (exp, 1);
3841 tree len = CALL_EXPR_ARG (exp, 2);
3842 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3843 }
3844 }
3845
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call, used to recover
   the callee and tail-call flag if we must fall back to a library call.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Profile feedback may tell us a likely block size/alignment; never
     let it lower the alignment we already proved.  */
  stringop_block_profile (orig_exp, &expected_align, &expected_size);
  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Non-constant fill value: replicate it at runtime.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && !(optimize_size && tree_low_cst (len, 1) > 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      /* memset returns DEST: give back its address in ptr_mode.  */
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant VAL: reduce it to a host char, or punt to the library.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Nonzero constant byte: store by pieces or via the setmem pattern.  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && !(optimize_size && tree_low_cst (len, 1) > 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero fill: use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Rebuild a plain call to the original callee (memset or bzero) using
     the stabilized arguments, preserving the tail-call flag.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_expr (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_expr (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3970
3971 /* Expand expression EXP, which is a call to the bzero builtin. Return
3972 NULL_RTX if we failed the caller should emit a normal call. */
3973
3974 static rtx
3975 expand_builtin_bzero (tree exp)
3976 {
3977 tree dest, size;
3978
3979 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3980 return NULL_RTX;
3981
3982 dest = CALL_EXPR_ARG (exp, 0);
3983 size = CALL_EXPR_ARG (exp, 1);
3984
3985 /* New argument list transforming bzero(ptr x, int y) to
3986 memset(ptr x, int 0, size_t y). This is done this way
3987 so that if it isn't expanded inline, we fallback to
3988 calling bzero instead of memset. */
3989
3990 return expand_builtin_memset_args (dest, integer_zero_node,
3991 fold_convert (sizetype, size),
3992 const0_rtx, VOIDmode, exp);
3993 }
3994
3995 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
3996 caller should emit a normal call, otherwise try to get the result
3997 in TARGET, if convenient (and in mode MODE if that's convenient). */
3998
3999 static rtx
4000 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4001 {
4002 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4003 INTEGER_TYPE, VOID_TYPE))
4004 {
4005 tree type = TREE_TYPE (exp);
4006 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4007 CALL_EXPR_ARG (exp, 1),
4008 CALL_EXPR_ARG (exp, 2), type);
4009 if (result)
4010 return expand_expr (result, target, mode, EXPAND_NORMAL);
4011 }
4012 return NULL_RTX;
4013 }
4014
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* If the call folds to a simpler tree, just expand that.  */
      tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1),
					 CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

/* Otherwise try the target's cmpmem/cmpstrn insn, if it has one.  */
#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Pick the result mode of whichever pattern is available,
       preferring cmpmemsi.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);

    /* Set MEM_SIZE as appropriate.  */
    if (GET_CODE (arg3_rtx) == CONST_INT)
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The gen_* routine may decline; fall back to the memcmp libcall.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4125
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* If the call folds to a simpler tree, just expand that.  */
      tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

/* Otherwise try the target's cmpstr/cmpstrn insn, if it has one.  */
#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Add one for the terminating NUL of each known string.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 2, arg1, arg2);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4274
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* If the call folds to a simpler tree, just expand that.  */
      tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
					  CALL_EXPR_ARG (exp, 1),
					  CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Add one for the terminating NUL of each known string.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
			 fold_convert (TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 3, arg1, arg2, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4401
/* Expand expression EXP, which is a call to the strcat builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   FNDECL is the strcat declaration, passed through to the strcpy
   expander for the (dst + strlen (dst)) store.  */

static rtx
expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dst = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return expand_expr (dst, target, mode, EXPAND_NORMAL);

      if (!optimize_size)
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newsrc, newdst,
	    strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  rtx insns;

	  /* Stabilize the argument list.  */
	  newsrc = builtin_save_expr (src);
	  dst = builtin_save_expr (dst);

	  /* Collect the emitted insns in a sequence so that the whole
	     attempt can be discarded if the strcpy expansion fails.  */
	  start_sequence ();

	  /* Create strlen (dst).  */
	  newdst = build_call_expr (strlen_fn, 1, dst);
	  /* Create (dst + (cast) strlen (dst)).  */
	  newdst = fold_convert (TREE_TYPE (dst), newdst);
	  newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);

	  newdst = builtin_save_expr (newdst);

	  if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
	    {
	      end_sequence (); /* Stop sequence.  */
	      return NULL_RTX;
	    }

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);

	  /* strcat returns its first argument.  */
	  return expand_expr (dst, target, mode, EXPAND_NORMAL);
	}

      return NULL_RTX;
    }
}
4459
4460 /* Expand expression EXP, which is a call to the strncat builtin.
4461 Return NULL_RTX if we failed the caller should emit a normal call,
4462 otherwise try to get the result in TARGET, if convenient. */
4463
4464 static rtx
4465 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4466 {
4467 if (validate_arglist (exp,
4468 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4469 {
4470 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4471 CALL_EXPR_ARG (exp, 1),
4472 CALL_EXPR_ARG (exp, 2));
4473 if (result)
4474 return expand_expr (result, target, mode, EXPAND_NORMAL);
4475 }
4476 return NULL_RTX;
4477 }
4478
4479 /* Expand expression EXP, which is a call to the strspn builtin.
4480 Return NULL_RTX if we failed the caller should emit a normal call,
4481 otherwise try to get the result in TARGET, if convenient. */
4482
4483 static rtx
4484 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4485 {
4486 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4487 {
4488 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4489 CALL_EXPR_ARG (exp, 1));
4490 if (result)
4491 return expand_expr (result, target, mode, EXPAND_NORMAL);
4492 }
4493 return NULL_RTX;
4494 }
4495
4496 /* Expand expression EXP, which is a call to the strcspn builtin.
4497 Return NULL_RTX if we failed the caller should emit a normal call,
4498 otherwise try to get the result in TARGET, if convenient. */
4499
4500 static rtx
4501 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4502 {
4503 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4504 {
4505 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4506 CALL_EXPR_ARG (exp, 1));
4507 if (result)
4508 return expand_expr (result, target, mode, EXPAND_NORMAL);
4509 }
4510 return NULL_RTX;
4511 }
4512
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  The actual register-saving code comes from the
   target hook; this wrapper only caches and places it.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4549
4550 /* __builtin_args_info (N) returns word N of the arg space info
4551 for the current function. The number and meanings of words
4552 is controlled by the definition of CUMULATIVE_ARGS. */
4553
4554 static rtx
4555 expand_builtin_args_info (tree exp)
4556 {
4557 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4558 int *word_ptr = (int *) &current_function_args_info;
4559
4560 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4561
4562 if (call_expr_nargs (exp) != 0)
4563 {
4564 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4565 error ("argument of %<__builtin_args_info%> must be constant");
4566 else
4567 {
4568 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4569
4570 if (wordnum < 0 || wordnum >= nwords)
4571 error ("argument of %<__builtin_args_info%> out of range");
4572 else
4573 return GEN_INT (word_ptr[wordnum]);
4574 }
4575 }
4576 else
4577 error ("missing argument in %<__builtin_args_info%>");
4578
4579 return const0_rtx;
4580 }
4581
4582 /* Expand a call to __builtin_next_arg. */
4583
4584 static rtx
4585 expand_builtin_next_arg (void)
4586 {
4587 /* Checking arguments is already done in fold_builtin_next_arg
4588 that must be called before this function. */
4589 return expand_binop (Pmode, add_optab,
4590 current_function_internal_arg_pointer,
4591 current_function_arg_offset_rtx,
4592 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4593 }
4594
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  If NEEDS_LVALUE is nonzero the caller
   intends to modify VALIST, so an addressable form is produced.  */

static tree
stabilize_va_list (tree valist, int needs_lvalue)
{
  /* Array-typed va_list: the backends expect a pointer to the element
     type, so decay an actual array to that pointer.  */
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 TREE_TYPE (va_list_type_node), but it's possible we've
	 actually been given an array (an actual va_list_type_node).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	  valist = build_fold_addr_expr_with_type (valist, p1);
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (va_list_type_node);
	  valist = fold_build1 (ADDR_EXPR, pt, valist);
	  /* Mark the ADDR_EXPR so the save_expr below still fires.  */
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      /* Evaluate the address once, then work through the saved copy.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = build_fold_indirect_ref (valist);
    }

  return valist;
}
4637
/* The "standard" definition of va_list is void*.  Targets with fancier
   va_list representations override this hook.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4645
4646 /* The "standard" implementation of va_start: just assign `nextarg' to
4647 the variable. */
4648
4649 void
4650 std_expand_builtin_va_start (tree valist, rtx nextarg)
4651 {
4652 tree t;
4653
4654 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
4655 make_tree (ptr_type_node, nextarg));
4656 TREE_SIDE_EFFECTS (t) = 1;
4657
4658 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4659 }
4660
4661 /* Expand EXP, a call to __builtin_va_start. */
4662
4663 static rtx
4664 expand_builtin_va_start (tree exp)
4665 {
4666 rtx nextarg;
4667 tree valist;
4668
4669 if (call_expr_nargs (exp) < 2)
4670 {
4671 error ("too few arguments to function %<va_start%>");
4672 return const0_rtx;
4673 }
4674
4675 if (fold_builtin_next_arg (exp, true))
4676 return const0_rtx;
4677
4678 nextarg = expand_builtin_next_arg ();
4679 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4680
4681 #ifdef EXPAND_BUILTIN_VA_START
4682 EXPAND_BUILTIN_VA_START (valist, nextarg);
4683 #else
4684 std_expand_builtin_va_start (valist, nextarg);
4685 #endif
4686
4687 return const0_rtx;
4688 }
4689
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.
   Gimplified statements are appended to *PRE_P / *POST_P; the returned
   tree is the dereferenced argument value of TYPE.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference fetch a pointer, dereferenced below.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary-1) & -boundary.  */
      t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
      gimplify_and_add (t, pre_p);

      t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when deferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      t = fold_convert (TREE_TYPE (addr), t);
      addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_convert (TREE_TYPE (valist), rounded_size);
  t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* An extra dereference for arguments passed by reference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4777
/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  The resulting INDIRECT_REF is marked
   via mf_mark so mudflap does not instrument it.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_fold_indirect_ref (addr);

  if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
    mf_mark (addr);

  return addr;
}
4790
4791 /* Return a dummy expression of type TYPE in order to keep going after an
4792 error. */
4793
4794 static tree
4795 dummy_object (tree type)
4796 {
4797 tree t = build_int_cst (build_pointer_type (type), 0);
4798 return build1 (INDIRECT_REF, type, t);
4799 }
4800
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   Returns GS_ERROR on a type mismatch, GS_ALL_DONE when the expression
   has been fully replaced (error recovery, promotion diagnostic, or no
   target hook), and GS_OK after delegating to the target hook.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
{
  tree promoted_type, want_va_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;

  /* Verify that valist is of the proper type.  */
  want_va_type = va_list_type_node;
  have_va_type = TREE_TYPE (valist);

  if (have_va_type == error_mark_node)
    return GS_ERROR;

  if (TREE_CODE (want_va_type) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE
	  || POINTER_TYPE_P (have_va_type))
	{
	  want_va_type = TREE_TYPE (want_va_type);
	  have_va_type = TREE_TYPE (have_va_type);
	}
    }

  if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
    {
      error ("first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      static bool gave_help;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warning (0, "%qT is promoted to %qT when passed through %<...%>",
	       type, promoted_type);
      if (! gave_help)
	{
	  gave_help = true;
	  warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
		   promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      inform ("if this code is reached, the program will abort");
      t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
      append_to_statement_list (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (va_list_type_node), but it's possible we've
	     actually been given an array (an actual va_list_type_node).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	      valist = build_fold_addr_expr_with_type (valist, p1);
	    }
	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      /* Delegate the actual expansion to the target hook.  */
      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4898
4899 /* Expand EXP, a call to __builtin_va_end. */
4900
4901 static rtx
4902 expand_builtin_va_end (tree exp)
4903 {
4904 tree valist = CALL_EXPR_ARG (exp, 0);
4905
4906 /* Evaluate for side effects, if needed. I hate macros that don't
4907 do that. */
4908 if (TREE_SIDE_EFFECTS (valist))
4909 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4910
4911 return const0_rtx;
4912 }
4913
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Protect the operands from multiple evaluation; only DST (second
     argument 1) needs to be writable.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      /* Scalar va_list: a simple assignment performs the copy.  */
      t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: do a block copy of the whole object.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
			  VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4962
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL identifies which of the two is being
   expanded; EXP is the call expression.  Returns const0_rtx (after
   diagnosing) for a missing, non-constant, or unsupported argument.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, force the value into a register
	 unless it is already a register or a constant.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
5009
5010 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5011 we failed and the caller should emit a normal call, otherwise try to get
5012 the result in TARGET, if convenient. */
5013
5014 static rtx
5015 expand_builtin_alloca (tree exp, rtx target)
5016 {
5017 rtx op0;
5018 rtx result;
5019
5020 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5021 should always expand to function calls. These can be intercepted
5022 in libmudflap. */
5023 if (flag_mudflap)
5024 return NULL_RTX;
5025
5026 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5027 return NULL_RTX;
5028
5029 /* Compute the argument. */
5030 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5031
5032 /* Allocate the desired space. */
5033 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5034 result = convert_memory_address (ptr_mode, result);
5035
5036 return result;
5037 }
5038
/* Expand EXP, a call to a bswap builtin.  The byte swap is performed in
   the mode of EXP's argument; if convenient place the result in TARGET,
   using SUBTARGET for computing the operand.  Returns NULL_RTX only when
   the argument list is invalid; otherwise expand_unop is asserted to
   have produced a result.  */

static rtx
expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  target = expand_unop (mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (mode, target, 0);
}
5062
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.
   OP_OPTAB selects the operation; TARGET_MODE is the mode the builtin is
   declared to return.  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, 1);
  /* expand_unop is expected to succeed for the operations routed here.  */
  gcc_assert (target);

  /* The result was computed in the argument's mode; convert it to the
     mode the builtin returns.  */
  return convert_to_mode (target_mode, target, 0);
}
5088
5089 /* If the string passed to fputs is a constant and is one character
5090 long, we attempt to transform this call into __builtin_fputc(). */
5091
5092 static rtx
5093 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5094 {
5095 /* Verify the arguments in the original call. */
5096 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5097 {
5098 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5099 CALL_EXPR_ARG (exp, 1),
5100 (target == const0_rtx),
5101 unlocked, NULL_TREE);
5102 if (result)
5103 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5104 }
5105 return NULL_RTX;
5106 }
5107
5108 /* Expand a call to __builtin_expect. We just return our argument
5109 as the builtin_expect semantic should've been already executed by
5110 tree branch prediction pass. */
5111
5112 static rtx
5113 expand_builtin_expect (tree exp, rtx target)
5114 {
5115 tree arg, c;
5116
5117 if (call_expr_nargs (exp) < 2)
5118 return const0_rtx;
5119 arg = CALL_EXPR_ARG (exp, 0);
5120 c = CALL_EXPR_ARG (exp, 1);
5121
5122 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5123 /* When guessing was done, the hints should be already stripped away. */
5124 gcc_assert (!flag_guess_branch_prob);
5125 return target;
5126 }
5127
/* Expand a trap: emit the target's trap instruction if it has one,
   otherwise call the abort libfunc, and terminate the insn stream
   with a barrier since control does not continue past the trap.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
  emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5139
5140 /* Expand EXP, a call to fabs, fabsf or fabsl.
5141 Return NULL_RTX if a normal call should be emitted rather than expanding
5142 the function inline. If convenient, the result should be placed
5143 in TARGET. SUBTARGET may be used as the target for computing
5144 the operand. */
5145
5146 static rtx
5147 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5148 {
5149 enum machine_mode mode;
5150 tree arg;
5151 rtx op0;
5152
5153 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5154 return NULL_RTX;
5155
5156 arg = CALL_EXPR_ARG (exp, 0);
5157 mode = TYPE_MODE (TREE_TYPE (arg));
5158 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5159 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5160 }
5161
5162 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5163 Return NULL is a normal call should be emitted rather than expanding the
5164 function inline. If convenient, the result should be placed in TARGET.
5165 SUBTARGET may be used as the target for computing the operand. */
5166
5167 static rtx
5168 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5169 {
5170 rtx op0, op1;
5171 tree arg;
5172
5173 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5174 return NULL_RTX;
5175
5176 arg = CALL_EXPR_ARG (exp, 0);
5177 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5178
5179 arg = CALL_EXPR_ARG (exp, 1);
5180 op1 = expand_normal (arg);
5181
5182 return expand_copysign (op0, op1, target);
5183 }
5184
5185 /* Create a new constant string literal and return a char* pointer to it.
5186 The STRING_CST value is the LEN characters at STR. */
5187 tree
5188 build_string_literal (int len, const char *str)
5189 {
5190 tree t, elem, index, type;
5191
5192 t = build_string (len, str);
5193 elem = build_type_variant (char_type_node, 1, 0);
5194 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5195 type = build_array_type (elem, index);
5196 TREE_TYPE (t) = type;
5197 TREE_CONSTANT (t) = 1;
5198 TREE_INVARIANT (t) = 1;
5199 TREE_READONLY (t) = 1;
5200 TREE_STATIC (t) = 1;
5201
5202 type = build_pointer_type (type);
5203 t = build1 (ADDR_EXPR, type, t);
5204
5205 type = build_pointer_type (elem);
5206 t = build1 (NOP_EXPR, type, t);
5207 return t;
5208 }
5209
/* Expand EXP, a call to printf or printf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a printf_unlocked
   call.  Only applied when the call's return value is unused
   (TARGET == const0_rtx) and the format is a literal string.  */
static rtx
expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
		       bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTCHAR];
  tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs == 0)
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if ((nargs != 2)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
	return NULL_RTX;
      if (fn_puts)
	fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
    }
  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 2)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
	return NULL_RTX;
      if (fn_putchar)
	fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 1)
	return NULL_RTX;

      /* If the format specifier was "", printf does nothing.  */
      if (fmt_str[0] == '\0')
	return const0_rtx;
      /* If the format specifier has length of 1, call putchar.  */
      if (fmt_str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  arg = build_int_cst (NULL_TREE, fmt_str[0]);
	  if (fn_putchar)
	    fn = build_call_expr (fn_putchar, 1, arg);
	}
      else
	{
	  /* If the format specifier was "string\n", call puts("string").  */
	  size_t len = strlen (fmt_str);
	  if ((unsigned char)fmt_str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = alloca (len);
	      memcpy (newstr, fmt_str, len - 1);
	      newstr[len - 1] = 0;
	      /* LEN counts the new NUL terminator as well.  */
	      arg = build_string_literal (len, newstr);
	      if (fn_puts)
		fn = build_call_expr (fn_puts, 1, arg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_RTX;
	}
    }

  if (!fn)
    return NULL_RTX;
  /* Keep the tail-call property of the original call.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5317
/* Expand EXP, a call to fprintf or fprintf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a fprintf_unlocked
   call.  Only applied when the call's return value is unused
   (TARGET == const0_rtx) and the format is a literal string.  */
static rtx
expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
			bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, fp, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs < 2)
    return NULL_RTX;
  fp = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fp)))
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 1);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s", call __builtin_fputs(arg,fp).  */
  if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if ((nargs != 3)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, arg, fp);
    }
  /* If the format specifier was "%c", call __builtin_fputc(arg,fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 3)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputc)
	fn = build_call_expr (fn_fputc, 2, arg, fp);
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 2)
	return NULL_RTX;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* Evaluate and ignore FILE* argument for side-effects.  */
	  expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf(stream,string) with fputs(string,stream).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  if (!fn)
    return NULL_RTX;
  /* Keep the tail-call property of the original call.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5410
5411 /* Expand a call EXP to sprintf. Return NULL_RTX if
5412 a normal call should be emitted rather than expanding the function
5413 inline. If convenient, the result should be placed in TARGET with
5414 mode MODE. */
5415
5416 static rtx
5417 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5418 {
5419 tree dest, fmt;
5420 const char *fmt_str;
5421 int nargs = call_expr_nargs (exp);
5422
5423 /* Verify the required arguments in the original call. */
5424 if (nargs < 2)
5425 return NULL_RTX;
5426 dest = CALL_EXPR_ARG (exp, 0);
5427 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5428 return NULL_RTX;
5429 fmt = CALL_EXPR_ARG (exp, 0);
5430 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5431 return NULL_RTX;
5432
5433 /* Check whether the format is a literal string constant. */
5434 fmt_str = c_getstr (fmt);
5435 if (fmt_str == NULL)
5436 return NULL_RTX;
5437
5438 if (!init_target_chars ())
5439 return NULL_RTX;
5440
5441 /* If the format doesn't contain % args or %%, use strcpy. */
5442 if (strchr (fmt_str, target_percent) == 0)
5443 {
5444 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5445 tree exp;
5446
5447 if ((nargs > 2) || ! fn)
5448 return NULL_RTX;
5449 expand_expr (build_call_expr (fn, 2, dest, fmt),
5450 const0_rtx, VOIDmode, EXPAND_NORMAL);
5451 if (target == const0_rtx)
5452 return const0_rtx;
5453 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5454 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5455 }
5456 /* If the format is "%s", use strcpy if the result isn't used. */
5457 else if (strcmp (fmt_str, target_percent_s) == 0)
5458 {
5459 tree fn, arg, len;
5460 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5461
5462 if (! fn)
5463 return NULL_RTX;
5464 if (nargs != 3)
5465 return NULL_RTX;
5466 arg = CALL_EXPR_ARG (exp, 2);
5467 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5468 return NULL_RTX;
5469
5470 if (target != const0_rtx)
5471 {
5472 len = c_strlen (arg, 1);
5473 if (! len || TREE_CODE (len) != INTEGER_CST)
5474 return NULL_RTX;
5475 }
5476 else
5477 len = NULL_TREE;
5478
5479 expand_expr (build_call_expr (fn, 2, dest, arg),
5480 const0_rtx, VOIDmode, EXPAND_NORMAL);
5481
5482 if (target == const0_rtx)
5483 return const0_rtx;
5484 return expand_expr (len, target, mode, EXPAND_NORMAL);
5485 }
5486
5487 return NULL_RTX;
5488 }
5489
5490 /* Expand a call to either the entry or exit function profiler. */
5491
5492 static rtx
5493 expand_builtin_profile_func (bool exitp)
5494 {
5495 rtx this, which;
5496
5497 this = DECL_RTL (current_function_decl);
5498 gcc_assert (MEM_P (this));
5499 this = XEXP (this, 0);
5500
5501 if (exitp)
5502 which = profile_function_exit_libfunc;
5503 else
5504 which = profile_function_entry_libfunc;
5505
5506 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5507 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5508 0),
5509 Pmode);
5510
5511 return const0_rtx;
5512 }
5513
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary:
     tramp = (tramp + align - 1) & -align, computed in Pmode.  */
  temp = gen_reg_rtx (Pmode);
  addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
  mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
5538
/* Expand a call to __builtin_init_trampoline.  EXP carries the
   trampoline block address, the nested function, and the static chain.
   Copies the target's trampoline template into the block (when the
   target defines TRAMPOLINE_TEMPLATE) and lets INITIALIZE_TRAMPOLINE
   patch in the function and chain values.  Returns const0_rtx.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx r_tramp, r_func, r_chain;
#ifdef TRAMPOLINE_TEMPLATE
  rtx blktramp;
#endif

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  r_tramp = round_trampoline_addr (r_tramp);
#ifdef TRAMPOLINE_TEMPLATE
  blktramp = gen_rtx_MEM (BLKmode, r_tramp);
  set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
  emit_block_move (blktramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
#endif
  /* Record that this function needed a trampoline.  */
  trampolines_created = 1;
  INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);

  return const0_rtx;
}
5573
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address to TRAMPOLINE_ALIGNMENT and apply the target's
   TRAMPOLINE_ADJUST_ADDRESS transformation, if it defines one.  */

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
#ifdef TRAMPOLINE_ADJUST_ADDRESS
  TRAMPOLINE_ADJUST_ADDRESS (tramp);
#endif

  return tramp;
}
5590
/* Expand a call to the built-in signbit, signbitf, signbitl, signbitd32,
   signbitd64, or signbitd128 function.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  rtx temp;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
  {
    /* But we can't do this if the format supports signed zero.  */
    if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
      return NULL_RTX;

    arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
		       build_real (TREE_TYPE (arg), dconst0));
    return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  }

  temp = expand_normal (arg);
  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in one word: reinterpret it as an integer.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: extract just the word holding the sign bit.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the double-word constant 1 << bitpos.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (imode != rmode)
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5691
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identificator of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  Returns NULL_RTX (emit a normal call) when
   not profiling; otherwise redirects the call to the matching __gcov_*
   wrapper so profile data survives fork/exec.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Build an extern declaration for the wrapper with the same type as
     the original function, and re-point the call at it.  */
  decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
5755
5756
5757 \f
5758 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5759 the pointer in these functions is void*, the tree optimizers may remove
5760 casts. The mode computed in expand_builtin isn't reliable either, due
5761 to __sync_bool_compare_and_swap.
5762
5763 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5764 group of builtins. This gives us log2 of the mode size. */
5765
5766 static inline enum machine_mode
5767 get_builtin_sync_mode (int fcode_diff)
5768 {
5769 /* The size is not negotiable, so ask not to get BLKmode in return
5770 if the target indicates that a smaller size would be better. */
5771 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5772 }
5773
5774 /* Expand the memory expression LOC and return the appropriate memory operand
5775 for the builtin_sync operations. */
5776
5777 static rtx
5778 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5779 {
5780 rtx addr, mem;
5781
5782 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5783
5784 /* Note that we explicitly do not want any alias information for this
5785 memory, so that we kill all other live memories. Otherwise we don't
5786 satisfy the full barrier semantics of the intrinsic. */
5787 mem = validize_mem (gen_rtx_MEM (mode, addr));
5788
5789 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5790 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5791 MEM_VOLATILE_P (mem) = 1;
5792
5793 return mem;
5794 }
5795
5796 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5797 EXP is the CALL_EXPR. CODE is the rtx code
5798 that corresponds to the arithmetic or logical operation from the name;
5799 an exception here is that NOT actually means NAND. TARGET is an optional
5800 place for us to store the results; AFTER is true if this is the
5801 fetch_and_xxx form. IGNORE is true if we don't actually care about
5802 the result of the operation at all. */
5803
5804 static rtx
5805 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5806 enum rtx_code code, bool after,
5807 rtx target, bool ignore)
5808 {
5809 rtx val, mem;
5810 enum machine_mode old_mode;
5811
5812 /* Expand the operands. */
5813 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5814
5815 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5816 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5817 of CONST_INTs, where we know the old_mode only from the call argument. */
5818 old_mode = GET_MODE (val);
5819 if (old_mode == VOIDmode)
5820 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5821 val = convert_modes (mode, old_mode, val, 1);
5822
5823 if (ignore)
5824 return expand_sync_operation (mem, val, code);
5825 else
5826 return expand_sync_fetch_operation (mem, val, code, after, target);
5827 }
5828
5829 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5830 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5831 true if this is the boolean form. TARGET is a place for us to store the
5832 results; this is NOT optional if IS_BOOL is true. */
5833
5834 static rtx
5835 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5836 bool is_bool, rtx target)
5837 {
5838 rtx old_val, new_val, mem;
5839 enum machine_mode old_mode;
5840
5841 /* Expand the operands. */
5842 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5843
5844
5845 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5846 mode, EXPAND_NORMAL);
5847 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5848 of CONST_INTs, where we know the old_mode only from the call argument. */
5849 old_mode = GET_MODE (old_val);
5850 if (old_mode == VOIDmode)
5851 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5852 old_val = convert_modes (mode, old_mode, old_val, 1);
5853
5854 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5855 mode, EXPAND_NORMAL);
5856 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5857 of CONST_INTs, where we know the old_mode only from the call argument. */
5858 old_mode = GET_MODE (new_val);
5859 if (old_mode == VOIDmode)
5860 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5861 new_val = convert_modes (mode, old_mode, new_val, 1);
5862
5863 if (is_bool)
5864 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5865 else
5866 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5867 }
5868
5869 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5870 general form is actually an atomic exchange, and some targets only
5871 support a reduced form with the second argument being a constant 1.
5872 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5873 the results. */
5874
5875 static rtx
5876 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5877 rtx target)
5878 {
5879 rtx val, mem;
5880 enum machine_mode old_mode;
5881
5882 /* Expand the operands. */
5883 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5884 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5885 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5886 of CONST_INTs, where we know the old_mode only from the call argument. */
5887 old_mode = GET_MODE (val);
5888 if (old_mode == VOIDmode)
5889 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5890 val = convert_modes (mode, old_mode, val, 1);
5891
5892 return expand_sync_lock_test_and_set (mem, val, target);
5893 }
5894
5895 /* Expand the __sync_synchronize intrinsic. */
5896
5897 static void
5898 expand_builtin_synchronize (void)
5899 {
5900 tree x;
5901
5902 #ifdef HAVE_memory_barrier
5903 if (HAVE_memory_barrier)
5904 {
5905 emit_insn (gen_memory_barrier ());
5906 return;
5907 }
5908 #endif
5909
5910 /* If no explicit memory barrier instruction is available, create an
5911 empty asm stmt with a memory clobber. */
5912 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5913 tree_cons (NULL, build_string (6, "memory"), NULL));
5914 ASM_VOLATILE_P (x) = 1;
5915 expand_asm_expr (x);
5916 }
5917
5918 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5919
5920 static void
5921 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5922 {
5923 enum insn_code icode;
5924 rtx mem, insn;
5925 rtx val = const0_rtx;
5926
5927 /* Expand the operands. */
5928 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5929
5930 /* If there is an explicit operation in the md file, use it. */
5931 icode = sync_lock_release[mode];
5932 if (icode != CODE_FOR_nothing)
5933 {
5934 if (!insn_data[icode].operand[1].predicate (val, mode))
5935 val = force_reg (mode, val);
5936
5937 insn = GEN_FCN (icode) (mem, val);
5938 if (insn)
5939 {
5940 emit_insn (insn);
5941 return;
5942 }
5943 }
5944
5945 /* Otherwise we can implement this operation by emitting a barrier
5946 followed by a store of zero. */
5947 expand_builtin_synchronize ();
5948 emit_move_insn (mem, val);
5949 }
5950 \f
5951 /* Expand an expression EXP that calls a built-in function,
5952 with result going to TARGET if that's convenient
5953 (and in mode MODE if that's convenient).
5954 SUBTARGET may be used as the target for computing one of EXP's operands.
5955 IGNORE is nonzero if the value is to be ignored. */
5956
5957 rtx
5958 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5959 int ignore)
5960 {
5961 tree fndecl = get_callee_fndecl (exp);
5962 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5963 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5964
5965 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5966 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5967
5968 /* When not optimizing, generate calls to library functions for a certain
5969 set of builtins. */
5970 if (!optimize
5971 && !called_as_built_in (fndecl)
5972 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5973 && fcode != BUILT_IN_ALLOCA)
5974 return expand_call (exp, target, ignore);
5975
5976 /* The built-in function expanders test for target == const0_rtx
5977 to determine whether the function's result will be ignored. */
5978 if (ignore)
5979 target = const0_rtx;
5980
5981 /* If the result of a pure or const built-in function is ignored, and
5982 none of its arguments are volatile, we can avoid expanding the
5983 built-in call and just evaluate the arguments for side-effects. */
5984 if (target == const0_rtx
5985 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5986 {
5987 bool volatilep = false;
5988 tree arg;
5989 call_expr_arg_iterator iter;
5990
5991 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5992 if (TREE_THIS_VOLATILE (arg))
5993 {
5994 volatilep = true;
5995 break;
5996 }
5997
5998 if (! volatilep)
5999 {
6000 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6001 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6002 return const0_rtx;
6003 }
6004 }
6005
6006 switch (fcode)
6007 {
6008 CASE_FLT_FN (BUILT_IN_FABS):
6009 target = expand_builtin_fabs (exp, target, subtarget);
6010 if (target)
6011 return target;
6012 break;
6013
6014 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6015 target = expand_builtin_copysign (exp, target, subtarget);
6016 if (target)
6017 return target;
6018 break;
6019
6020 /* Just do a normal library call if we were unable to fold
6021 the values. */
6022 CASE_FLT_FN (BUILT_IN_CABS):
6023 break;
6024
6025 CASE_FLT_FN (BUILT_IN_EXP):
6026 CASE_FLT_FN (BUILT_IN_EXP10):
6027 CASE_FLT_FN (BUILT_IN_POW10):
6028 CASE_FLT_FN (BUILT_IN_EXP2):
6029 CASE_FLT_FN (BUILT_IN_EXPM1):
6030 CASE_FLT_FN (BUILT_IN_LOGB):
6031 CASE_FLT_FN (BUILT_IN_LOG):
6032 CASE_FLT_FN (BUILT_IN_LOG10):
6033 CASE_FLT_FN (BUILT_IN_LOG2):
6034 CASE_FLT_FN (BUILT_IN_LOG1P):
6035 CASE_FLT_FN (BUILT_IN_TAN):
6036 CASE_FLT_FN (BUILT_IN_ASIN):
6037 CASE_FLT_FN (BUILT_IN_ACOS):
6038 CASE_FLT_FN (BUILT_IN_ATAN):
6039 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6040 because of possible accuracy problems. */
6041 if (! flag_unsafe_math_optimizations)
6042 break;
6043 CASE_FLT_FN (BUILT_IN_SQRT):
6044 CASE_FLT_FN (BUILT_IN_FLOOR):
6045 CASE_FLT_FN (BUILT_IN_CEIL):
6046 CASE_FLT_FN (BUILT_IN_TRUNC):
6047 CASE_FLT_FN (BUILT_IN_ROUND):
6048 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6049 CASE_FLT_FN (BUILT_IN_RINT):
6050 target = expand_builtin_mathfn (exp, target, subtarget);
6051 if (target)
6052 return target;
6053 break;
6054
6055 CASE_FLT_FN (BUILT_IN_ILOGB):
6056 if (! flag_unsafe_math_optimizations)
6057 break;
6058 CASE_FLT_FN (BUILT_IN_ISINF):
6059 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6060 if (target)
6061 return target;
6062 break;
6063
6064 CASE_FLT_FN (BUILT_IN_LCEIL):
6065 CASE_FLT_FN (BUILT_IN_LLCEIL):
6066 CASE_FLT_FN (BUILT_IN_LFLOOR):
6067 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6068 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6069 if (target)
6070 return target;
6071 break;
6072
6073 CASE_FLT_FN (BUILT_IN_LRINT):
6074 CASE_FLT_FN (BUILT_IN_LLRINT):
6075 CASE_FLT_FN (BUILT_IN_LROUND):
6076 CASE_FLT_FN (BUILT_IN_LLROUND):
6077 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6078 if (target)
6079 return target;
6080 break;
6081
6082 CASE_FLT_FN (BUILT_IN_POW):
6083 target = expand_builtin_pow (exp, target, subtarget);
6084 if (target)
6085 return target;
6086 break;
6087
6088 CASE_FLT_FN (BUILT_IN_POWI):
6089 target = expand_builtin_powi (exp, target, subtarget);
6090 if (target)
6091 return target;
6092 break;
6093
6094 CASE_FLT_FN (BUILT_IN_ATAN2):
6095 CASE_FLT_FN (BUILT_IN_LDEXP):
6096 CASE_FLT_FN (BUILT_IN_SCALB):
6097 CASE_FLT_FN (BUILT_IN_SCALBN):
6098 CASE_FLT_FN (BUILT_IN_SCALBLN):
6099 if (! flag_unsafe_math_optimizations)
6100 break;
6101
6102 CASE_FLT_FN (BUILT_IN_FMOD):
6103 CASE_FLT_FN (BUILT_IN_REMAINDER):
6104 CASE_FLT_FN (BUILT_IN_DREM):
6105 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6106 if (target)
6107 return target;
6108 break;
6109
6110 CASE_FLT_FN (BUILT_IN_CEXPI):
6111 target = expand_builtin_cexpi (exp, target, subtarget);
6112 gcc_assert (target);
6113 return target;
6114
6115 CASE_FLT_FN (BUILT_IN_SIN):
6116 CASE_FLT_FN (BUILT_IN_COS):
6117 if (! flag_unsafe_math_optimizations)
6118 break;
6119 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6120 if (target)
6121 return target;
6122 break;
6123
6124 CASE_FLT_FN (BUILT_IN_SINCOS):
6125 if (! flag_unsafe_math_optimizations)
6126 break;
6127 target = expand_builtin_sincos (exp);
6128 if (target)
6129 return target;
6130 break;
6131
6132 case BUILT_IN_APPLY_ARGS:
6133 return expand_builtin_apply_args ();
6134
6135 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6136 FUNCTION with a copy of the parameters described by
6137 ARGUMENTS, and ARGSIZE. It returns a block of memory
6138 allocated on the stack into which is stored all the registers
6139 that might possibly be used for returning the result of a
6140 function. ARGUMENTS is the value returned by
6141 __builtin_apply_args. ARGSIZE is the number of bytes of
6142 arguments that must be copied. ??? How should this value be
6143 computed? We'll also need a safe worst case value for varargs
6144 functions. */
6145 case BUILT_IN_APPLY:
6146 if (!validate_arglist (exp, POINTER_TYPE,
6147 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6148 && !validate_arglist (exp, REFERENCE_TYPE,
6149 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6150 return const0_rtx;
6151 else
6152 {
6153 rtx ops[3];
6154
6155 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6156 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6157 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6158
6159 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6160 }
6161
6162 /* __builtin_return (RESULT) causes the function to return the
6163 value described by RESULT. RESULT is address of the block of
6164 memory returned by __builtin_apply. */
6165 case BUILT_IN_RETURN:
6166 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6167 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6168 return const0_rtx;
6169
6170 case BUILT_IN_SAVEREGS:
6171 return expand_builtin_saveregs ();
6172
6173 case BUILT_IN_ARGS_INFO:
6174 return expand_builtin_args_info (exp);
6175
6176 /* Return the address of the first anonymous stack arg. */
6177 case BUILT_IN_NEXT_ARG:
6178 if (fold_builtin_next_arg (exp, false))
6179 return const0_rtx;
6180 return expand_builtin_next_arg ();
6181
6182 case BUILT_IN_CLASSIFY_TYPE:
6183 return expand_builtin_classify_type (exp);
6184
6185 case BUILT_IN_CONSTANT_P:
6186 return const0_rtx;
6187
6188 case BUILT_IN_FRAME_ADDRESS:
6189 case BUILT_IN_RETURN_ADDRESS:
6190 return expand_builtin_frame_address (fndecl, exp);
6191
6192 /* Returns the address of the area where the structure is returned.
6193 0 otherwise. */
6194 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6195 if (call_expr_nargs (exp) != 0
6196 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6197 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6198 return const0_rtx;
6199 else
6200 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6201
6202 case BUILT_IN_ALLOCA:
6203 target = expand_builtin_alloca (exp, target);
6204 if (target)
6205 return target;
6206 break;
6207
6208 case BUILT_IN_STACK_SAVE:
6209 return expand_stack_save ();
6210
6211 case BUILT_IN_STACK_RESTORE:
6212 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6213 return const0_rtx;
6214
6215 case BUILT_IN_BSWAP32:
6216 case BUILT_IN_BSWAP64:
6217 target = expand_builtin_bswap (exp, target, subtarget);
6218
6219 if (target)
6220 return target;
6221 break;
6222
6223 CASE_INT_FN (BUILT_IN_FFS):
6224 case BUILT_IN_FFSIMAX:
6225 target = expand_builtin_unop (target_mode, exp, target,
6226 subtarget, ffs_optab);
6227 if (target)
6228 return target;
6229 break;
6230
6231 CASE_INT_FN (BUILT_IN_CLZ):
6232 case BUILT_IN_CLZIMAX:
6233 target = expand_builtin_unop (target_mode, exp, target,
6234 subtarget, clz_optab);
6235 if (target)
6236 return target;
6237 break;
6238
6239 CASE_INT_FN (BUILT_IN_CTZ):
6240 case BUILT_IN_CTZIMAX:
6241 target = expand_builtin_unop (target_mode, exp, target,
6242 subtarget, ctz_optab);
6243 if (target)
6244 return target;
6245 break;
6246
6247 CASE_INT_FN (BUILT_IN_POPCOUNT):
6248 case BUILT_IN_POPCOUNTIMAX:
6249 target = expand_builtin_unop (target_mode, exp, target,
6250 subtarget, popcount_optab);
6251 if (target)
6252 return target;
6253 break;
6254
6255 CASE_INT_FN (BUILT_IN_PARITY):
6256 case BUILT_IN_PARITYIMAX:
6257 target = expand_builtin_unop (target_mode, exp, target,
6258 subtarget, parity_optab);
6259 if (target)
6260 return target;
6261 break;
6262
6263 case BUILT_IN_STRLEN:
6264 target = expand_builtin_strlen (exp, target, target_mode);
6265 if (target)
6266 return target;
6267 break;
6268
6269 case BUILT_IN_STRCPY:
6270 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6271 if (target)
6272 return target;
6273 break;
6274
6275 case BUILT_IN_STRNCPY:
6276 target = expand_builtin_strncpy (exp, target, mode);
6277 if (target)
6278 return target;
6279 break;
6280
6281 case BUILT_IN_STPCPY:
6282 target = expand_builtin_stpcpy (exp, target, mode);
6283 if (target)
6284 return target;
6285 break;
6286
6287 case BUILT_IN_STRCAT:
6288 target = expand_builtin_strcat (fndecl, exp, target, mode);
6289 if (target)
6290 return target;
6291 break;
6292
6293 case BUILT_IN_STRNCAT:
6294 target = expand_builtin_strncat (exp, target, mode);
6295 if (target)
6296 return target;
6297 break;
6298
6299 case BUILT_IN_STRSPN:
6300 target = expand_builtin_strspn (exp, target, mode);
6301 if (target)
6302 return target;
6303 break;
6304
6305 case BUILT_IN_STRCSPN:
6306 target = expand_builtin_strcspn (exp, target, mode);
6307 if (target)
6308 return target;
6309 break;
6310
6311 case BUILT_IN_STRSTR:
6312 target = expand_builtin_strstr (exp, target, mode);
6313 if (target)
6314 return target;
6315 break;
6316
6317 case BUILT_IN_STRPBRK:
6318 target = expand_builtin_strpbrk (exp, target, mode);
6319 if (target)
6320 return target;
6321 break;
6322
6323 case BUILT_IN_INDEX:
6324 case BUILT_IN_STRCHR:
6325 target = expand_builtin_strchr (exp, target, mode);
6326 if (target)
6327 return target;
6328 break;
6329
6330 case BUILT_IN_RINDEX:
6331 case BUILT_IN_STRRCHR:
6332 target = expand_builtin_strrchr (exp, target, mode);
6333 if (target)
6334 return target;
6335 break;
6336
6337 case BUILT_IN_MEMCPY:
6338 target = expand_builtin_memcpy (exp, target, mode);
6339 if (target)
6340 return target;
6341 break;
6342
6343 case BUILT_IN_MEMPCPY:
6344 target = expand_builtin_mempcpy (exp, target, mode);
6345 if (target)
6346 return target;
6347 break;
6348
6349 case BUILT_IN_MEMMOVE:
6350 target = expand_builtin_memmove (exp, target, mode, ignore);
6351 if (target)
6352 return target;
6353 break;
6354
6355 case BUILT_IN_BCOPY:
6356 target = expand_builtin_bcopy (exp, ignore);
6357 if (target)
6358 return target;
6359 break;
6360
6361 case BUILT_IN_MEMSET:
6362 target = expand_builtin_memset (exp, target, mode);
6363 if (target)
6364 return target;
6365 break;
6366
6367 case BUILT_IN_BZERO:
6368 target = expand_builtin_bzero (exp);
6369 if (target)
6370 return target;
6371 break;
6372
6373 case BUILT_IN_STRCMP:
6374 target = expand_builtin_strcmp (exp, target, mode);
6375 if (target)
6376 return target;
6377 break;
6378
6379 case BUILT_IN_STRNCMP:
6380 target = expand_builtin_strncmp (exp, target, mode);
6381 if (target)
6382 return target;
6383 break;
6384
6385 case BUILT_IN_MEMCHR:
6386 target = expand_builtin_memchr (exp, target, mode);
6387 if (target)
6388 return target;
6389 break;
6390
6391 case BUILT_IN_BCMP:
6392 case BUILT_IN_MEMCMP:
6393 target = expand_builtin_memcmp (exp, target, mode);
6394 if (target)
6395 return target;
6396 break;
6397
6398 case BUILT_IN_SETJMP:
6399 /* This should have been lowered to the builtins below. */
6400 gcc_unreachable ();
6401
6402 case BUILT_IN_SETJMP_SETUP:
6403 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6404 and the receiver label. */
6405 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6406 {
6407 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6408 VOIDmode, EXPAND_NORMAL);
6409 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6410 rtx label_r = label_rtx (label);
6411
6412 /* This is copied from the handling of non-local gotos. */
6413 expand_builtin_setjmp_setup (buf_addr, label_r);
6414 nonlocal_goto_handler_labels
6415 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6416 nonlocal_goto_handler_labels);
6417 /* ??? Do not let expand_label treat us as such since we would
6418 not want to be both on the list of non-local labels and on
6419 the list of forced labels. */
6420 FORCED_LABEL (label) = 0;
6421 return const0_rtx;
6422 }
6423 break;
6424
6425 case BUILT_IN_SETJMP_DISPATCHER:
6426 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6427 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6428 {
6429 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6430 rtx label_r = label_rtx (label);
6431
6432 /* Remove the dispatcher label from the list of non-local labels
6433 since the receiver labels have been added to it above. */
6434 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6435 return const0_rtx;
6436 }
6437 break;
6438
6439 case BUILT_IN_SETJMP_RECEIVER:
6440 /* __builtin_setjmp_receiver is passed the receiver label. */
6441 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6442 {
6443 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6444 rtx label_r = label_rtx (label);
6445
6446 expand_builtin_setjmp_receiver (label_r);
6447 return const0_rtx;
6448 }
6449 break;
6450
6451 /* __builtin_longjmp is passed a pointer to an array of five words.
6452 It's similar to the C library longjmp function but works with
6453 __builtin_setjmp above. */
6454 case BUILT_IN_LONGJMP:
6455 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6456 {
6457 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6458 VOIDmode, EXPAND_NORMAL);
6459 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6460
6461 if (value != const1_rtx)
6462 {
6463 error ("%<__builtin_longjmp%> second argument must be 1");
6464 return const0_rtx;
6465 }
6466
6467 expand_builtin_longjmp (buf_addr, value);
6468 return const0_rtx;
6469 }
6470 break;
6471
6472 case BUILT_IN_NONLOCAL_GOTO:
6473 target = expand_builtin_nonlocal_goto (exp);
6474 if (target)
6475 return target;
6476 break;
6477
6478 /* This updates the setjmp buffer that is its argument with the value
6479 of the current stack pointer. */
6480 case BUILT_IN_UPDATE_SETJMP_BUF:
6481 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6482 {
6483 rtx buf_addr
6484 = expand_normal (CALL_EXPR_ARG (exp, 0));
6485
6486 expand_builtin_update_setjmp_buf (buf_addr);
6487 return const0_rtx;
6488 }
6489 break;
6490
6491 case BUILT_IN_TRAP:
6492 expand_builtin_trap ();
6493 return const0_rtx;
6494
6495 case BUILT_IN_PRINTF:
6496 target = expand_builtin_printf (exp, target, mode, false);
6497 if (target)
6498 return target;
6499 break;
6500
6501 case BUILT_IN_PRINTF_UNLOCKED:
6502 target = expand_builtin_printf (exp, target, mode, true);
6503 if (target)
6504 return target;
6505 break;
6506
6507 case BUILT_IN_FPUTS:
6508 target = expand_builtin_fputs (exp, target, false);
6509 if (target)
6510 return target;
6511 break;
6512 case BUILT_IN_FPUTS_UNLOCKED:
6513 target = expand_builtin_fputs (exp, target, true);
6514 if (target)
6515 return target;
6516 break;
6517
6518 case BUILT_IN_FPRINTF:
6519 target = expand_builtin_fprintf (exp, target, mode, false);
6520 if (target)
6521 return target;
6522 break;
6523
6524 case BUILT_IN_FPRINTF_UNLOCKED:
6525 target = expand_builtin_fprintf (exp, target, mode, true);
6526 if (target)
6527 return target;
6528 break;
6529
6530 case BUILT_IN_SPRINTF:
6531 target = expand_builtin_sprintf (exp, target, mode);
6532 if (target)
6533 return target;
6534 break;
6535
6536 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6537 case BUILT_IN_SIGNBITD32:
6538 case BUILT_IN_SIGNBITD64:
6539 case BUILT_IN_SIGNBITD128:
6540 target = expand_builtin_signbit (exp, target);
6541 if (target)
6542 return target;
6543 break;
6544
6545 /* Various hooks for the DWARF 2 __throw routine. */
6546 case BUILT_IN_UNWIND_INIT:
6547 expand_builtin_unwind_init ();
6548 return const0_rtx;
6549 case BUILT_IN_DWARF_CFA:
6550 return virtual_cfa_rtx;
6551 #ifdef DWARF2_UNWIND_INFO
6552 case BUILT_IN_DWARF_SP_COLUMN:
6553 return expand_builtin_dwarf_sp_column ();
6554 case BUILT_IN_INIT_DWARF_REG_SIZES:
6555 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6556 return const0_rtx;
6557 #endif
6558 case BUILT_IN_FROB_RETURN_ADDR:
6559 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6560 case BUILT_IN_EXTRACT_RETURN_ADDR:
6561 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6562 case BUILT_IN_EH_RETURN:
6563 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6564 CALL_EXPR_ARG (exp, 1));
6565 return const0_rtx;
6566 #ifdef EH_RETURN_DATA_REGNO
6567 case BUILT_IN_EH_RETURN_DATA_REGNO:
6568 return expand_builtin_eh_return_data_regno (exp);
6569 #endif
6570 case BUILT_IN_EXTEND_POINTER:
6571 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6572
6573 case BUILT_IN_VA_START:
6574 case BUILT_IN_STDARG_START:
6575 return expand_builtin_va_start (exp);
6576 case BUILT_IN_VA_END:
6577 return expand_builtin_va_end (exp);
6578 case BUILT_IN_VA_COPY:
6579 return expand_builtin_va_copy (exp);
6580 case BUILT_IN_EXPECT:
6581 return expand_builtin_expect (exp, target);
6582 case BUILT_IN_PREFETCH:
6583 expand_builtin_prefetch (exp);
6584 return const0_rtx;
6585
6586 case BUILT_IN_PROFILE_FUNC_ENTER:
6587 return expand_builtin_profile_func (false);
6588 case BUILT_IN_PROFILE_FUNC_EXIT:
6589 return expand_builtin_profile_func (true);
6590
6591 case BUILT_IN_INIT_TRAMPOLINE:
6592 return expand_builtin_init_trampoline (exp);
6593 case BUILT_IN_ADJUST_TRAMPOLINE:
6594 return expand_builtin_adjust_trampoline (exp);
6595
6596 case BUILT_IN_FORK:
6597 case BUILT_IN_EXECL:
6598 case BUILT_IN_EXECV:
6599 case BUILT_IN_EXECLP:
6600 case BUILT_IN_EXECLE:
6601 case BUILT_IN_EXECVP:
6602 case BUILT_IN_EXECVE:
6603 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6604 if (target)
6605 return target;
6606 break;
6607
6608 case BUILT_IN_FETCH_AND_ADD_1:
6609 case BUILT_IN_FETCH_AND_ADD_2:
6610 case BUILT_IN_FETCH_AND_ADD_4:
6611 case BUILT_IN_FETCH_AND_ADD_8:
6612 case BUILT_IN_FETCH_AND_ADD_16:
6613 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6614 target = expand_builtin_sync_operation (mode, exp, PLUS,
6615 false, target, ignore);
6616 if (target)
6617 return target;
6618 break;
6619
6620 case BUILT_IN_FETCH_AND_SUB_1:
6621 case BUILT_IN_FETCH_AND_SUB_2:
6622 case BUILT_IN_FETCH_AND_SUB_4:
6623 case BUILT_IN_FETCH_AND_SUB_8:
6624 case BUILT_IN_FETCH_AND_SUB_16:
6625 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6626 target = expand_builtin_sync_operation (mode, exp, MINUS,
6627 false, target, ignore);
6628 if (target)
6629 return target;
6630 break;
6631
6632 case BUILT_IN_FETCH_AND_OR_1:
6633 case BUILT_IN_FETCH_AND_OR_2:
6634 case BUILT_IN_FETCH_AND_OR_4:
6635 case BUILT_IN_FETCH_AND_OR_8:
6636 case BUILT_IN_FETCH_AND_OR_16:
6637 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6638 target = expand_builtin_sync_operation (mode, exp, IOR,
6639 false, target, ignore);
6640 if (target)
6641 return target;
6642 break;
6643
6644 case BUILT_IN_FETCH_AND_AND_1:
6645 case BUILT_IN_FETCH_AND_AND_2:
6646 case BUILT_IN_FETCH_AND_AND_4:
6647 case BUILT_IN_FETCH_AND_AND_8:
6648 case BUILT_IN_FETCH_AND_AND_16:
6649 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6650 target = expand_builtin_sync_operation (mode, exp, AND,
6651 false, target, ignore);
6652 if (target)
6653 return target;
6654 break;
6655
6656 case BUILT_IN_FETCH_AND_XOR_1:
6657 case BUILT_IN_FETCH_AND_XOR_2:
6658 case BUILT_IN_FETCH_AND_XOR_4:
6659 case BUILT_IN_FETCH_AND_XOR_8:
6660 case BUILT_IN_FETCH_AND_XOR_16:
6661 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6662 target = expand_builtin_sync_operation (mode, exp, XOR,
6663 false, target, ignore);
6664 if (target)
6665 return target;
6666 break;
6667
6668 case BUILT_IN_FETCH_AND_NAND_1:
6669 case BUILT_IN_FETCH_AND_NAND_2:
6670 case BUILT_IN_FETCH_AND_NAND_4:
6671 case BUILT_IN_FETCH_AND_NAND_8:
6672 case BUILT_IN_FETCH_AND_NAND_16:
6673 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6674 target = expand_builtin_sync_operation (mode, exp, NOT,
6675 false, target, ignore);
6676 if (target)
6677 return target;
6678 break;
6679
6680 case BUILT_IN_ADD_AND_FETCH_1:
6681 case BUILT_IN_ADD_AND_FETCH_2:
6682 case BUILT_IN_ADD_AND_FETCH_4:
6683 case BUILT_IN_ADD_AND_FETCH_8:
6684 case BUILT_IN_ADD_AND_FETCH_16:
6685 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6686 target = expand_builtin_sync_operation (mode, exp, PLUS,
6687 true, target, ignore);
6688 if (target)
6689 return target;
6690 break;
6691
6692 case BUILT_IN_SUB_AND_FETCH_1:
6693 case BUILT_IN_SUB_AND_FETCH_2:
6694 case BUILT_IN_SUB_AND_FETCH_4:
6695 case BUILT_IN_SUB_AND_FETCH_8:
6696 case BUILT_IN_SUB_AND_FETCH_16:
6697 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6698 target = expand_builtin_sync_operation (mode, exp, MINUS,
6699 true, target, ignore);
6700 if (target)
6701 return target;
6702 break;
6703
6704 case BUILT_IN_OR_AND_FETCH_1:
6705 case BUILT_IN_OR_AND_FETCH_2:
6706 case BUILT_IN_OR_AND_FETCH_4:
6707 case BUILT_IN_OR_AND_FETCH_8:
6708 case BUILT_IN_OR_AND_FETCH_16:
6709 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6710 target = expand_builtin_sync_operation (mode, exp, IOR,
6711 true, target, ignore);
6712 if (target)
6713 return target;
6714 break;
6715
6716 case BUILT_IN_AND_AND_FETCH_1:
6717 case BUILT_IN_AND_AND_FETCH_2:
6718 case BUILT_IN_AND_AND_FETCH_4:
6719 case BUILT_IN_AND_AND_FETCH_8:
6720 case BUILT_IN_AND_AND_FETCH_16:
6721 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6722 target = expand_builtin_sync_operation (mode, exp, AND,
6723 true, target, ignore);
6724 if (target)
6725 return target;
6726 break;
6727
6728 case BUILT_IN_XOR_AND_FETCH_1:
6729 case BUILT_IN_XOR_AND_FETCH_2:
6730 case BUILT_IN_XOR_AND_FETCH_4:
6731 case BUILT_IN_XOR_AND_FETCH_8:
6732 case BUILT_IN_XOR_AND_FETCH_16:
6733 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6734 target = expand_builtin_sync_operation (mode, exp, XOR,
6735 true, target, ignore);
6736 if (target)
6737 return target;
6738 break;
6739
6740 case BUILT_IN_NAND_AND_FETCH_1:
6741 case BUILT_IN_NAND_AND_FETCH_2:
6742 case BUILT_IN_NAND_AND_FETCH_4:
6743 case BUILT_IN_NAND_AND_FETCH_8:
6744 case BUILT_IN_NAND_AND_FETCH_16:
6745 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6746 target = expand_builtin_sync_operation (mode, exp, NOT,
6747 true, target, ignore);
6748 if (target)
6749 return target;
6750 break;
6751
6752 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6753 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6754 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6755 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6756 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6757 if (mode == VOIDmode)
6758 mode = TYPE_MODE (boolean_type_node);
6759 if (!target || !register_operand (target, mode))
6760 target = gen_reg_rtx (mode);
6761
6762 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6763 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6764 if (target)
6765 return target;
6766 break;
6767
6768 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6769 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6770 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6771 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6772 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6773 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6774 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6775 if (target)
6776 return target;
6777 break;
6778
6779 case BUILT_IN_LOCK_TEST_AND_SET_1:
6780 case BUILT_IN_LOCK_TEST_AND_SET_2:
6781 case BUILT_IN_LOCK_TEST_AND_SET_4:
6782 case BUILT_IN_LOCK_TEST_AND_SET_8:
6783 case BUILT_IN_LOCK_TEST_AND_SET_16:
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6785 target = expand_builtin_lock_test_and_set (mode, exp, target);
6786 if (target)
6787 return target;
6788 break;
6789
6790 case BUILT_IN_LOCK_RELEASE_1:
6791 case BUILT_IN_LOCK_RELEASE_2:
6792 case BUILT_IN_LOCK_RELEASE_4:
6793 case BUILT_IN_LOCK_RELEASE_8:
6794 case BUILT_IN_LOCK_RELEASE_16:
6795 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6796 expand_builtin_lock_release (mode, exp);
6797 return const0_rtx;
6798
6799 case BUILT_IN_SYNCHRONIZE:
6800 expand_builtin_synchronize ();
6801 return const0_rtx;
6802
6803 case BUILT_IN_OBJECT_SIZE:
6804 return expand_builtin_object_size (exp);
6805
6806 case BUILT_IN_MEMCPY_CHK:
6807 case BUILT_IN_MEMPCPY_CHK:
6808 case BUILT_IN_MEMMOVE_CHK:
6809 case BUILT_IN_MEMSET_CHK:
6810 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6811 if (target)
6812 return target;
6813 break;
6814
6815 case BUILT_IN_STRCPY_CHK:
6816 case BUILT_IN_STPCPY_CHK:
6817 case BUILT_IN_STRNCPY_CHK:
6818 case BUILT_IN_STRCAT_CHK:
6819 case BUILT_IN_STRNCAT_CHK:
6820 case BUILT_IN_SNPRINTF_CHK:
6821 case BUILT_IN_VSNPRINTF_CHK:
6822 maybe_emit_chk_warning (exp, fcode);
6823 break;
6824
6825 case BUILT_IN_SPRINTF_CHK:
6826 case BUILT_IN_VSPRINTF_CHK:
6827 maybe_emit_sprintf_chk_warning (exp, fcode);
6828 break;
6829
6830 default: /* just do library call, if unknown builtin */
6831 break;
6832 }
6833
6834 /* The switch statement above can drop through to cause the function
6835 to be called normally. */
6836 return expand_call (exp, target, ignore);
6837 }
6838
6839 /* Determine whether a tree node represents a call to a built-in
6840 function. If the tree T is a call to a built-in function with
6841 the right number of arguments of the appropriate types, return
6842 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6843 Otherwise the return value is END_BUILTINS. */
6844
enum built_in_function
builtin_mathfn_code (tree t)
{
  tree fndecl, arg, parmlist;
  tree argtype, parmtype;
  call_expr_arg_iterator iter;

  /* Only a direct call through the address of a function can be a
     builtin call.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* The callee must be a normal (non-machine-dependent) builtin
     FUNCTION_DECL.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual arguments in
     lockstep, checking that each argument's type class matches the
     corresponding parameter's.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Any leftover actual arguments mean the call doesn't match
	     the prototype.  */
	  if (more_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments.  */
      if (! more_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      /* Require the argument's type to be in the same broad class as
	 the parameter's; only these four classes are accepted.  */
      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
6910
6911 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6912 evaluate to a constant. */
6913
6914 static tree
6915 fold_builtin_constant_p (tree arg)
6916 {
6917 /* We return 1 for a numeric type that's known to be a constant
6918 value at compile-time or for an aggregate type that's a
6919 literal constant. */
6920 STRIP_NOPS (arg);
6921
6922 /* If we know this is a constant, emit the constant of one. */
6923 if (CONSTANT_CLASS_P (arg)
6924 || (TREE_CODE (arg) == CONSTRUCTOR
6925 && TREE_CONSTANT (arg)))
6926 return integer_one_node;
6927 if (TREE_CODE (arg) == ADDR_EXPR)
6928 {
6929 tree op = TREE_OPERAND (arg, 0);
6930 if (TREE_CODE (op) == STRING_CST
6931 || (TREE_CODE (op) == ARRAY_REF
6932 && integer_zerop (TREE_OPERAND (op, 1))
6933 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6934 return integer_one_node;
6935 }
6936
6937 /* If this expression has side effects, show we don't know it to be a
6938 constant. Likewise if it's a pointer or aggregate type since in
6939 those case we only want literals, since those are only optimized
6940 when generating RTL, not later.
6941 And finally, if we are compiling an initializer, not code, we
6942 need to return a definite result now; there's not going to be any
6943 more optimization done. */
6944 if (TREE_SIDE_EFFECTS (arg)
6945 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6946 || POINTER_TYPE_P (TREE_TYPE (arg))
6947 || cfun == 0
6948 || folding_initializer)
6949 return integer_zero_node;
6950
6951 return NULL_TREE;
6952 }
6953
6954 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
6955 comparison against the argument will fold to a constant. In practice,
6956 this means a true constant or the address of a non-weak symbol. */
6957
6958 static tree
6959 fold_builtin_expect (tree arg)
6960 {
6961 tree inner;
6962
6963 /* If the argument isn't invariant, then there's nothing we can do. */
6964 if (!TREE_INVARIANT (arg))
6965 return NULL_TREE;
6966
6967 /* If we're looking at an address of a weak decl, then do not fold. */
6968 inner = arg;
6969 STRIP_NOPS (inner);
6970 if (TREE_CODE (inner) == ADDR_EXPR)
6971 {
6972 do
6973 {
6974 inner = TREE_OPERAND (inner, 0);
6975 }
6976 while (TREE_CODE (inner) == COMPONENT_REF
6977 || TREE_CODE (inner) == ARRAY_REF);
6978 if (DECL_P (inner) && DECL_WEAK (inner))
6979 return NULL_TREE;
6980 }
6981
6982 /* Otherwise, ARG already has the proper type for the return value. */
6983 return arg;
6984 }
6985
6986 /* Fold a call to __builtin_classify_type with argument ARG. */
6987
6988 static tree
6989 fold_builtin_classify_type (tree arg)
6990 {
6991 if (arg == 0)
6992 return build_int_cst (NULL_TREE, no_type_class);
6993
6994 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6995 }
6996
6997 /* Fold a call to __builtin_strlen with argument ARG. */
6998
6999 static tree
7000 fold_builtin_strlen (tree arg)
7001 {
7002 if (!validate_arg (arg, POINTER_TYPE))
7003 return NULL_TREE;
7004 else
7005 {
7006 tree len = c_strlen (arg, 0);
7007
7008 if (len)
7009 {
7010 /* Convert from the internal "sizetype" type to "size_t". */
7011 if (size_type_node)
7012 len = fold_convert (size_type_node, len);
7013 return len;
7014 }
7015
7016 return NULL_TREE;
7017 }
7018 }
7019
7020 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7021
static tree
fold_builtin_inf (tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn ("target format does not support infinity");

  /* Build an infinity constant of the requested TYPE.  */
  real_inf (&real);
  return build_real (type, real);
}
7040
7041 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7042
7043 static tree
7044 fold_builtin_nan (tree arg, tree type, int quiet)
7045 {
7046 REAL_VALUE_TYPE real;
7047 const char *str;
7048
7049 if (!validate_arg (arg, POINTER_TYPE))
7050 return NULL_TREE;
7051 str = c_getstr (arg);
7052 if (!str)
7053 return NULL_TREE;
7054
7055 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7056 return NULL_TREE;
7057
7058 return build_real (type, real);
7059 }
7060
7061 /* Return true if the floating point expression T has an integer value.
7062 We also allow +Inf, -Inf and NaN to be considered integer values. */
7063
static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* A conversion from an integer type is integral by construction.  */
    case FLOAT_EXPR:
      return true;

    /* These pass the property of their (sole relevant) operand
       through unchanged.  */
    case ABS_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* The value of these nodes is their second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));

    /* These arithmetic operations on integral operands yield an
       integral result (Inf and NaN count as integral here).  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* A conditional is integral when both of its arms are.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    /* A real constant can be inspected directly.  */
    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* A conversion from an integer type is integral; a conversion
	   from a real type is integral iff its operand was.  Other
	   source types fall through to "don't know".  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* The rounding builtins always produce integral values.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax return one of their operands, so the result is
	   integral when both operands are.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Conservative default: not known to be integer valued.  */
  return false;
}
7133
7134 /* FNDECL is assumed to be a builtin where truncation can be propagated
7135 across (for instance floor((double)f) == (double)floorf (f).
7136 Do the transformation for a call with argument ARG. */
7137
static tree
fold_trunc_transparent_mathfn (tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* If ARG is really a narrower float widened to this precision,
	 e.g. floor ((double) f), call the narrower variant and widen
	 the result: (double) floorf (f).  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      /* Only do this when a genuinely narrower builtin exists.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert (ftype,
			     build_call_expr (decl, 1,
					      fold_convert (newtype, arg0)));
    }
  return NULL_TREE;
}
7170
7171 /* FNDECL is assumed to be builtin which can narrow the FP type of
7172 the argument, for instance lround((double)f) -> lroundf (f).
7173 Do the transformation for a call with argument ARG. */
7174
static tree
fold_fixed_mathfn (tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow the FP argument, e.g. lround ((double) f) -> lroundf (f),
	 when a narrower builtin variant exists.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr (decl, 1, fold_convert (newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert the long result back to the long long return type
	     of the original builtin.  */
	  tree newcall = build_call_expr(newfn, 1, arg);
	  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
7237
7238 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7239 return type. Return NULL_TREE if no simplification can be made. */
7240
static tree
fold_builtin_cabs (tree arg, tree type, tree fndecl)
{
  tree res;

  /* The argument must be a complex value with real parts.  */
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs (re + im*i) == hypot (re, im).  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1 (ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1 (ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  /* sqrt(2) truncated to the precision of TYPE.  */
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
	  STRIP_NOPS (real);
	  return fold_build2 (MULT_EXPR, type,
			      fold_build1 (ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs (z) as sqrt (re*re + im*im).  NOTE(review): guarded by
     -funsafe-math-optimizations, presumably because the naive formula
     is not overflow/underflow safe.  Don't do this when optimizing for
     size.  */
  if (flag_unsafe_math_optimizations
      && optimize && !optimize_size)
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save ARG so it is evaluated only once for both parts.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1 (REALPART_EXPR, type, arg);
	  ipart = fold_build1 (IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2 (PLUS_EXPR, type,
				fold_build2 (MULT_EXPR, type,
					     rpart, rpart),
				fold_build2 (MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr (sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7315
7316 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7317 Return NULL_TREE if no simplification can be made. */
7318
static tree
fold_builtin_sqrt (tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant (rejecting
     negative arguments, hence the &dconst0 lower bound).  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2 (MULT_EXPR, type,
			 CALL_EXPR_ARG (arg, 0),
			 build_real (type, dconsthalf));
      return build_call_expr (expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  REAL_VALUE_TYPE dconstroot =
	    BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;

	  /* Adjust for the outer root: halve the exponent of the
	     inner root's constant (1/2 -> 1/4, 1/3 -> 1/6).  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr (powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* The absolute value is needed unless x is known nonnegative.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2 (MULT_EXPR, type, arg1,
			   build_real (type, dconsthalf));
      return build_call_expr (powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7384
7385 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7386 Return NULL_TREE if no simplification can be made. */
7387
static tree
fold_builtin_cbrt (tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  /* All of the following transformations reassociate exponents and are
     only valid under -funsafe-math-optimizations.  */
  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconstthird);
	  arg = fold_build2 (MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, third_trunc));
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      /* Halving the exponent of 1/3 yields 1/6.  */
	      REAL_VALUE_TYPE dconstroot = dconstthird;

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr (powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr (powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconstthird);
	      tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
					 build_real (type, dconstroot));
	      return build_call_expr (powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7474
7475 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7476 TYPE is the type of the return value. Return NULL_TREE if no
7477 simplification can be made. */
7478
7479 static tree
7480 fold_builtin_cos (tree arg, tree type, tree fndecl)
7481 {
7482 tree res, narg;
7483
7484 if (!validate_arg (arg, REAL_TYPE))
7485 return NULL_TREE;
7486
7487 /* Calculate the result when the argument is a constant. */
7488 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7489 return res;
7490
7491 /* Optimize cos(-x) into cos (x). */
7492 if ((narg = fold_strip_sign_ops (arg)))
7493 return build_call_expr (fndecl, 1, narg);
7494
7495 return NULL_TREE;
7496 }
7497
7498 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7499 Return NULL_TREE if no simplification can be made. */
7500
7501 static tree
7502 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7503 {
7504 if (validate_arg (arg, REAL_TYPE))
7505 {
7506 tree res, narg;
7507
7508 /* Calculate the result when the argument is a constant. */
7509 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7510 return res;
7511
7512 /* Optimize cosh(-x) into cosh (x). */
7513 if ((narg = fold_strip_sign_ops (arg)))
7514 return build_call_expr (fndecl, 1, narg);
7515 }
7516
7517 return NULL_TREE;
7518 }
7519
7520 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7521 Return NULL_TREE if no simplification can be made. */
7522
7523 static tree
7524 fold_builtin_tan (tree arg, tree type)
7525 {
7526 enum built_in_function fcode;
7527 tree res;
7528
7529 if (!validate_arg (arg, REAL_TYPE))
7530 return NULL_TREE;
7531
7532 /* Calculate the result when the argument is a constant. */
7533 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7534 return res;
7535
7536 /* Optimize tan(atan(x)) = x. */
7537 fcode = builtin_mathfn_code (arg);
7538 if (flag_unsafe_math_optimizations
7539 && (fcode == BUILT_IN_ATAN
7540 || fcode == BUILT_IN_ATANF
7541 || fcode == BUILT_IN_ATANL))
7542 return CALL_EXPR_ARG (arg, 0);
7543
7544 return NULL_TREE;
7545 }
7546
7547 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7548 NULL_TREE if no simplification can be made. */
7549
static tree
fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  /* sincos takes a real value and two result pointers.  */
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi, but only when the C99 runtime
     provides the complex functions.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Evaluate cexpi (arg0) exactly once...  */
  call = build_call_expr (fn, 1, arg0);
  call = builtin_save_expr (call);

  /* ...then store its imaginary part (sin) through ARG1 and its real
     part (cos) through ARG2, sequenced by a COMPOUND_EXPR.  */
  return build2 (COMPOUND_EXPR, type,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7585
7586 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7587 NULL_TREE if no simplification can be made. */
7588
static tree
fold_builtin_cexp (tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;

  if (!validate_arg (arg0, COMPLEX_TYPE))
    return NULL_TREE;

  /* RTYPE is the component (real) type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  Requires the C99 runtime for cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + i*y) == cexpi (y).  */
  if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
      return build_call_expr (ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls so each is evaluated only once even though
	 their values are used twice below.  */
      icall = build_call_expr (ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr (rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result: exp(r)*real(cexpi(i)) + i * exp(r)*imag(cexpi(i)).  */
      return build2 (COMPLEX_EXPR, type,
		     build2 (MULT_EXPR, rtype,
			     rcall,
			     build1 (REALPART_EXPR, rtype, icall)),
		     build2 (MULT_EXPR, rtype,
			     rcall,
			     build1 (IMAGPART_EXPR, rtype, icall)));
    }

  return NULL_TREE;
}
7645
7646 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7647 Return NULL_TREE if no simplification can be made. */
7648
7649 static tree
7650 fold_builtin_trunc (tree fndecl, tree arg)
7651 {
7652 if (!validate_arg (arg, REAL_TYPE))
7653 return NULL_TREE;
7654
7655 /* Optimize trunc of constant value. */
7656 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7657 {
7658 REAL_VALUE_TYPE r, x;
7659 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7660
7661 x = TREE_REAL_CST (arg);
7662 real_trunc (&r, TYPE_MODE (type), &x);
7663 return build_real (type, r);
7664 }
7665
7666 return fold_trunc_transparent_mathfn (fndecl, arg);
7667 }
7668
7669 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7670 Return NULL_TREE if no simplification can be made. */
7671
static tree
fold_builtin_floor (tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      /* Don't fold a NaN argument when -fmath-errno is in effect, so
	 any library-call side effects (if any) are preserved.  */
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr (truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (fndecl, arg);
}
7704
7705 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7706 Return NULL_TREE if no simplification can be made. */
7707
7708 static tree
7709 fold_builtin_ceil (tree fndecl, tree arg)
7710 {
7711 if (!validate_arg (arg, REAL_TYPE))
7712 return NULL_TREE;
7713
7714 /* Optimize ceil of constant value. */
7715 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7716 {
7717 REAL_VALUE_TYPE x;
7718
7719 x = TREE_REAL_CST (arg);
7720 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7721 {
7722 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7723 REAL_VALUE_TYPE r;
7724
7725 real_ceil (&r, TYPE_MODE (type), &x);
7726 return build_real (type, r);
7727 }
7728 }
7729
7730 return fold_trunc_transparent_mathfn (fndecl, arg);
7731 }
7732
7733 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7734 Return NULL_TREE if no simplification can be made. */
7735
7736 static tree
7737 fold_builtin_round (tree fndecl, tree arg)
7738 {
7739 if (!validate_arg (arg, REAL_TYPE))
7740 return NULL_TREE;
7741
7742 /* Optimize round of constant value. */
7743 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7744 {
7745 REAL_VALUE_TYPE x;
7746
7747 x = TREE_REAL_CST (arg);
7748 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7749 {
7750 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7751 REAL_VALUE_TYPE r;
7752
7753 real_round (&r, TYPE_MODE (type), &x);
7754 return build_real (type, r);
7755 }
7756 }
7757
7758 return fold_trunc_transparent_mathfn (fndecl, arg);
7759 }
7760
7761 /* Fold function call to builtin lround, lroundf or lroundl (or the
7762 corresponding long long versions) and other rounding functions. ARG
7763 is the argument to the call. Return NULL_TREE if no simplification
7764 can be made. */
7765
static tree
fold_builtin_int_roundingfn (tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* NaN and Inf have no meaningful integer value; leave them to
	 the runtime.  */
      if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  unsigned HOST_WIDE_INT lo2;
	  HOST_WIDE_INT hi, lo;
	  REAL_VALUE_TYPE r;

	  /* Round according to which rounding builtin this is.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Only fold when the rounded value fits in the integer
	     result type.  */
	  REAL_VALUE_TO_INT (&lo, &hi, r);
	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
	    return build_int_cst_wide (itype, lo2, hi);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
			    arg);
      break;
    default:;
    }

  /* Otherwise try the generic FP-narrowing foldings.  */
  return fold_fixed_mathfn (fndecl, arg);
}
7826
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  The constant is held as a pair of
     HOST_WIDE_INTs: LO holds the low word, HI the high word.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: one plus the index of the least significant set bit,
	     or zero when the argument is zero.  X & -X isolates the
	     lowest set bit, so exact_log2 gives its index.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo) + 1;
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: number of leading zero bits.  At zero the target may
	     define a value via CLZ_DEFINED_VALUE_AT_ZERO (which then
	     stores into RESULT); otherwise fold to the type width.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: number of trailing zero bits; same at-zero convention
	     as clz, via CTZ_DEFINED_VALUE_AT_ZERO.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: count set bits; X &= X - 1 clears the lowest set
	     bit each iteration.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7918
/* Fold function call to builtin_bswap and the long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      /* LO/HI hold the input's low and high HOST_WIDE_INT words,
	 R_LO/R_HI accumulate the byte-reversed result.  */
      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	  case BUILT_IN_BSWAP32:
	  case BUILT_IN_BSWAP64:
	    {
	      int s;

	      /* Move the byte at bit offset S to the mirrored offset D,
		 picking each from whichever word it lives in.  */
	      for (s = 0; s < width; s += 8)
		{
		  int d = width - s - 8;
		  unsigned HOST_WIDE_INT byte;

		  if (s < HOST_BITS_PER_WIDE_INT)
		    byte = (lo >> s) & 0xff;
		  else
		    byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		  if (d < HOST_BITS_PER_WIDE_INT)
		    r_lo |= byte << d;
		  else
		    r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
		}
	    }

	    break;

	default:
	  gcc_unreachable ();
	}

      /* A narrow result fits in one word; otherwise build a
	 double-word constant.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7977
7978 /* Return true if EXPR is the real constant contained in VALUE. */
7979
7980 static bool
7981 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
7982 {
7983 STRIP_NOPS (expr);
7984
7985 return ((TREE_CODE (expr) == REAL_CST
7986 && !TREE_OVERFLOW (expr)
7987 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
7988 || (TREE_CODE (expr) == COMPLEX_CST
7989 && real_dconstp (TREE_REALPART (expr), value)
7990 && real_zerop (TREE_IMAGPART (expr))));
7991 }
7992
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function.  */

static tree
fold_builtin_logarithm (tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Optimize log(e) = 1.0.  We're never passed an exact 'e',
	 instead we'll look for 'e' truncated to MODE.  So only do
	 this if flag_unsafe_math_optimizations is set.  */
      if (flag_unsafe_math_optimizations && func == mpfr_log)
	{
	  const REAL_VALUE_TYPE e_truncated =
	    real_value_truncate (TYPE_MODE (type), dconste);
	  if (real_dconstp (arg, &e_truncated))
	    return build_real (type, dconst1);
	}

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  Each log flavor is
	 matched only with the exp flavor it actually inverts.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert (type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type,
			      real_value_truncate (TYPE_MODE (type), dconste));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      x = build_real (type, dconst10);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconstthird));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr (fndecl, 1, x);
	      return fold_build2 (MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8092
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  Valid because hypot only
     depends on the magnitudes of its arguments.  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
			      narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1 (ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1 (ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2), using sqrt(2) truncated to the
     result's mode; only under -funsafe-math-optimizations since the
     rounding differs.  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconstsqrt2);
      return fold_build2 (MULT_EXPR, type,
			  fold_build1 (ABS_EXPR, type, arg0),
			  build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
8138
8139
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  /* Folds that require a constant exponent.  */
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand (type, build_real (type, dconst1),
				 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2 (RDIV_EXPR, type,
			    build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr (sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  The comparison uses 1/3
	 truncated to the result's mode, since that is what a literal
	 1.0/3.0 folds to.  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconstthird);

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr (cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip through an integer
	 and test that nothing was lost.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time.  The result is
	     used only when exact, unless unsafe math is allowed.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers, since the sign
	     of the base cannot affect the result then.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr (fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2 (MULT_EXPR, type, arg, arg1);
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
				    build_real (type, dconsthalf));
	  return build_call_expr (fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconstthird);
	      tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
					build_real (type, dconstroot));
	      return build_call_expr (fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  tree arg01 = CALL_EXPR_ARG (arg0, 1);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
	  return build_call_expr (fndecl, 2, arg00, narg1);
	}
    }

  return NULL_TREE;
}
8284
8285 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8286 Return NULL_TREE if no simplification can be made. */
8287 static tree
8288 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8289 tree arg0, tree arg1, tree type)
8290 {
8291 if (!validate_arg (arg0, REAL_TYPE)
8292 || !validate_arg (arg1, INTEGER_TYPE))
8293 return NULL_TREE;
8294
8295 /* Optimize pow(1.0,y) = 1.0. */
8296 if (real_onep (arg0))
8297 return omit_one_operand (type, build_real (type, dconst1), arg1);
8298
8299 if (host_integerp (arg1, 0))
8300 {
8301 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8302
8303 /* Evaluate powi at compile-time. */
8304 if (TREE_CODE (arg0) == REAL_CST
8305 && !TREE_OVERFLOW (arg0))
8306 {
8307 REAL_VALUE_TYPE x;
8308 x = TREE_REAL_CST (arg0);
8309 real_powi (&x, TYPE_MODE (type), &x, c);
8310 return build_real (type, x);
8311 }
8312
8313 /* Optimize pow(x,0) = 1.0. */
8314 if (c == 0)
8315 return omit_one_operand (type, build_real (type, dconst1),
8316 arg0);
8317
8318 /* Optimize pow(x,1) = x. */
8319 if (c == 1)
8320 return arg0;
8321
8322 /* Optimize pow(x,-1) = 1.0/x. */
8323 if (c == -1)
8324 return fold_build2 (RDIV_EXPR, type,
8325 build_real (type, dconst1), arg0);
8326 }
8327
8328 return NULL_TREE;
8329 }
8330
8331 /* A subroutine of fold_builtin to fold the various exponent
8332 functions. Return NULL_TREE if no simplification can be made.
8333 FUNC is the corresponding MPFR exponent function. */
8334
8335 static tree
8336 fold_builtin_exponent (tree fndecl, tree arg,
8337 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8338 {
8339 if (validate_arg (arg, REAL_TYPE))
8340 {
8341 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8342 tree res;
8343
8344 /* Calculate the result when the argument is a constant. */
8345 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8346 return res;
8347
8348 /* Optimize expN(logN(x)) = x. */
8349 if (flag_unsafe_math_optimizations)
8350 {
8351 const enum built_in_function fcode = builtin_mathfn_code (arg);
8352
8353 if ((func == mpfr_exp
8354 && (fcode == BUILT_IN_LOG
8355 || fcode == BUILT_IN_LOGF
8356 || fcode == BUILT_IN_LOGL))
8357 || (func == mpfr_exp2
8358 && (fcode == BUILT_IN_LOG2
8359 || fcode == BUILT_IN_LOG2F
8360 || fcode == BUILT_IN_LOG2L))
8361 || (func == mpfr_exp10
8362 && (fcode == BUILT_IN_LOG10
8363 || fcode == BUILT_IN_LOG10F
8364 || fcode == BUILT_IN_LOG10L)))
8365 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8366 }
8367 }
8368
8369 return NULL_TREE;
8370 }
8371
8372 /* Return true if VAR is a VAR_DECL or a component thereof. */
8373
8374 static bool
8375 var_decl_component_p (tree var)
8376 {
8377 tree inner = var;
8378 while (handled_component_p (inner))
8379 inner = TREE_OPERAND (inner, 0);
8380 return SSA_VAR_P (inner);
8381 }
8382
/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
{
  tree var, ret;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, c);

  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* The destination must be the address of a non-volatile scalar
     (integral or pointer) variable or component thereof; only then
     can the memset be turned into a plain store.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
      && !POINTER_TYPE_P (TREE_TYPE (var)))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* LEN must cover the variable exactly, and the destination must be
     sufficiently aligned for a single store of that size.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < (int) length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the fill byte into every byte of CVAL.  The last
	 step is written as two shifts so it stays well-defined even
	 when HOST_WIDE_INT is only 32 bits wide (a single shift by 32
	 would be undefined behavior there).  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build VAR = CVAL; when the value is used, keep DEST as the result
     while still evaluating the store via omit_one_operand.  */
  ret = build_int_cst_type (TREE_TYPE (var), cval);
  ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
  if (ignore)
    return ret;

  return omit_one_operand (type, dest, ret);
}
8453
8454 /* Fold function call to builtin memset. Return
8455 NULL_TREE if no simplification can be made. */
8456
8457 static tree
8458 fold_builtin_bzero (tree dest, tree size, bool ignore)
8459 {
8460 if (! validate_arg (dest, POINTER_TYPE)
8461 || ! validate_arg (size, INTEGER_TYPE))
8462 return NULL_TREE;
8463
8464 if (!ignore)
8465 return NULL_TREE;
8466
8467 /* New argument list transforming bzero(ptr x, int y) to
8468 memset(ptr x, int 0, size_t y). This is done this way
8469 so that if it isn't expanded inline, we fallback to
8470 calling bzero instead of memset. */
8471
8472 return fold_builtin_memset (dest, integer_zero_node,
8473 fold_convert (sizetype, size),
8474 void_type_node, ignore);
8475 }
8476
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  EXPR records the side-effect expression (if
     any) that must still be evaluated alongside the result; here the
     copy is a no-op so only LEN remains.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      if (endp == 3)
	{
	  /* memmove: can only be simplified to memcpy when overlap is
	     provably harmless.  */
	  int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (dest_align && src_align
	      && (readonly_data_expr (src)
		  || (host_integerp (len, 1)
		      && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
			  tree_low_cst (len, 1)))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr (fn, 3, dest, src, len);
	    }
	  return NULL_TREE;
	}

      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* Only fold to a scalar assignment when both pointed-to types
	 have a constant size exactly equal to LEN.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (!srctype || !desttype
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
	  || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	  || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	return NULL_TREE;

      /* Both sides must be aligned enough for a direct access of the
	 respective type.  */
      if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
	  < (int) TYPE_ALIGN (desttype)
	  || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
	      < (int) TYPE_ALIGN (srctype)))
	return NULL_TREE;

      /* DEST is also used to build the return value below; save it so
	 it is evaluated only once.  */
      if (!ignore)
	dest = builtin_save_expr (dest);

      srcvar = build_fold_indirect_ref (src);
      if (TREE_THIS_VOLATILE (srcvar))
	return NULL_TREE;
      if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
	return NULL_TREE;
      /* With memcpy, it is possible to bypass aliasing rules, so without
	 this check i.e. execute/20060930-2.c would be misoptimized, because
	 it uses a conflicting alias set to hold the argument for the memcpy
	 call.  This check is probably unnecessary with -fno-strict-aliasing.
	 Similarly for destvar.  See also PR29286.  */
      if (!var_decl_component_p (srcvar)
	  /* Accept: memcpy (*char_var, "test", 1); that simplify
	     to char_var='t';  */
	  || is_gimple_min_invariant (srcvar)
	  || readonly_data_expr (src))
	return NULL_TREE;

      destvar = build_fold_indirect_ref (dest);
      if (TREE_THIS_VOLATILE (destvar))
	return NULL_TREE;
      if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
	return NULL_TREE;
      if (!var_decl_component_p (destvar))
	return NULL_TREE;

      /* Choose how to bridge the source and destination types: a
	 direct use, a scalar conversion, or a bitwise reinterpret.  */
      if (srctype == desttype
	  || (gimple_in_ssa_p (cfun)
	      && tree_ssa_useless_type_conversion_1 (desttype, srctype)))
	expr = srcvar;
      else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
	   || POINTER_TYPE_P (TREE_TYPE (srcvar)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
	      || POINTER_TYPE_P (TREE_TYPE (destvar))))
	expr = fold_convert (TREE_TYPE (destvar), srcvar);
      else
	expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
    }

  if (ignore)
    return expr;

  if (endp == 0 || endp == 3)
    return omit_one_operand (type, dest, expr);

  /* When SRC == DEST, EXPR is just LEN and needs no separate
     evaluation.  */
  if (expr == len)
    expr = NULL_TREE;

  /* For stpcpy semantics the result points at the last byte written
     rather than one past it.  */
  if (endp == 2)
    len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
		       ssize_int (1));

  len = fold_convert (TREE_TYPE (dest), len);
  dest = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert (type, dest);
  if (expr)
    dest = omit_one_operand (type, dest, expr);
  return dest;
}
8617
8618 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8619 If LEN is not NULL, it represents the length of the string to be
8620 copied. Return NULL_TREE if no simplification can be made. */
8621
8622 tree
8623 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8624 {
8625 tree fn;
8626
8627 if (!validate_arg (dest, POINTER_TYPE)
8628 || !validate_arg (src, POINTER_TYPE))
8629 return NULL_TREE;
8630
8631 /* If SRC and DEST are the same (and not volatile), return DEST. */
8632 if (operand_equal_p (src, dest, 0))
8633 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8634
8635 if (optimize_size)
8636 return NULL_TREE;
8637
8638 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8639 if (!fn)
8640 return NULL_TREE;
8641
8642 if (!len)
8643 {
8644 len = c_strlen (src, 1);
8645 if (! len || TREE_SIDE_EFFECTS (len))
8646 return NULL_TREE;
8647 }
8648
8649 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8650 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8651 build_call_expr (fn, 3, dest, src, len));
8652 }
8653
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Include the terminating NUL in the source length.  */
  slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  Safe because LEN <= SLEN, so
     the copy never reads past the source string.  */
  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
		       build_call_expr (fn, 3, dest, src, len));
}
8699
8700 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8701 arguments to the call, and TYPE is its return type.
8702 Return NULL_TREE if no simplification can be made. */
8703
8704 static tree
8705 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8706 {
8707 if (!validate_arg (arg1, POINTER_TYPE)
8708 || !validate_arg (arg2, INTEGER_TYPE)
8709 || !validate_arg (len, INTEGER_TYPE))
8710 return NULL_TREE;
8711 else
8712 {
8713 const char *p1;
8714
8715 if (TREE_CODE (arg2) != INTEGER_CST
8716 || !host_integerp (len, 1))
8717 return NULL_TREE;
8718
8719 p1 = c_getstr (arg1);
8720 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8721 {
8722 char c;
8723 const char *r;
8724 tree tem;
8725
8726 if (target_char_cast (arg2, &c))
8727 return NULL_TREE;
8728
8729 r = memchr (p1, c, tree_low_cst (len, 1));
8730
8731 if (r == NULL)
8732 return build_int_cst (TREE_TYPE (arg1), 0);
8733
8734 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (arg1), arg1,
8735 build_int_cst (TREE_TYPE (arg1), r - p1));
8736 return fold_convert (type, tem);
8737 }
8738 return NULL_TREE;
8739 }
8740 }
8741
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  The
     host memcmp result is normalized to -1/0/1 since the target only
     guarantees the sign.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8804
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant: evaluate at compile time, normalizing the
     host strcmp result to -1/0/1 since only its sign is specified.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert (integer_type_node,
			   build1 (INDIRECT_REF, cst_uchar_node,
				   fold_convert (cst_uchar_ptr_node,
						 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
8864
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  /* ARG1 and ARG2 must be pointers and LEN an integer; otherwise the
     call is malformed and we leave it alone.  */
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  omit_two_operands
     preserves any side-effects of evaluating ARG1 and ARG2.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  /* c_getstr yields the constant NUL-terminated string an argument
     points to, or NULL if it is not a string literal.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings and the length are compile-time constants: evaluate
     the comparison on the host and return -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      /* Read through a const unsigned char * as the C standard
	 requires for str[n]cmp semantics.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert (integer_type_node,
			   build1 (INDIRECT_REF, cst_uchar_node,
				   fold_convert (cst_uchar_ptr_node,
						 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8955
8956 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8957 ARG. Return NULL_TREE if no simplification can be made. */
8958
8959 static tree
8960 fold_builtin_signbit (tree arg, tree type)
8961 {
8962 tree temp;
8963
8964 if (!validate_arg (arg, REAL_TYPE))
8965 return NULL_TREE;
8966
8967 /* If ARG is a compile-time constant, determine the result. */
8968 if (TREE_CODE (arg) == REAL_CST
8969 && !TREE_OVERFLOW (arg))
8970 {
8971 REAL_VALUE_TYPE c;
8972
8973 c = TREE_REAL_CST (arg);
8974 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8975 return fold_convert (type, temp);
8976 }
8977
8978 /* If ARG is non-negative, the result is always zero. */
8979 if (tree_expr_nonnegative_p (arg))
8980 return omit_one_operand (type, integer_zero_node, arg);
8981
8982 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8983 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8984 return fold_build2 (LT_EXPR, type, arg,
8985 build_real (TREE_TYPE (arg), dconst0));
8986
8987 return NULL_TREE;
8988 }
8989
8990 /* Fold function call to builtin copysign, copysignf or copysignl with
8991 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8992 be made. */
8993
8994 static tree
8995 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
8996 {
8997 tree tem;
8998
8999 if (!validate_arg (arg1, REAL_TYPE)
9000 || !validate_arg (arg2, REAL_TYPE))
9001 return NULL_TREE;
9002
9003 /* copysign(X,X) is X. */
9004 if (operand_equal_p (arg1, arg2, 0))
9005 return fold_convert (type, arg1);
9006
9007 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9008 if (TREE_CODE (arg1) == REAL_CST
9009 && TREE_CODE (arg2) == REAL_CST
9010 && !TREE_OVERFLOW (arg1)
9011 && !TREE_OVERFLOW (arg2))
9012 {
9013 REAL_VALUE_TYPE c1, c2;
9014
9015 c1 = TREE_REAL_CST (arg1);
9016 c2 = TREE_REAL_CST (arg2);
9017 /* c1.sign := c2.sign. */
9018 real_copysign (&c1, &c2);
9019 return build_real (type, c1);
9020 }
9021
9022 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9023 Remember to evaluate Y for side-effects. */
9024 if (tree_expr_nonnegative_p (arg2))
9025 return omit_one_operand (type,
9026 fold_build1 (ABS_EXPR, type, arg1),
9027 arg2);
9028
9029 /* Strip sign changing operations for the first argument. */
9030 tem = fold_strip_sign_ops (arg1);
9031 if (tem)
9032 return build_call_expr (fndecl, 2, tem, arg2);
9033
9034 return NULL_TREE;
9035 }
9036
9037 /* Fold a call to builtin isascii with argument ARG. */
9038
9039 static tree
9040 fold_builtin_isascii (tree arg)
9041 {
9042 if (!validate_arg (arg, INTEGER_TYPE))
9043 return NULL_TREE;
9044 else
9045 {
9046 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9047 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9048 build_int_cst (NULL_TREE,
9049 ~ (unsigned HOST_WIDE_INT) 0x7f));
9050 return fold_build2 (EQ_EXPR, integer_type_node,
9051 arg, integer_zero_node);
9052 }
9053 }
9054
9055 /* Fold a call to builtin toascii with argument ARG. */
9056
9057 static tree
9058 fold_builtin_toascii (tree arg)
9059 {
9060 if (!validate_arg (arg, INTEGER_TYPE))
9061 return NULL_TREE;
9062
9063 /* Transform toascii(c) -> (c & 0x7f). */
9064 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9065 build_int_cst (NULL_TREE, 0x7f));
9066 }
9067
9068 /* Fold a call to builtin isdigit with argument ARG. */
9069
9070 static tree
9071 fold_builtin_isdigit (tree arg)
9072 {
9073 if (!validate_arg (arg, INTEGER_TYPE))
9074 return NULL_TREE;
9075 else
9076 {
9077 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9078 /* According to the C standard, isdigit is unaffected by locale.
9079 However, it definitely is affected by the target character set. */
9080 unsigned HOST_WIDE_INT target_digit0
9081 = lang_hooks.to_target_charset ('0');
9082
9083 if (target_digit0 == 0)
9084 return NULL_TREE;
9085
9086 arg = fold_convert (unsigned_type_node, arg);
9087 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9088 build_int_cst (unsigned_type_node, target_digit0));
9089 return fold_build2 (LE_EXPR, integer_type_node, arg,
9090 build_int_cst (unsigned_type_node, 9));
9091 }
9092 }
9093
9094 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9095
9096 static tree
9097 fold_builtin_fabs (tree arg, tree type)
9098 {
9099 if (!validate_arg (arg, REAL_TYPE))
9100 return NULL_TREE;
9101
9102 arg = fold_convert (type, arg);
9103 if (TREE_CODE (arg) == REAL_CST)
9104 return fold_abs_const (arg, type);
9105 return fold_build1 (ABS_EXPR, type, arg);
9106 }
9107
9108 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9109
9110 static tree
9111 fold_builtin_abs (tree arg, tree type)
9112 {
9113 if (!validate_arg (arg, INTEGER_TYPE))
9114 return NULL_TREE;
9115
9116 arg = fold_convert (type, arg);
9117 if (TREE_CODE (arg) == INTEGER_CST)
9118 return fold_abs_const (arg, type);
9119 return fold_build1 (ABS_EXPR, type, arg);
9120 }
9121
/* Fold a call to builtin fmin or fmax with arguments ARG0 and ARG1.
   TYPE is the return type of the call and MAX selects fmax (true) or
   fmin (false).  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand (type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand (type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  OEP_PURE_SAME allows matching
	 pure (but not arbitrary) expressions.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand (type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert (type, arg0),
			    fold_convert (type, arg1));
    }
  return NULL_TREE;
}
9165
9166 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9167
9168 static tree
9169 fold_builtin_carg (tree arg, tree type)
9170 {
9171 if (validate_arg (arg, COMPLEX_TYPE))
9172 {
9173 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9174
9175 if (atan2_fn)
9176 {
9177 tree new_arg = builtin_save_expr (arg);
9178 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9179 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9180 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9181 }
9182 }
9183
9184 return NULL_TREE;
9185 }
9186
/* Fold a call to builtin logb/ilogb with argument ARG.  RETTYPE is the
   return type of the call (real for logb, integer for ilogb); the two
   cases are distinguished by TREE_CODE (RETTYPE) below.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only fold a compile-time real constant.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert (rettype, arg);
	  /* Fall through...  (ilogb of Inf/NaN is left alone, same as
	     the zero case below).  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert (rettype, build_int_cst (NULL_TREE,
							 REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9227
/* Fold a call to builtin significand with argument ARG, if radix == 2.
   RETTYPE is the real return type of the call.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_significand (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only fold a compile-time real constant.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert (rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9266
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the real argument, ARG1 the int* output pointer for the exponent, and
   RETTYPE the real return type of the call.  Return NULL_TREE unless
   ARG0 is a compile-time real constant.  */

static tree
fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a compile-time real constant.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand (rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9322
/* Fold a call to builtin ldexp or scalbn/scalbln with arguments ARG0
   (the real value) and ARG1 (the integer exponent adjustment).  TYPE is
   the return type of the call.  If LDEXP is true then we can assume the
   base is two.  If it's false, then we have to check the mode of the
   TYPE parameter in certain cases.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.
	 omit_one_operand keeps any side-effects of evaluating ARG1.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && (real_isnan (&TREE_REAL_CST (arg0))
		  || real_isinf (&TREE_REAL_CST (arg0)))))
	return omit_one_operand (type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For scalbn/scalbln this is only valid when the radix is 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9383
/* Fold a call to builtin modf.  ARG0 is the real argument, ARG1 the
   pointer receiving the integral part, RETTYPE the real return type of
   the call.  Return NULL_TREE unless ARG0 is a compile-time real
   constant.  */

static tree
fold_builtin_modf (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a compile-time real constant.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      /* All rvc_* classes are covered below, so TRUNC and FRAC are
	 always assigned before use.  */
      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
			  build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1,
			  build_real (rettype, frac));
    }

  return NULL_TREE;
}
9439
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects which of the
   three is being folded; FNDECL is used only for its return type and
   for diagnostics.  Returns error_mark_node on a non-real argument,
   NULL_TREE if no folding is possible.  */

static tree
fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    {
      error ("non-floating-point argument to function %qs",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
      return error_mark_node;
    }

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* With -ffinite-math-only (no honored infinities), isinf is
	 always 0; keep ARG for its side-effects.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_FINITE:
      /* If neither NaNs nor infinities are honored, everything is
	 finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isinf (&r) || real_isnan (&r)
		 ? integer_zero_node : integer_one_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) == (x unordered x); save ARG so it is evaluated only
	 once.  */
      arg = builtin_save_expr (arg);
      return fold_build2 (UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9505
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  Returns error_mark_node when neither argument is
   floating-point.  */

static tree
fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  /* Pick the common comparison type: the wider real type, or the real
     type when only one argument is real.  */
  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;
  else
    {
      error ("non-floating-point argument to function %qs",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
      return error_mark_node;
    }

  arg0 = fold_convert (cmp_type, arg0);
  arg1 = fold_convert (cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* Without honored NaNs the operands can never be unordered;
	 keep both for their side-effects.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands (type, integer_zero_node, arg0, arg1);
      return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The given codes compute the opposite of the desired result, so
     build the negation of the selected comparison.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1 (TRUTH_NOT_EXPR, type,
		      fold_build2 (code, type, arg0, arg1));
}
9561
9562 /* Fold a call to built-in function FNDECL with 0 arguments.
9563 IGNORE is true if the result of the function call is ignored. This
9564 function returns NULL_TREE if no simplification was possible. */
9565
9566 static tree
9567 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9568 {
9569 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9570 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9571 switch (fcode)
9572 {
9573 CASE_FLT_FN (BUILT_IN_INF):
9574 case BUILT_IN_INFD32:
9575 case BUILT_IN_INFD64:
9576 case BUILT_IN_INFD128:
9577 return fold_builtin_inf (type, true);
9578
9579 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9580 return fold_builtin_inf (type, false);
9581
9582 case BUILT_IN_CLASSIFY_TYPE:
9583 return fold_builtin_classify_type (NULL_TREE);
9584
9585 default:
9586 break;
9587 }
9588 return NULL_TREE;
9589 }
9590
9591 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9592 IGNORE is true if the result of the function call is ignored. This
9593 function returns NULL_TREE if no simplification was possible. */
9594
9595 static tree
9596 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9597 {
9598 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9599 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9600 switch (fcode)
9601 {
9602
9603 case BUILT_IN_CONSTANT_P:
9604 {
9605 tree val = fold_builtin_constant_p (arg0);
9606
9607 /* Gimplification will pull the CALL_EXPR for the builtin out of
9608 an if condition. When not optimizing, we'll not CSE it back.
9609 To avoid link error types of regressions, return false now. */
9610 if (!val && !optimize)
9611 val = integer_zero_node;
9612
9613 return val;
9614 }
9615
9616 case BUILT_IN_CLASSIFY_TYPE:
9617 return fold_builtin_classify_type (arg0);
9618
9619 case BUILT_IN_STRLEN:
9620 return fold_builtin_strlen (arg0);
9621
9622 CASE_FLT_FN (BUILT_IN_FABS):
9623 return fold_builtin_fabs (arg0, type);
9624
9625 case BUILT_IN_ABS:
9626 case BUILT_IN_LABS:
9627 case BUILT_IN_LLABS:
9628 case BUILT_IN_IMAXABS:
9629 return fold_builtin_abs (arg0, type);
9630
9631 CASE_FLT_FN (BUILT_IN_CONJ):
9632 if (validate_arg (arg0, COMPLEX_TYPE))
9633 return fold_build1 (CONJ_EXPR, type, arg0);
9634 break;
9635
9636 CASE_FLT_FN (BUILT_IN_CREAL):
9637 if (validate_arg (arg0, COMPLEX_TYPE))
9638 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9639 break;
9640
9641 CASE_FLT_FN (BUILT_IN_CIMAG):
9642 if (validate_arg (arg0, COMPLEX_TYPE))
9643 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9644 break;
9645
9646 CASE_FLT_FN (BUILT_IN_CCOS):
9647 CASE_FLT_FN (BUILT_IN_CCOSH):
9648 /* These functions are "even", i.e. f(x) == f(-x). */
9649 if (validate_arg (arg0, COMPLEX_TYPE))
9650 {
9651 tree narg = fold_strip_sign_ops (arg0);
9652 if (narg)
9653 return build_call_expr (fndecl, 1, narg);
9654 }
9655 break;
9656
9657 CASE_FLT_FN (BUILT_IN_CABS):
9658 return fold_builtin_cabs (arg0, type, fndecl);
9659
9660 CASE_FLT_FN (BUILT_IN_CARG):
9661 return fold_builtin_carg (arg0, type);
9662
9663 CASE_FLT_FN (BUILT_IN_SQRT):
9664 return fold_builtin_sqrt (arg0, type);
9665
9666 CASE_FLT_FN (BUILT_IN_CBRT):
9667 return fold_builtin_cbrt (arg0, type);
9668
9669 CASE_FLT_FN (BUILT_IN_ASIN):
9670 if (validate_arg (arg0, REAL_TYPE))
9671 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9672 &dconstm1, &dconst1, true);
9673 break;
9674
9675 CASE_FLT_FN (BUILT_IN_ACOS):
9676 if (validate_arg (arg0, REAL_TYPE))
9677 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9678 &dconstm1, &dconst1, true);
9679 break;
9680
9681 CASE_FLT_FN (BUILT_IN_ATAN):
9682 if (validate_arg (arg0, REAL_TYPE))
9683 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9684 break;
9685
9686 CASE_FLT_FN (BUILT_IN_ASINH):
9687 if (validate_arg (arg0, REAL_TYPE))
9688 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9689 break;
9690
9691 CASE_FLT_FN (BUILT_IN_ACOSH):
9692 if (validate_arg (arg0, REAL_TYPE))
9693 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9694 &dconst1, NULL, true);
9695 break;
9696
9697 CASE_FLT_FN (BUILT_IN_ATANH):
9698 if (validate_arg (arg0, REAL_TYPE))
9699 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9700 &dconstm1, &dconst1, false);
9701 break;
9702
9703 CASE_FLT_FN (BUILT_IN_SIN):
9704 if (validate_arg (arg0, REAL_TYPE))
9705 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9706 break;
9707
9708 CASE_FLT_FN (BUILT_IN_COS):
9709 return fold_builtin_cos (arg0, type, fndecl);
9710 break;
9711
9712 CASE_FLT_FN (BUILT_IN_TAN):
9713 return fold_builtin_tan (arg0, type);
9714
9715 CASE_FLT_FN (BUILT_IN_CEXP):
9716 return fold_builtin_cexp (arg0, type);
9717
9718 CASE_FLT_FN (BUILT_IN_CEXPI):
9719 if (validate_arg (arg0, REAL_TYPE))
9720 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9721 break;
9722
9723 CASE_FLT_FN (BUILT_IN_SINH):
9724 if (validate_arg (arg0, REAL_TYPE))
9725 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9726 break;
9727
9728 CASE_FLT_FN (BUILT_IN_COSH):
9729 return fold_builtin_cosh (arg0, type, fndecl);
9730
9731 CASE_FLT_FN (BUILT_IN_TANH):
9732 if (validate_arg (arg0, REAL_TYPE))
9733 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9734 break;
9735
9736 CASE_FLT_FN (BUILT_IN_ERF):
9737 if (validate_arg (arg0, REAL_TYPE))
9738 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9739 break;
9740
9741 CASE_FLT_FN (BUILT_IN_ERFC):
9742 if (validate_arg (arg0, REAL_TYPE))
9743 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9744 break;
9745
9746 CASE_FLT_FN (BUILT_IN_TGAMMA):
9747 if (validate_arg (arg0, REAL_TYPE))
9748 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9749 break;
9750
9751 CASE_FLT_FN (BUILT_IN_EXP):
9752 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9753
9754 CASE_FLT_FN (BUILT_IN_EXP2):
9755 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9756
9757 CASE_FLT_FN (BUILT_IN_EXP10):
9758 CASE_FLT_FN (BUILT_IN_POW10):
9759 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9760
9761 CASE_FLT_FN (BUILT_IN_EXPM1):
9762 if (validate_arg (arg0, REAL_TYPE))
9763 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9764 break;
9765
9766 CASE_FLT_FN (BUILT_IN_LOG):
9767 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9768
9769 CASE_FLT_FN (BUILT_IN_LOG2):
9770 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9771
9772 CASE_FLT_FN (BUILT_IN_LOG10):
9773 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9774
9775 CASE_FLT_FN (BUILT_IN_LOG1P):
9776 if (validate_arg (arg0, REAL_TYPE))
9777 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9778 &dconstm1, NULL, false);
9779 break;
9780
9781 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9782 CASE_FLT_FN (BUILT_IN_J0):
9783 if (validate_arg (arg0, REAL_TYPE))
9784 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9785 NULL, NULL, 0);
9786 break;
9787
9788 CASE_FLT_FN (BUILT_IN_J1):
9789 if (validate_arg (arg0, REAL_TYPE))
9790 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9791 NULL, NULL, 0);
9792 break;
9793
9794 CASE_FLT_FN (BUILT_IN_Y0):
9795 if (validate_arg (arg0, REAL_TYPE))
9796 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9797 &dconst0, NULL, false);
9798 break;
9799
9800 CASE_FLT_FN (BUILT_IN_Y1):
9801 if (validate_arg (arg0, REAL_TYPE))
9802 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9803 &dconst0, NULL, false);
9804 break;
9805 #endif
9806
9807 CASE_FLT_FN (BUILT_IN_NAN):
9808 case BUILT_IN_NAND32:
9809 case BUILT_IN_NAND64:
9810 case BUILT_IN_NAND128:
9811 return fold_builtin_nan (arg0, type, true);
9812
9813 CASE_FLT_FN (BUILT_IN_NANS):
9814 return fold_builtin_nan (arg0, type, false);
9815
9816 CASE_FLT_FN (BUILT_IN_FLOOR):
9817 return fold_builtin_floor (fndecl, arg0);
9818
9819 CASE_FLT_FN (BUILT_IN_CEIL):
9820 return fold_builtin_ceil (fndecl, arg0);
9821
9822 CASE_FLT_FN (BUILT_IN_TRUNC):
9823 return fold_builtin_trunc (fndecl, arg0);
9824
9825 CASE_FLT_FN (BUILT_IN_ROUND):
9826 return fold_builtin_round (fndecl, arg0);
9827
9828 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9829 CASE_FLT_FN (BUILT_IN_RINT):
9830 return fold_trunc_transparent_mathfn (fndecl, arg0);
9831
9832 CASE_FLT_FN (BUILT_IN_LCEIL):
9833 CASE_FLT_FN (BUILT_IN_LLCEIL):
9834 CASE_FLT_FN (BUILT_IN_LFLOOR):
9835 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9836 CASE_FLT_FN (BUILT_IN_LROUND):
9837 CASE_FLT_FN (BUILT_IN_LLROUND):
9838 return fold_builtin_int_roundingfn (fndecl, arg0);
9839
9840 CASE_FLT_FN (BUILT_IN_LRINT):
9841 CASE_FLT_FN (BUILT_IN_LLRINT):
9842 return fold_fixed_mathfn (fndecl, arg0);
9843
9844 case BUILT_IN_BSWAP32:
9845 case BUILT_IN_BSWAP64:
9846 return fold_builtin_bswap (fndecl, arg0);
9847
9848 CASE_INT_FN (BUILT_IN_FFS):
9849 CASE_INT_FN (BUILT_IN_CLZ):
9850 CASE_INT_FN (BUILT_IN_CTZ):
9851 CASE_INT_FN (BUILT_IN_POPCOUNT):
9852 CASE_INT_FN (BUILT_IN_PARITY):
9853 return fold_builtin_bitop (fndecl, arg0);
9854
9855 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9856 return fold_builtin_signbit (arg0, type);
9857
9858 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9859 return fold_builtin_significand (arg0, type);
9860
9861 CASE_FLT_FN (BUILT_IN_ILOGB):
9862 CASE_FLT_FN (BUILT_IN_LOGB):
9863 return fold_builtin_logb (arg0, type);
9864
9865 case BUILT_IN_ISASCII:
9866 return fold_builtin_isascii (arg0);
9867
9868 case BUILT_IN_TOASCII:
9869 return fold_builtin_toascii (arg0);
9870
9871 case BUILT_IN_ISDIGIT:
9872 return fold_builtin_isdigit (arg0);
9873
9874 CASE_FLT_FN (BUILT_IN_FINITE):
9875 case BUILT_IN_FINITED32:
9876 case BUILT_IN_FINITED64:
9877 case BUILT_IN_FINITED128:
9878 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9879
9880 CASE_FLT_FN (BUILT_IN_ISINF):
9881 case BUILT_IN_ISINFD32:
9882 case BUILT_IN_ISINFD64:
9883 case BUILT_IN_ISINFD128:
9884 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9885
9886 CASE_FLT_FN (BUILT_IN_ISNAN):
9887 case BUILT_IN_ISNAND32:
9888 case BUILT_IN_ISNAND64:
9889 case BUILT_IN_ISNAND128:
9890 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9891
9892 case BUILT_IN_PRINTF:
9893 case BUILT_IN_PRINTF_UNLOCKED:
9894 case BUILT_IN_VPRINTF:
9895 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
9896
9897 default:
9898 break;
9899 }
9900
9901 return NULL_TREE;
9902
9903 }
9904
9905 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9906 IGNORE is true if the result of the function call is ignored. This
9907 function returns NULL_TREE if no simplification was possible. */
9908
9909 static tree
9910 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
9911 {
9912 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9913 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9914
9915 switch (fcode)
9916 {
9917 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9918 CASE_FLT_FN (BUILT_IN_JN):
9919 if (validate_arg (arg0, INTEGER_TYPE)
9920 && validate_arg (arg1, REAL_TYPE))
9921 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9922 break;
9923
9924 CASE_FLT_FN (BUILT_IN_YN):
9925 if (validate_arg (arg0, INTEGER_TYPE)
9926 && validate_arg (arg1, REAL_TYPE))
9927 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9928 &dconst0, false);
9929 break;
9930
9931 CASE_FLT_FN (BUILT_IN_DREM):
9932 CASE_FLT_FN (BUILT_IN_REMAINDER):
9933 if (validate_arg (arg0, REAL_TYPE)
9934 && validate_arg(arg1, REAL_TYPE))
9935 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9936 break;
9937
9938 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9939 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9940 if (validate_arg (arg0, REAL_TYPE)
9941 && validate_arg(arg1, POINTER_TYPE))
9942 return do_mpfr_lgamma_r (arg0, arg1, type);
9943 break;
9944 #endif
9945
9946 CASE_FLT_FN (BUILT_IN_ATAN2):
9947 if (validate_arg (arg0, REAL_TYPE)
9948 && validate_arg(arg1, REAL_TYPE))
9949 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9950 break;
9951
9952 CASE_FLT_FN (BUILT_IN_FDIM):
9953 if (validate_arg (arg0, REAL_TYPE)
9954 && validate_arg(arg1, REAL_TYPE))
9955 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9956 break;
9957
9958 CASE_FLT_FN (BUILT_IN_HYPOT):
9959 return fold_builtin_hypot (fndecl, arg0, arg1, type);
9960
9961 CASE_FLT_FN (BUILT_IN_LDEXP):
9962 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
9963 CASE_FLT_FN (BUILT_IN_SCALBN):
9964 CASE_FLT_FN (BUILT_IN_SCALBLN):
9965 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
9966
9967 CASE_FLT_FN (BUILT_IN_FREXP):
9968 return fold_builtin_frexp (arg0, arg1, type);
9969
9970 CASE_FLT_FN (BUILT_IN_MODF):
9971 return fold_builtin_modf (arg0, arg1, type);
9972
9973 case BUILT_IN_BZERO:
9974 return fold_builtin_bzero (arg0, arg1, ignore);
9975
9976 case BUILT_IN_FPUTS:
9977 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
9978
9979 case BUILT_IN_FPUTS_UNLOCKED:
9980 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
9981
9982 case BUILT_IN_STRSTR:
9983 return fold_builtin_strstr (arg0, arg1, type);
9984
9985 case BUILT_IN_STRCAT:
9986 return fold_builtin_strcat (arg0, arg1);
9987
9988 case BUILT_IN_STRSPN:
9989 return fold_builtin_strspn (arg0, arg1);
9990
9991 case BUILT_IN_STRCSPN:
9992 return fold_builtin_strcspn (arg0, arg1);
9993
9994 case BUILT_IN_STRCHR:
9995 case BUILT_IN_INDEX:
9996 return fold_builtin_strchr (arg0, arg1, type);
9997
9998 case BUILT_IN_STRRCHR:
9999 case BUILT_IN_RINDEX:
10000 return fold_builtin_strrchr (arg0, arg1, type);
10001
10002 case BUILT_IN_STRCPY:
10003 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10004
10005 case BUILT_IN_STRCMP:
10006 return fold_builtin_strcmp (arg0, arg1);
10007
10008 case BUILT_IN_STRPBRK:
10009 return fold_builtin_strpbrk (arg0, arg1, type);
10010
10011 case BUILT_IN_EXPECT:
10012 return fold_builtin_expect (arg0);
10013
10014 CASE_FLT_FN (BUILT_IN_POW):
10015 return fold_builtin_pow (fndecl, arg0, arg1, type);
10016
10017 CASE_FLT_FN (BUILT_IN_POWI):
10018 return fold_builtin_powi (fndecl, arg0, arg1, type);
10019
10020 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10021 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10022
10023 CASE_FLT_FN (BUILT_IN_FMIN):
10024 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10025
10026 CASE_FLT_FN (BUILT_IN_FMAX):
10027 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10028
10029 case BUILT_IN_ISGREATER:
10030 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10031 case BUILT_IN_ISGREATEREQUAL:
10032 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10033 case BUILT_IN_ISLESS:
10034 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10035 case BUILT_IN_ISLESSEQUAL:
10036 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10037 case BUILT_IN_ISLESSGREATER:
10038 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10039 case BUILT_IN_ISUNORDERED:
10040 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10041 NOP_EXPR);
10042
10043 /* We do the folding for va_start in the expander. */
10044 case BUILT_IN_VA_START:
10045 break;
10046
10047 case BUILT_IN_SPRINTF:
10048 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10049
10050 case BUILT_IN_OBJECT_SIZE:
10051 return fold_builtin_object_size (arg0, arg1);
10052
10053 case BUILT_IN_PRINTF:
10054 case BUILT_IN_PRINTF_UNLOCKED:
10055 case BUILT_IN_VPRINTF:
10056 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10057
10058 case BUILT_IN_PRINTF_CHK:
10059 case BUILT_IN_VPRINTF_CHK:
10060 if (!validate_arg (arg0, INTEGER_TYPE)
10061 || TREE_SIDE_EFFECTS (arg0))
10062 return NULL_TREE;
10063 else
10064 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10065 break;
10066
10067 case BUILT_IN_FPRINTF:
10068 case BUILT_IN_FPRINTF_UNLOCKED:
10069 case BUILT_IN_VFPRINTF:
10070 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10071 ignore, fcode);
10072
10073 default:
10074 break;
10075 }
10076 return NULL_TREE;
10077 }
10078
10079 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10080 and ARG2. IGNORE is true if the result of the function call is ignored.
10081 This function returns NULL_TREE if no simplification was possible. */
10082
10083 static tree
10084 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10085 {
10086 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10087 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10088 switch (fcode)
10089 {
10090
10091 CASE_FLT_FN (BUILT_IN_SINCOS):
10092 return fold_builtin_sincos (arg0, arg1, arg2);
10093
10094 CASE_FLT_FN (BUILT_IN_FMA):
10095 if (validate_arg (arg0, REAL_TYPE)
10096 && validate_arg(arg1, REAL_TYPE)
10097 && validate_arg(arg2, REAL_TYPE))
10098 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10099 break;
10100
10101 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10102 CASE_FLT_FN (BUILT_IN_REMQUO):
10103 if (validate_arg (arg0, REAL_TYPE)
10104 && validate_arg(arg1, REAL_TYPE)
10105 && validate_arg(arg2, POINTER_TYPE))
10106 return do_mpfr_remquo (arg0, arg1, arg2);
10107 break;
10108 #endif
10109
10110 case BUILT_IN_MEMSET:
10111 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10112
10113 case BUILT_IN_BCOPY:
10114 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10115
10116 case BUILT_IN_MEMCPY:
10117 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10118
10119 case BUILT_IN_MEMPCPY:
10120 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10121
10122 case BUILT_IN_MEMMOVE:
10123 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10124
10125 case BUILT_IN_STRNCAT:
10126 return fold_builtin_strncat (arg0, arg1, arg2);
10127
10128 case BUILT_IN_STRNCPY:
10129 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10130
10131 case BUILT_IN_STRNCMP:
10132 return fold_builtin_strncmp (arg0, arg1, arg2);
10133
10134 case BUILT_IN_MEMCHR:
10135 return fold_builtin_memchr (arg0, arg1, arg2, type);
10136
10137 case BUILT_IN_BCMP:
10138 case BUILT_IN_MEMCMP:
10139 return fold_builtin_memcmp (arg0, arg1, arg2);;
10140
10141 case BUILT_IN_SPRINTF:
10142 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10143
10144 case BUILT_IN_STRCPY_CHK:
10145 case BUILT_IN_STPCPY_CHK:
10146 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10147 ignore, fcode);
10148
10149 case BUILT_IN_STRCAT_CHK:
10150 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10151
10152 case BUILT_IN_PRINTF_CHK:
10153 case BUILT_IN_VPRINTF_CHK:
10154 if (!validate_arg (arg0, INTEGER_TYPE)
10155 || TREE_SIDE_EFFECTS (arg0))
10156 return NULL_TREE;
10157 else
10158 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10159 break;
10160
10161 case BUILT_IN_FPRINTF:
10162 case BUILT_IN_FPRINTF_UNLOCKED:
10163 case BUILT_IN_VFPRINTF:
10164 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10165
10166 case BUILT_IN_FPRINTF_CHK:
10167 case BUILT_IN_VFPRINTF_CHK:
10168 if (!validate_arg (arg1, INTEGER_TYPE)
10169 || TREE_SIDE_EFFECTS (arg1))
10170 return NULL_TREE;
10171 else
10172 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10173 ignore, fcode);
10174
10175 default:
10176 break;
10177 }
10178 return NULL_TREE;
10179 }
10180
10181 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10182 ARG2, and ARG3. IGNORE is true if the result of the function call is
10183 ignored. This function returns NULL_TREE if no simplification was
10184 possible. */
10185
10186 static tree
10187 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10188 bool ignore)
10189 {
10190 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10191
10192 switch (fcode)
10193 {
10194 case BUILT_IN_MEMCPY_CHK:
10195 case BUILT_IN_MEMPCPY_CHK:
10196 case BUILT_IN_MEMMOVE_CHK:
10197 case BUILT_IN_MEMSET_CHK:
10198 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10199 NULL_TREE, ignore,
10200 DECL_FUNCTION_CODE (fndecl));
10201
10202 case BUILT_IN_STRNCPY_CHK:
10203 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10204
10205 case BUILT_IN_STRNCAT_CHK:
10206 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10207
10208 case BUILT_IN_FPRINTF_CHK:
10209 case BUILT_IN_VFPRINTF_CHK:
10210 if (!validate_arg (arg1, INTEGER_TYPE)
10211 || TREE_SIDE_EFFECTS (arg1))
10212 return NULL_TREE;
10213 else
10214 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10215 ignore, fcode);
10216 break;
10217
10218 default:
10219 break;
10220 }
10221 return NULL_TREE;
10222 }
10223
10224 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10225 arguments, where NARGS <= 4. IGNORE is true if the result of the
10226 function call is ignored. This function returns NULL_TREE if no
10227 simplification was possible. Note that this only folds builtins with
10228 fixed argument patterns. Foldings that do varargs-to-varargs
10229 transformations, or that match calls with more than 4 arguments,
10230 need to be handled with fold_builtin_varargs instead. */
10231
10232 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10233
10234 static tree
10235 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10236 {
10237 tree ret = NULL_TREE;
10238 switch (nargs)
10239 {
10240 case 0:
10241 ret = fold_builtin_0 (fndecl, ignore);
10242 break;
10243 case 1:
10244 ret = fold_builtin_1 (fndecl, args[0], ignore);
10245 break;
10246 case 2:
10247 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10248 break;
10249 case 3:
10250 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10251 break;
10252 case 4:
10253 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10254 ignore);
10255 break;
10256 default:
10257 break;
10258 }
10259 if (ret)
10260 {
10261 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10262 TREE_NO_WARNING (ret) = 1;
10263 return ret;
10264 }
10265 return NULL_TREE;
10266 }
10267
10268 /* Builtins with folding operations that operate on "..." arguments
10269 need special handling; we need to store the arguments in a convenient
10270 data structure before attempting any folding. Fortunately there are
10271 only a few builtins that fall into this category. FNDECL is the
10272 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10273 result of the function call is ignored. */
10274
10275 static tree
10276 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10277 {
10278 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10279 tree ret = NULL_TREE;
10280
10281 switch (fcode)
10282 {
10283 case BUILT_IN_SPRINTF_CHK:
10284 case BUILT_IN_VSPRINTF_CHK:
10285 ret = fold_builtin_sprintf_chk (exp, fcode);
10286 break;
10287
10288 case BUILT_IN_SNPRINTF_CHK:
10289 case BUILT_IN_VSNPRINTF_CHK:
10290 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10291
10292 default:
10293 break;
10294 }
10295 if (ret)
10296 {
10297 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10298 TREE_NO_WARNING (ret) = 1;
10299 return ret;
10300 }
10301 return NULL_TREE;
10302 }
10303
10304 /* A wrapper function for builtin folding that prevents warnings for
10305 "statement without effect" and the like, caused by removing the
10306 call node earlier than the warning is generated. */
10307
10308 tree
10309 fold_call_expr (tree exp, bool ignore)
10310 {
10311 tree ret = NULL_TREE;
10312 tree fndecl = get_callee_fndecl (exp);
10313 if (fndecl
10314 && TREE_CODE (fndecl) == FUNCTION_DECL
10315 && DECL_BUILT_IN (fndecl))
10316 {
10317 /* FIXME: Don't use a list in this interface. */
10318 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10319 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10320 else
10321 {
10322 int nargs = call_expr_nargs (exp);
10323 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10324 {
10325 tree *args = CALL_EXPR_ARGP (exp);
10326 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10327 }
10328 if (!ret)
10329 ret = fold_builtin_varargs (fndecl, exp, ignore);
10330 if (ret)
10331 {
10332 /* Propagate location information from original call to
10333 expansion of builtin. Otherwise things like
10334 maybe_emit_chk_warning, that operate on the expansion
10335 of a builtin, will use the wrong location information. */
10336 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10337 {
10338 tree realret = ret;
10339 if (TREE_CODE (ret) == NOP_EXPR)
10340 realret = TREE_OPERAND (ret, 0);
10341 if (CAN_HAVE_LOCATION_P (realret)
10342 && !EXPR_HAS_LOCATION (realret))
10343 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10344 }
10345 return ret;
10346 }
10347 }
10348 }
10349 return NULL_TREE;
10350 }
10351
10352 /* Conveniently construct a function call expression. FNDECL names the
10353 function to be called and ARGLIST is a TREE_LIST of arguments. */
10354
10355 tree
10356 build_function_call_expr (tree fndecl, tree arglist)
10357 {
10358 tree fntype = TREE_TYPE (fndecl);
10359 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10360 int n = list_length (arglist);
10361 tree *argarray = (tree *) alloca (n * sizeof (tree));
10362 int i;
10363
10364 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10365 argarray[i] = TREE_VALUE (arglist);
10366 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10367 }
10368
10369 /* Conveniently construct a function call expression. FNDECL names the
10370 function to be called, N is the number of arguments, and the "..."
10371 parameters are the argument expressions. */
10372
10373 tree
10374 build_call_expr (tree fndecl, int n, ...)
10375 {
10376 va_list ap;
10377 tree fntype = TREE_TYPE (fndecl);
10378 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10379 tree *argarray = (tree *) alloca (n * sizeof (tree));
10380 int i;
10381
10382 va_start (ap, n);
10383 for (i = 0; i < n; i++)
10384 argarray[i] = va_arg (ap, tree);
10385 va_end (ap);
10386 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10387 }
10388
10389 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10390 N arguments are passed in the array ARGARRAY. */
10391
10392 tree
10393 fold_builtin_call_array (tree type,
10394 tree fn,
10395 int n,
10396 tree *argarray)
10397 {
10398 tree ret = NULL_TREE;
10399 int i;
10400 tree exp;
10401
10402 if (TREE_CODE (fn) == ADDR_EXPR)
10403 {
10404 tree fndecl = TREE_OPERAND (fn, 0);
10405 if (TREE_CODE (fndecl) == FUNCTION_DECL
10406 && DECL_BUILT_IN (fndecl))
10407 {
10408 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10409 {
10410 tree arglist = NULL_TREE;
10411 for (i = n - 1; i >= 0; i--)
10412 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10413 ret = targetm.fold_builtin (fndecl, arglist, false);
10414 if (ret)
10415 return ret;
10416 }
10417 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10418 {
10419 /* First try the transformations that don't require consing up
10420 an exp. */
10421 ret = fold_builtin_n (fndecl, argarray, n, false);
10422 if (ret)
10423 return ret;
10424 }
10425
10426 /* If we got this far, we need to build an exp. */
10427 exp = build_call_array (type, fn, n, argarray);
10428 ret = fold_builtin_varargs (fndecl, exp, false);
10429 return ret ? ret : exp;
10430 }
10431 }
10432
10433 return build_call_array (type, fn, n, argarray);
10434 }
10435
10436 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10437 along with N new arguments specified as the "..." parameters. SKIP
10438 is the number of arguments in EXP to be omitted. This function is used
10439 to do varargs-to-varargs transformations. */
10440
10441 static tree
10442 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10443 {
10444 int oldnargs = call_expr_nargs (exp);
10445 int nargs = oldnargs - skip + n;
10446 tree fntype = TREE_TYPE (fndecl);
10447 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10448 tree *buffer;
10449
10450 if (n > 0)
10451 {
10452 int i, j;
10453 va_list ap;
10454
10455 buffer = alloca (nargs * sizeof (tree));
10456 va_start (ap, n);
10457 for (i = 0; i < n; i++)
10458 buffer[i] = va_arg (ap, tree);
10459 va_end (ap);
10460 for (j = skip; j < oldnargs; j++, i++)
10461 buffer[i] = CALL_EXPR_ARG (exp, j);
10462 }
10463 else
10464 buffer = CALL_EXPR_ARGP (exp) + skip;
10465
10466 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10467 }
10468
10469 /* Validate a single argument ARG against a tree code CODE representing
10470 a type. */
10471
10472 static bool
10473 validate_arg (tree arg, enum tree_code code)
10474 {
10475 if (!arg)
10476 return false;
10477 else if (code == POINTER_TYPE)
10478 return POINTER_TYPE_P (TREE_TYPE (arg));
10479 return code == TREE_CODE (TREE_TYPE (arg));
10480 }
10481
10482 /* This function validates the types of a function call argument list
10483 against a specified list of tree_codes. If the last specifier is a 0,
10484 that represents an ellipses, otherwise the last specifier must be a
10485 VOID_TYPE. */
10486
10487 bool
10488 validate_arglist (tree callexpr, ...)
10489 {
10490 enum tree_code code;
10491 bool res = 0;
10492 va_list ap;
10493 call_expr_arg_iterator iter;
10494 tree arg;
10495
10496 va_start (ap, callexpr);
10497 init_call_expr_arg_iterator (callexpr, &iter);
10498
10499 do
10500 {
10501 code = va_arg (ap, enum tree_code);
10502 switch (code)
10503 {
10504 case 0:
10505 /* This signifies an ellipses, any further arguments are all ok. */
10506 res = true;
10507 goto end;
10508 case VOID_TYPE:
10509 /* This signifies an endlink, if no arguments remain, return
10510 true, otherwise return false. */
10511 res = !more_call_expr_args_p (&iter);
10512 goto end;
10513 default:
10514 /* If no parameters remain or the parameter's code does not
10515 match the specified code, return false. Otherwise continue
10516 checking any remaining arguments. */
10517 arg = next_call_expr_arg (&iter);
10518 if (!validate_arg (arg, code))
10519 goto end;
10520 break;
10521 }
10522 }
10523 while (1);
10524
10525 /* We need gotos here since we can only have one VA_CLOSE in a
10526 function. */
10527 end: ;
10528 va_end (ap);
10529
10530 return res;
10531 }
10532
/* Default target-specific builtin expander that does nothing.  Used as
   the fallback for targets that define no md builtins; all parameters
   are accepted but ignored.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* NULL_RTX tells the caller that no target-specific expansion
     was performed.  */
  return NULL_RTX;
}
10544
10545 /* Returns true is EXP represents data that would potentially reside
10546 in a readonly section. */
10547
10548 static bool
10549 readonly_data_expr (tree exp)
10550 {
10551 STRIP_NOPS (exp);
10552
10553 if (TREE_CODE (exp) != ADDR_EXPR)
10554 return false;
10555
10556 exp = get_base_address (TREE_OPERAND (exp, 0));
10557 if (!exp)
10558 return false;
10559
10560 /* Make sure we call decl_readonly_section only for trees it
10561 can handle (since it returns true for everything it doesn't
10562 understand). */
10563 if (TREE_CODE (exp) == STRING_CST
10564 || TREE_CODE (exp) == CONSTRUCTOR
10565 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10566 return decl_readonly_section (exp, 0);
10567 else
10568 return false;
10569 }
10570
10571 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10572 to the call, and TYPE is its return type.
10573
10574 Return NULL_TREE if no simplification was possible, otherwise return the
10575 simplified form of the call as a tree.
10576
10577 The simplified form may be a constant or other expression which
10578 computes the same value, but in a more efficient manner (including
10579 calls to other builtin functions).
10580
10581 The call may contain arguments which need to be evaluated, but
10582 which are not useful to determine the result of the call. In
10583 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10584 COMPOUND_EXPR will be an argument which must be evaluated.
10585 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10586 COMPOUND_EXPR in the chain will contain the tree for the simplified
10587 form of the builtin function call. */
10588
10589 static tree
10590 fold_builtin_strstr (tree s1, tree s2, tree type)
10591 {
10592 if (!validate_arg (s1, POINTER_TYPE)
10593 || !validate_arg (s2, POINTER_TYPE))
10594 return NULL_TREE;
10595 else
10596 {
10597 tree fn;
10598 const char *p1, *p2;
10599
10600 p2 = c_getstr (s2);
10601 if (p2 == NULL)
10602 return NULL_TREE;
10603
10604 p1 = c_getstr (s1);
10605 if (p1 != NULL)
10606 {
10607 const char *r = strstr (p1, p2);
10608 tree tem;
10609
10610 if (r == NULL)
10611 return build_int_cst (TREE_TYPE (s1), 0);
10612
10613 /* Return an offset into the constant string argument. */
10614 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10615 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10616 return fold_convert (type, tem);
10617 }
10618
10619 /* The argument is const char *, and the result is char *, so we need
10620 a type conversion here to avoid a warning. */
10621 if (p2[0] == '\0')
10622 return fold_convert (type, s1);
10623
10624 if (p2[1] != '\0')
10625 return NULL_TREE;
10626
10627 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10628 if (!fn)
10629 return NULL_TREE;
10630
10631 /* New argument list transforming strstr(s1, s2) to
10632 strchr(s1, s2[0]). */
10633 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10634 }
10635 }
10636
10637 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10638 the call, and TYPE is its return type.
10639
10640 Return NULL_TREE if no simplification was possible, otherwise return the
10641 simplified form of the call as a tree.
10642
10643 The simplified form may be a constant or other expression which
10644 computes the same value, but in a more efficient manner (including
10645 calls to other builtin functions).
10646
10647 The call may contain arguments which need to be evaluated, but
10648 which are not useful to determine the result of the call. In
10649 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10650 COMPOUND_EXPR will be an argument which must be evaluated.
10651 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10652 COMPOUND_EXPR in the chain will contain the tree for the simplified
10653 form of the builtin function call. */
10654
10655 static tree
10656 fold_builtin_strchr (tree s1, tree s2, tree type)
10657 {
10658 if (!validate_arg (s1, POINTER_TYPE)
10659 || !validate_arg (s2, INTEGER_TYPE))
10660 return NULL_TREE;
10661 else
10662 {
10663 const char *p1;
10664
10665 if (TREE_CODE (s2) != INTEGER_CST)
10666 return NULL_TREE;
10667
10668 p1 = c_getstr (s1);
10669 if (p1 != NULL)
10670 {
10671 char c;
10672 const char *r;
10673 tree tem;
10674
10675 if (target_char_cast (s2, &c))
10676 return NULL_TREE;
10677
10678 r = strchr (p1, c);
10679
10680 if (r == NULL)
10681 return build_int_cst (TREE_TYPE (s1), 0);
10682
10683 /* Return an offset into the constant string argument. */
10684 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10685 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10686 return fold_convert (type, tem);
10687 }
10688 return NULL_TREE;
10689 }
10690 }
10691
10692 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10693 the call, and TYPE is its return type.
10694
10695 Return NULL_TREE if no simplification was possible, otherwise return the
10696 simplified form of the call as a tree.
10697
10698 The simplified form may be a constant or other expression which
10699 computes the same value, but in a more efficient manner (including
10700 calls to other builtin functions).
10701
10702 The call may contain arguments which need to be evaluated, but
10703 which are not useful to determine the result of the call. In
10704 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10705 COMPOUND_EXPR will be an argument which must be evaluated.
10706 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10707 COMPOUND_EXPR in the chain will contain the tree for the simplified
10708 form of the builtin function call. */
10709
10710 static tree
10711 fold_builtin_strrchr (tree s1, tree s2, tree type)
10712 {
10713 if (!validate_arg (s1, POINTER_TYPE)
10714 || !validate_arg (s2, INTEGER_TYPE))
10715 return NULL_TREE;
10716 else
10717 {
10718 tree fn;
10719 const char *p1;
10720
10721 if (TREE_CODE (s2) != INTEGER_CST)
10722 return NULL_TREE;
10723
10724 p1 = c_getstr (s1);
10725 if (p1 != NULL)
10726 {
10727 char c;
10728 const char *r;
10729 tree tem;
10730
10731 if (target_char_cast (s2, &c))
10732 return NULL_TREE;
10733
10734 r = strrchr (p1, c);
10735
10736 if (r == NULL)
10737 return build_int_cst (TREE_TYPE (s1), 0);
10738
10739 /* Return an offset into the constant string argument. */
10740 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10741 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10742 return fold_convert (type, tem);
10743 }
10744
10745 if (! integer_zerop (s2))
10746 return NULL_TREE;
10747
10748 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10749 if (!fn)
10750 return NULL_TREE;
10751
10752 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10753 return build_call_expr (fn, 2, s1, s2);
10754 }
10755 }
10756
10757 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10758 to the call, and TYPE is its return type.
10759
10760 Return NULL_TREE if no simplification was possible, otherwise return the
10761 simplified form of the call as a tree.
10762
10763 The simplified form may be a constant or other expression which
10764 computes the same value, but in a more efficient manner (including
10765 calls to other builtin functions).
10766
10767 The call may contain arguments which need to be evaluated, but
10768 which are not useful to determine the result of the call. In
10769 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10770 COMPOUND_EXPR will be an argument which must be evaluated.
10771 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10772 COMPOUND_EXPR in the chain will contain the tree for the simplified
10773 form of the builtin function call. */
10774
10775 static tree
10776 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10777 {
10778 if (!validate_arg (s1, POINTER_TYPE)
10779 || !validate_arg (s2, POINTER_TYPE))
10780 return NULL_TREE;
10781 else
10782 {
10783 tree fn;
10784 const char *p1, *p2;
10785
10786 p2 = c_getstr (s2);
10787 if (p2 == NULL)
10788 return NULL_TREE;
10789
10790 p1 = c_getstr (s1);
10791 if (p1 != NULL)
10792 {
10793 const char *r = strpbrk (p1, p2);
10794 tree tem;
10795
10796 if (r == NULL)
10797 return build_int_cst (TREE_TYPE (s1), 0);
10798
10799 /* Return an offset into the constant string argument. */
10800 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10801 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10802 return fold_convert (type, tem);
10803 }
10804
10805 if (p2[0] == '\0')
10806 /* strpbrk(x, "") == NULL.
10807 Evaluate and ignore s1 in case it had side-effects. */
10808 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10809
10810 if (p2[1] != '\0')
10811 return NULL_TREE; /* Really call strpbrk. */
10812
10813 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10814 if (!fn)
10815 return NULL_TREE;
10816
10817 /* New argument list transforming strpbrk(s1, s2) to
10818 strchr(s1, s2[0]). */
10819 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10820 }
10821 }
10822
10823 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10824 to the call.
10825
10826 Return NULL_TREE if no simplification was possible, otherwise return the
10827 simplified form of the call as a tree.
10828
10829 The simplified form may be a constant or other expression which
10830 computes the same value, but in a more efficient manner (including
10831 calls to other builtin functions).
10832
10833 The call may contain arguments which need to be evaluated, but
10834 which are not useful to determine the result of the call. In
10835 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10836 COMPOUND_EXPR will be an argument which must be evaluated.
10837 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10838 COMPOUND_EXPR in the chain will contain the tree for the simplified
10839 form of the builtin function call. */
10840
10841 static tree
10842 fold_builtin_strcat (tree dst, tree src)
10843 {
10844 if (!validate_arg (dst, POINTER_TYPE)
10845 || !validate_arg (src, POINTER_TYPE))
10846 return NULL_TREE;
10847 else
10848 {
10849 const char *p = c_getstr (src);
10850
10851 /* If the string length is zero, return the dst parameter. */
10852 if (p && *p == '\0')
10853 return dst;
10854
10855 return NULL_TREE;
10856 }
10857 }
10858
10859 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10860 arguments to the call.
10861
10862 Return NULL_TREE if no simplification was possible, otherwise return the
10863 simplified form of the call as a tree.
10864
10865 The simplified form may be a constant or other expression which
10866 computes the same value, but in a more efficient manner (including
10867 calls to other builtin functions).
10868
10869 The call may contain arguments which need to be evaluated, but
10870 which are not useful to determine the result of the call. In
10871 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10872 COMPOUND_EXPR will be an argument which must be evaluated.
10873 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10874 COMPOUND_EXPR in the chain will contain the tree for the simplified
10875 form of the builtin function call. */
10876
10877 static tree
10878 fold_builtin_strncat (tree dst, tree src, tree len)
10879 {
10880 if (!validate_arg (dst, POINTER_TYPE)
10881 || !validate_arg (src, POINTER_TYPE)
10882 || !validate_arg (len, INTEGER_TYPE))
10883 return NULL_TREE;
10884 else
10885 {
10886 const char *p = c_getstr (src);
10887
10888 /* If the requested length is zero, or the src parameter string
10889 length is zero, return the dst parameter. */
10890 if (integer_zerop (len) || (p && *p == '\0'))
10891 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10892
10893 /* If the requested len is greater than or equal to the string
10894 length, call strcat. */
10895 if (TREE_CODE (len) == INTEGER_CST && p
10896 && compare_tree_int (len, strlen (p)) >= 0)
10897 {
10898 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10899
10900 /* If the replacement _DECL isn't initialized, don't do the
10901 transformation. */
10902 if (!fn)
10903 return NULL_TREE;
10904
10905 return build_call_expr (fn, 2, dst, src);
10906 }
10907 return NULL_TREE;
10908 }
10909 }
10910
10911 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10912 to the call.
10913
10914 Return NULL_TREE if no simplification was possible, otherwise return the
10915 simplified form of the call as a tree.
10916
10917 The simplified form may be a constant or other expression which
10918 computes the same value, but in a more efficient manner (including
10919 calls to other builtin functions).
10920
10921 The call may contain arguments which need to be evaluated, but
10922 which are not useful to determine the result of the call. In
10923 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10924 COMPOUND_EXPR will be an argument which must be evaluated.
10925 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10926 COMPOUND_EXPR in the chain will contain the tree for the simplified
10927 form of the builtin function call. */
10928
10929 static tree
10930 fold_builtin_strspn (tree s1, tree s2)
10931 {
10932 if (!validate_arg (s1, POINTER_TYPE)
10933 || !validate_arg (s2, POINTER_TYPE))
10934 return NULL_TREE;
10935 else
10936 {
10937 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10938
10939 /* If both arguments are constants, evaluate at compile-time. */
10940 if (p1 && p2)
10941 {
10942 const size_t r = strspn (p1, p2);
10943 return size_int (r);
10944 }
10945
10946 /* If either argument is "", return NULL_TREE. */
10947 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10948 /* Evaluate and ignore both arguments in case either one has
10949 side-effects. */
10950 return omit_two_operands (integer_type_node, integer_zero_node,
10951 s1, s2);
10952 return NULL_TREE;
10953 }
10954 }
10955
10956 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10957 to the call.
10958
10959 Return NULL_TREE if no simplification was possible, otherwise return the
10960 simplified form of the call as a tree.
10961
10962 The simplified form may be a constant or other expression which
10963 computes the same value, but in a more efficient manner (including
10964 calls to other builtin functions).
10965
10966 The call may contain arguments which need to be evaluated, but
10967 which are not useful to determine the result of the call. In
10968 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10969 COMPOUND_EXPR will be an argument which must be evaluated.
10970 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10971 COMPOUND_EXPR in the chain will contain the tree for the simplified
10972 form of the builtin function call. */
10973
10974 static tree
10975 fold_builtin_strcspn (tree s1, tree s2)
10976 {
10977 if (!validate_arg (s1, POINTER_TYPE)
10978 || !validate_arg (s2, POINTER_TYPE))
10979 return NULL_TREE;
10980 else
10981 {
10982 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10983
10984 /* If both arguments are constants, evaluate at compile-time. */
10985 if (p1 && p2)
10986 {
10987 const size_t r = strcspn (p1, p2);
10988 return size_int (r);
10989 }
10990
10991 /* If the first argument is "", return NULL_TREE. */
10992 if (p1 && *p1 == '\0')
10993 {
10994 /* Evaluate and ignore argument s2 in case it has
10995 side-effects. */
10996 return omit_one_operand (integer_type_node,
10997 integer_zero_node, s2);
10998 }
10999
11000 /* If the second argument is "", return __builtin_strlen(s1). */
11001 if (p2 && *p2 == '\0')
11002 {
11003 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11004
11005 /* If the replacement _DECL isn't initialized, don't do the
11006 transformation. */
11007 if (!fn)
11008 return NULL_TREE;
11009
11010 return build_call_expr (fn, 1, s1);
11011 }
11012 return NULL_TREE;
11013 }
11014 }
11015
11016 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11017 to the call. IGNORE is true if the value returned
11018 by the builtin will be ignored. UNLOCKED is true is true if this
11019 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11020 the known length of the string. Return NULL_TREE if no simplification
11021 was possible. */
11022
11023 tree
11024 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11025 {
11026 /* If we're using an unlocked function, assume the other unlocked
11027 functions exist explicitly. */
11028 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11029 : implicit_built_in_decls[BUILT_IN_FPUTC];
11030 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11031 : implicit_built_in_decls[BUILT_IN_FWRITE];
11032
11033 /* If the return value is used, don't do the transformation. */
11034 if (!ignore)
11035 return NULL_TREE;
11036
11037 /* Verify the arguments in the original call. */
11038 if (!validate_arg (arg0, POINTER_TYPE)
11039 || !validate_arg (arg1, POINTER_TYPE))
11040 return NULL_TREE;
11041
11042 if (! len)
11043 len = c_strlen (arg0, 0);
11044
11045 /* Get the length of the string passed to fputs. If the length
11046 can't be determined, punt. */
11047 if (!len
11048 || TREE_CODE (len) != INTEGER_CST)
11049 return NULL_TREE;
11050
11051 switch (compare_tree_int (len, 1))
11052 {
11053 case -1: /* length is 0, delete the call entirely . */
11054 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11055
11056 case 0: /* length is 1, call fputc. */
11057 {
11058 const char *p = c_getstr (arg0);
11059
11060 if (p != NULL)
11061 {
11062 if (fn_fputc)
11063 return build_call_expr (fn_fputc, 2,
11064 build_int_cst (NULL_TREE, p[0]), arg1);
11065 else
11066 return NULL_TREE;
11067 }
11068 }
11069 /* FALLTHROUGH */
11070 case 1: /* length is greater than 1, call fwrite. */
11071 {
11072 /* If optimizing for size keep fputs. */
11073 if (optimize_size)
11074 return NULL_TREE;
11075 /* New argument list transforming fputs(string, stream) to
11076 fwrite(string, 1, len, stream). */
11077 if (fn_fwrite)
11078 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11079 else
11080 return NULL_TREE;
11081 }
11082 default:
11083 gcc_unreachable ();
11084 }
11085 return NULL_TREE;
11086 }
11087
11088 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11089 produced. False otherwise. This is done so that we don't output the error
11090 or warning twice or three times. */
11091 bool
11092 fold_builtin_next_arg (tree exp, bool va_start_p)
11093 {
11094 tree fntype = TREE_TYPE (current_function_decl);
11095 int nargs = call_expr_nargs (exp);
11096 tree arg;
11097
11098 if (TYPE_ARG_TYPES (fntype) == 0
11099 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11100 == void_type_node))
11101 {
11102 error ("%<va_start%> used in function with fixed args");
11103 return true;
11104 }
11105
11106 if (va_start_p)
11107 {
11108 if (va_start_p && (nargs != 2))
11109 {
11110 error ("wrong number of arguments to function %<va_start%>");
11111 return true;
11112 }
11113 arg = CALL_EXPR_ARG (exp, 1);
11114 }
11115 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11116 when we checked the arguments and if needed issued a warning. */
11117 else
11118 {
11119 if (nargs == 0)
11120 {
11121 /* Evidently an out of date version of <stdarg.h>; can't validate
11122 va_start's second argument, but can still work as intended. */
11123 warning (0, "%<__builtin_next_arg%> called without an argument");
11124 return true;
11125 }
11126 else if (nargs > 1)
11127 {
11128 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11129 return true;
11130 }
11131 arg = CALL_EXPR_ARG (exp, 0);
11132 }
11133
11134 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11135 or __builtin_next_arg (0) the first time we see it, after checking
11136 the arguments and if needed issuing a warning. */
11137 if (!integer_zerop (arg))
11138 {
11139 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11140
11141 /* Strip off all nops for the sake of the comparison. This
11142 is not quite the same as STRIP_NOPS. It does more.
11143 We must also strip off INDIRECT_EXPR for C++ reference
11144 parameters. */
11145 while (TREE_CODE (arg) == NOP_EXPR
11146 || TREE_CODE (arg) == CONVERT_EXPR
11147 || TREE_CODE (arg) == NON_LVALUE_EXPR
11148 || TREE_CODE (arg) == INDIRECT_REF)
11149 arg = TREE_OPERAND (arg, 0);
11150 if (arg != last_parm)
11151 {
11152 /* FIXME: Sometimes with the tree optimizers we can get the
11153 not the last argument even though the user used the last
11154 argument. We just warn and set the arg to be the last
11155 argument so that we will get wrong-code because of
11156 it. */
11157 warning (0, "second parameter of %<va_start%> not last named argument");
11158 }
11159 /* We want to verify the second parameter just once before the tree
11160 optimizers are run and then avoid keeping it in the tree,
11161 as otherwise we could warn even for correct code like:
11162 void foo (int i, ...)
11163 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11164 if (va_start_p)
11165 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11166 else
11167 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11168 }
11169 return false;
11170 }
11171
11172
11173 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11174 ORIG may be null if this is a 2-argument call. We don't attempt to
11175 simplify calls with more than 3 arguments.
11176
11177 Return NULL_TREE if no simplification was possible, otherwise return the
11178 simplified form of the call as a tree. If IGNORED is true, it means that
11179 the caller does not use the returned value of the function. */
11180
11181 static tree
11182 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11183 {
11184 tree call, retval;
11185 const char *fmt_str = NULL;
11186
11187 /* Verify the required arguments in the original call. We deal with two
11188 types of sprintf() calls: 'sprintf (str, fmt)' and
11189 'sprintf (dest, "%s", orig)'. */
11190 if (!validate_arg (dest, POINTER_TYPE)
11191 || !validate_arg (fmt, POINTER_TYPE))
11192 return NULL_TREE;
11193 if (orig && !validate_arg (orig, POINTER_TYPE))
11194 return NULL_TREE;
11195
11196 /* Check whether the format is a literal string constant. */
11197 fmt_str = c_getstr (fmt);
11198 if (fmt_str == NULL)
11199 return NULL_TREE;
11200
11201 call = NULL_TREE;
11202 retval = NULL_TREE;
11203
11204 if (!init_target_chars ())
11205 return NULL_TREE;
11206
11207 /* If the format doesn't contain % args or %%, use strcpy. */
11208 if (strchr (fmt_str, target_percent) == NULL)
11209 {
11210 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11211
11212 if (!fn)
11213 return NULL_TREE;
11214
11215 /* Don't optimize sprintf (buf, "abc", ptr++). */
11216 if (orig)
11217 return NULL_TREE;
11218
11219 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11220 'format' is known to contain no % formats. */
11221 call = build_call_expr (fn, 2, dest, fmt);
11222 if (!ignored)
11223 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11224 }
11225
11226 /* If the format is "%s", use strcpy if the result isn't used. */
11227 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11228 {
11229 tree fn;
11230 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11231
11232 if (!fn)
11233 return NULL_TREE;
11234
11235 /* Don't crash on sprintf (str1, "%s"). */
11236 if (!orig)
11237 return NULL_TREE;
11238
11239 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11240 if (!ignored)
11241 {
11242 retval = c_strlen (orig, 1);
11243 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11244 return NULL_TREE;
11245 }
11246 call = build_call_expr (fn, 2, dest, orig);
11247 }
11248
11249 if (call && retval)
11250 {
11251 retval = fold_convert
11252 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11253 retval);
11254 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11255 }
11256 else
11257 return call;
11258 }
11259
11260 /* Expand a call EXP to __builtin_object_size. */
11261
11262 rtx
11263 expand_builtin_object_size (tree exp)
11264 {
11265 tree ost;
11266 int object_size_type;
11267 tree fndecl = get_callee_fndecl (exp);
11268 location_t locus = EXPR_LOCATION (exp);
11269
11270 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11271 {
11272 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11273 &locus, fndecl);
11274 expand_builtin_trap ();
11275 return const0_rtx;
11276 }
11277
11278 ost = CALL_EXPR_ARG (exp, 1);
11279 STRIP_NOPS (ost);
11280
11281 if (TREE_CODE (ost) != INTEGER_CST
11282 || tree_int_cst_sgn (ost) < 0
11283 || compare_tree_int (ost, 3) > 0)
11284 {
11285 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11286 &locus, fndecl);
11287 expand_builtin_trap ();
11288 return const0_rtx;
11289 }
11290
11291 object_size_type = tree_low_cst (ost, 0);
11292
11293 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11294 }
11295
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is an integer for memset and a pointer for the
     copy/move variants.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* Without a constant object size we cannot prove anything; emit the
     normal library call.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A constant LEN larger than a known SIZE is a guaranteed
	 overflow: warn and keep the checking call.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  location_t locus = EXPR_LOCATION (exp);
	  warning (0, "%Hcall to %D will always overflow destination buffer",
		   &locus, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Expand as the unchecked variant, preserving any tail-call
	 marking from the original call.  */
      fn = build_call_expr (fn, 3, dest, src, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN rather than DEST.  */
	  len = fold_convert (TREE_TYPE (dest), len);
	  expr = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_expr (fn, 4, dest, src, len, size);
	      if (TREE_CODE (fn) == CALL_EXPR)
		CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11417
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the _chk builtin call; FCODE identifies which one.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t locus;

  /* Pick out the length-like argument and the object-size argument;
     their positions differ per builtin.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* An unknown or unlimited (-1) object size can never be shown to
     overflow.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* For the strlen-style builtins LEN is the source string; only
	 warn when its length is a known constant that is >= SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* LEN >= SIZE but the source length is unknown, so the call
	     only *might* overflow; use the weaker warning.  */
	  locus = EXPR_LOCATION (exp);
	  warning (0, "%Hcall to %D might overflow destination buffer",
		   &locus, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  locus = EXPR_LOCATION (exp);
  warning (0, "%Hcall to %D will always overflow destination buffer",
	   &locus, get_callee_fndecl (exp));
}
11487
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  EXP is the call; FCODE is
   either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  dest = CALL_EXPR_ARG (exp, 0);
  flag = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* An unknown or unlimited (-1) object size can never be shown to
     overflow.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* sprintf also writes the terminating NUL, so LEN must be strictly
     less than SIZE to be safe.  */
  if (! tree_int_cst_lt (len, size))
    {
      location_t locus = EXPR_LOCATION (exp);
      warning (0, "%Hcall to %D will always overflow destination buffer",
	       &locus, get_callee_fndecl (exp));
    }
}
11548
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  Returns an INTEGER_CST tree or NULL_TREE if the size
   cannot be determined (folding is retried by later passes).  */

tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be a constant in the range 0..3 selecting
     the object-size type.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
			  compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
					     ? -1 : 0))
	ret = build_int_cstu (size_type_node, bytes);
    }

  if (ret)
    {
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      /* Punt if the computed constant does not fit in the result
	 type (fit_double_type returns nonzero on overflow).  */
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
	ret = NULL_TREE;
    }

  return ret;
}
11604
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

tree
fold_builtin_memory_chk (tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For memset the second argument is the fill value, an integer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
      else
	{
	  tree temp = fold_convert (TREE_TYPE (dest), len);
	  temp = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, temp);
	  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* Without a constant object size nothing can be proven.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of all-ones means the object size is unlimited; otherwise we
     must show the copy length cannot exceed SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  /* The check is provably unnecessary: call the plain variant.  */
  return build_call_expr (fn, 3, dest, src, len);
}
11699
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

tree
fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* Without a constant object size nothing can be proven.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of all-ones means the object size is unlimited; otherwise we
     must show the string (plus NUL) fits.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = size_binop (PLUS_EXPR, len, ssize_int (1));
	      return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
				   build_call_expr (fn, 4,
						    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* Require strlen (SRC) < SIZE so the NUL also fits.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 2, dest, src);
}
11779
11780 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11781 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11782 length passed as third argument. */
11783
11784 tree
11785 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11786 tree maxlen)
11787 {
11788 tree fn;
11789
11790 if (!validate_arg (dest, POINTER_TYPE)
11791 || !validate_arg (src, POINTER_TYPE)
11792 || !validate_arg (len, INTEGER_TYPE)
11793 || !validate_arg (size, INTEGER_TYPE))
11794 return NULL_TREE;
11795
11796 if (! host_integerp (size, 1))
11797 return NULL_TREE;
11798
11799 if (! integer_all_onesp (size))
11800 {
11801 if (! host_integerp (len, 1))
11802 {
11803 /* If LEN is not constant, try MAXLEN too.
11804 For MAXLEN only allow optimizing into non-_ocs function
11805 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11806 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11807 return NULL_TREE;
11808 }
11809 else
11810 maxlen = len;
11811
11812 if (tree_int_cst_lt (size, maxlen))
11813 return NULL_TREE;
11814 }
11815
11816 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11817 fn = built_in_decls[BUILT_IN_STRNCPY];
11818 if (!fn)
11819 return NULL_TREE;
11820
11821 return build_call_expr (fn, 3, dest, src, len);
11822 }
11823
11824 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11825 are the arguments to the call. */
11826
11827 static tree
11828 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11829 {
11830 tree fn;
11831 const char *p;
11832
11833 if (!validate_arg (dest, POINTER_TYPE)
11834 || !validate_arg (src, POINTER_TYPE)
11835 || !validate_arg (size, INTEGER_TYPE))
11836 return NULL_TREE;
11837
11838 p = c_getstr (src);
11839 /* If the SRC parameter is "", return DEST. */
11840 if (p && *p == '\0')
11841 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11842
11843 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11844 return NULL_TREE;
11845
11846 /* If __builtin_strcat_chk is used, assume strcat is available. */
11847 fn = built_in_decls[BUILT_IN_STRCAT];
11848 if (!fn)
11849 return NULL_TREE;
11850
11851 return build_call_expr (fn, 2, dest, src);
11852 }
11853
11854 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11855 LEN, and SIZE. */
11856
11857 static tree
11858 fold_builtin_strncat_chk (tree fndecl,
11859 tree dest, tree src, tree len, tree size)
11860 {
11861 tree fn;
11862 const char *p;
11863
11864 if (!validate_arg (dest, POINTER_TYPE)
11865 || !validate_arg (src, POINTER_TYPE)
11866 || !validate_arg (size, INTEGER_TYPE)
11867 || !validate_arg (size, INTEGER_TYPE))
11868 return NULL_TREE;
11869
11870 p = c_getstr (src);
11871 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11872 if (p && *p == '\0')
11873 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11874 else if (integer_zerop (len))
11875 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11876
11877 if (! host_integerp (size, 1))
11878 return NULL_TREE;
11879
11880 if (! integer_all_onesp (size))
11881 {
11882 tree src_len = c_strlen (src, 1);
11883 if (src_len
11884 && host_integerp (src_len, 1)
11885 && host_integerp (len, 1)
11886 && ! tree_int_cst_lt (len, src_len))
11887 {
11888 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11889 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11890 if (!fn)
11891 return NULL_TREE;
11892
11893 return build_call_expr (fn, 3, dest, src, size);
11894 }
11895 return NULL_TREE;
11896 }
11897
11898 /* If __builtin_strncat_chk is used, assume strncat is available. */
11899 fn = built_in_decls[BUILT_IN_STRNCAT];
11900 if (!fn)
11901 return NULL_TREE;
11902
11903 return build_call_expr (fn, 3, dest, src, len);
11904 }
11905
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The destination object size must be a compile-time constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN stays NULL_TREE unless we can compute the output length below.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.
	 For sprintf_chk this is only trusted when there are no extra
	 variadic arguments (nargs == 4); vsprintf_chk takes a va_list,
	 so no extra arguments are ever visible here.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = CALL_EXPR_ARG (exp, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  /* Only a constant length is usable below.  */
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is all ones (the "object size unknown" marker), the
     known output length must fit strictly inside the destination.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call without the FLAG and SIZE checking arguments.  */
  return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
}
11996
/* Fold a call EXP to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
fold_builtin_snprintf_chk (tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, size, fmt, ...).  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The destination object size must be a compile-time constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* Unless SIZE is all ones (the "object size unknown" marker),
     prove that at most SIZE bytes can be written.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call without the FLAG and SIZE checking arguments.  */
  return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
}
12073
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation: the
     putchar/puts replacements do not return the character count.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle formats that are exactly "%s" or contain no '%' at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* A va_list variant cannot expose the "%s" argument here.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  /* The "%s" argument must itself be a known string literal.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr (fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = alloca (len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr (fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr (fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr (fn_putchar, 1, arg);
    }

  /* CALL is still NULL_TREE when the replacement builtin was not
     available; emit the original call in that case.  */
  if (!call)
    return NULL_TREE;

  /* Convert the replacement's result to printf's return type.  */
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12208
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation: the
     fputc/fputs replacements do not return the character count.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Extra arguments with a %-less format are suspicious; leave the
	 call alone (va_list variants hide their arguments, so they are
	 exempt).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr (fn_fputc, 2, arg, fp);
    }

  /* CALL is still NULL_TREE when the replacement builtin was not
     available; emit the original call in that case.  */
  if (!call)
    return NULL_TREE;
  /* Convert the replacement's result to fprintf's return type.  */
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12306
12307 /* Initialize format string characters in the target charset. */
12308
12309 static bool
12310 init_target_chars (void)
12311 {
12312 static bool init;
12313 if (!init)
12314 {
12315 target_newline = lang_hooks.to_target_charset ('\n');
12316 target_percent = lang_hooks.to_target_charset ('%');
12317 target_c = lang_hooks.to_target_charset ('c');
12318 target_s = lang_hooks.to_target_charset ('s');
12319 if (target_newline == 0 || target_percent == 0 || target_c == 0
12320 || target_s == 0)
12321 return false;
12322
12323 target_percent_c[0] = target_percent;
12324 target_percent_c[1] = target_c;
12325 target_percent_c[2] = '\0';
12326
12327 target_percent_s[0] = target_percent;
12328 target_percent_s[1] = target_s;
12329 target_percent_s[2] = '\0';
12330
12331 target_percent_s_newline[0] = target_percent;
12332 target_percent_s_newline[1] = target_s;
12333 target_percent_s_newline[2] = target_newline;
12334 target_percent_s_newline[3] = '\0';
12335
12336 init = true;
12337 }
12338 return true;
12339 }
12340
12341 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12342 and no overflow/underflow occurred. INEXACT is true if M was not
12343 exactly calculated. TYPE is the tree type for the result. This
12344 function assumes that you cleared the MPFR flags and then
12345 calculated M to see if anything subsequently set a flag prior to
12346 entering this function. Return NULL_TREE if any checks fail. */
12347
12348 static tree
12349 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12350 {
12351 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12352 overflow/underflow occurred. If -frounding-math, proceed iff the
12353 result of calling FUNC was exact. */
12354 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12355 && (!flag_rounding_math || !inexact))
12356 {
12357 REAL_VALUE_TYPE rr;
12358
12359 real_from_mpfr (&rr, m, type, GMP_RNDN);
12360 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12361 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12362 but the mpft_t is not, then we underflowed in the
12363 conversion. */
12364 if (!real_isnan (&rr) && !real_isinf (&rr)
12365 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12366 {
12367 REAL_VALUE_TYPE rmode;
12368
12369 real_convert (&rmode, TYPE_MODE (type), &rr);
12370 /* Proceed iff the specified mode can hold the value. */
12371 if (real_identical (&rmode, &rr))
12372 return build_real (type, rmode);
12373 }
12374 }
12375 return NULL_TREE;
12376 }
12377
12378 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12379 FUNC on it and return the resulting value as a tree with type TYPE.
12380 If MIN and/or MAX are not NULL, then the supplied ARG must be
12381 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12382 acceptable values, otherwise they are not. The mpfr precision is
12383 set to the precision of TYPE. We assume that function FUNC returns
12384 zero if the result could be calculated exactly within the requested
12385 precision. */
12386
12387 static tree
12388 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12389 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12390 bool inclusive)
12391 {
12392 tree result = NULL_TREE;
12393
12394 STRIP_NOPS (arg);
12395
12396 /* To proceed, MPFR must exactly represent the target floating point
12397 format, which only happens when the target base equals two. */
12398 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12399 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12400 {
12401 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12402
12403 if (!real_isnan (ra) && !real_isinf (ra)
12404 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12405 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12406 {
12407 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12408 int inexact;
12409 mpfr_t m;
12410
12411 mpfr_init2 (m, prec);
12412 mpfr_from_real (m, ra, GMP_RNDN);
12413 mpfr_clear_flags ();
12414 inexact = func (m, m, GMP_RNDN);
12415 result = do_mpfr_ckconv (m, type, inexact);
12416 mpfr_clear (m);
12417 }
12418 }
12419
12420 return result;
12421 }
12422
12423 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12424 FUNC on it and return the resulting value as a tree with type TYPE.
12425 The mpfr precision is set to the precision of TYPE. We assume that
12426 function FUNC returns zero if the result could be calculated
12427 exactly within the requested precision. */
12428
12429 static tree
12430 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12431 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12432 {
12433 tree result = NULL_TREE;
12434
12435 STRIP_NOPS (arg1);
12436 STRIP_NOPS (arg2);
12437
12438 /* To proceed, MPFR must exactly represent the target floating point
12439 format, which only happens when the target base equals two. */
12440 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12441 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12442 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12443 {
12444 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12445 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12446
12447 if (!real_isnan (ra1) && !real_isinf (ra1)
12448 && !real_isnan (ra2) && !real_isinf (ra2))
12449 {
12450 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12451 int inexact;
12452 mpfr_t m1, m2;
12453
12454 mpfr_inits2 (prec, m1, m2, NULL);
12455 mpfr_from_real (m1, ra1, GMP_RNDN);
12456 mpfr_from_real (m2, ra2, GMP_RNDN);
12457 mpfr_clear_flags ();
12458 inexact = func (m1, m1, m2, GMP_RNDN);
12459 result = do_mpfr_ckconv (m1, type, inexact);
12460 mpfr_clears (m1, m2, NULL);
12461 }
12462 }
12463
12464 return result;
12465 }
12466
/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  All
     three arguments must also be valid real constants.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      /* All three arguments must be finite.  */
      if (!real_isnan (ra1) && !real_isinf (ra1)
	  && !real_isnan (ra2) && !real_isinf (ra2)
	  && !real_isnan (ra3) && !real_isinf (ra3))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  /* Evaluate FUNC at TYPE's precision; do_mpfr_ckconv inspects
	     the MPFR flags cleared just before the call.  */
	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, GMP_RNDN);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
12515
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two, and
     ARG must be a valid real constant.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (!real_isnan (ra) && !real_isinf (ra))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* Compute sin and cos simultaneously; both must convert
	     cleanly for the fold to proceed.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.
		 Note the real part is cos and the imaginary part sin.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  Each store is marked as having
		     side effects so it is not optimized away.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
12583
12584 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  If MIN is not NULL, ARG2 must be at least
   (or, when INCLUSIVE, strictly greater than) *MIN.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  The
     order must be a host integer and ARG2 a valid real constant.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* The order must also fit in a `long', which is what FUNC takes;
	 HOST_WIDE_INT may be wider.  */
      if (n == (long)n
	  && !real_isnan (ra) && !real_isinf (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact;
	  mpfr_t m;

	  /* Evaluate FUNC at TYPE's precision and validate the result.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, GMP_RNDN);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
12629
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two, and
     both arguments must be valid real constants.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (!real_isnan (ra0) && !real_isinf (ra0)
	  && !real_isnan (ra1) && !real_isinf (ra1))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value; mark the store as having side effects
		     so it is not optimized away.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
12701
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer (lgamma poles).  */
      if (!real_isnan (ra) && !real_isinf (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  /* Evaluate lgamma at TYPE's precision; SG receives the sign
	     of gamma(ARG).  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg; mark the store
		 as having side effects so it is not optimized away.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
12764 #endif