1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "realmpfr.h"
28 #include "gimple.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
46 #include "tree-ssanames.h"
47 #include "tree-dfa.h"
48 #include "value-prof.h"
49 #include "diagnostic-core.h"
50 #include "builtins.h"
51 #include "ubsan.h"
52 #include "cilk.h"
53
54
55 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
56
57 struct target_builtins default_target_builtins;
58 #if SWITCHABLE_TARGET
59 struct target_builtins *this_target_builtins = &default_target_builtins;
60 #endif
61
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[BUILT_IN_LAST]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
65
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names[(int) END_BUILTINS] =
68 {
69 #include "builtins.def"
70 };
71 #undef DEF_BUILTIN
72
 73 /* Set up an array of _DECL trees; make sure each element is
74 initialized to NULL_TREE. */
75 builtin_info_type builtin_info;
76
77 /* Non-zero if __builtin_constant_p should be folded right away. */
78 bool force_folding_builtin_constant_p;
79
80 static const char *c_getstr (tree);
81 static rtx c_readstr (const char *, enum machine_mode);
82 static int target_char_cast (tree, char *);
83 static rtx get_memory_rtx (tree, tree);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector (int, rtx);
88 #endif
89 static void expand_builtin_update_setjmp_buf (rtx);
90 static void expand_builtin_prefetch (tree);
91 static rtx expand_builtin_apply_args (void);
92 static rtx expand_builtin_apply_args_1 (void);
93 static rtx expand_builtin_apply (rtx, rtx, rtx);
94 static void expand_builtin_return (rtx);
95 static enum type_class type_to_class (tree);
96 static rtx expand_builtin_classify_type (tree);
97 static void expand_errno_check (tree, rtx);
98 static rtx expand_builtin_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_next_arg (void);
108 static rtx expand_builtin_va_start (tree);
109 static rtx expand_builtin_va_end (tree);
110 static rtx expand_builtin_va_copy (tree);
111 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcmp (tree, rtx);
113 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
114 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
115 static rtx expand_builtin_memcpy (tree, rtx);
116 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
118 enum machine_mode, int);
119 static rtx expand_builtin_strcpy (tree, rtx);
120 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
121 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strncpy (tree, rtx);
123 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
126 static rtx expand_builtin_bzero (tree);
127 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_alloca (tree, bool);
129 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
130 static rtx expand_builtin_frame_address (tree, tree);
131 static tree stabilize_va_list_loc (location_t, tree, int);
132 static rtx expand_builtin_expect (tree, rtx);
133 static tree fold_builtin_constant_p (tree);
134 static tree fold_builtin_expect (location_t, tree, tree);
135 static tree fold_builtin_classify_type (tree);
136 static tree fold_builtin_strlen (location_t, tree, tree);
137 static tree fold_builtin_inf (location_t, tree, int);
138 static tree fold_builtin_nan (tree, tree, int);
139 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
140 static bool validate_arg (const_tree, enum tree_code code);
141 static bool integer_valued_real_p (tree);
142 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
143 static bool readonly_data_expr (tree);
144 static rtx expand_builtin_fabs (tree, rtx, rtx);
145 static rtx expand_builtin_signbit (tree, rtx);
146 static tree fold_builtin_sqrt (location_t, tree, tree);
147 static tree fold_builtin_cbrt (location_t, tree, tree);
148 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
149 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_cos (location_t, tree, tree, tree);
151 static tree fold_builtin_cosh (location_t, tree, tree, tree);
152 static tree fold_builtin_tan (tree, tree);
153 static tree fold_builtin_trunc (location_t, tree, tree);
154 static tree fold_builtin_floor (location_t, tree, tree);
155 static tree fold_builtin_ceil (location_t, tree, tree);
156 static tree fold_builtin_round (location_t, tree, tree);
157 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
158 static tree fold_builtin_bitop (tree, tree);
159 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
160 static tree fold_builtin_strchr (location_t, tree, tree, tree);
161 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_strcmp (location_t, tree, tree);
164 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
165 static tree fold_builtin_signbit (location_t, tree, tree);
166 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
175 static tree fold_builtin_0 (location_t, tree, bool);
176 static tree fold_builtin_1 (location_t, tree, tree, bool);
177 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
178 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
179 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
180 static tree fold_builtin_varargs (location_t, tree, tree, bool);
181
182 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
183 static tree fold_builtin_strstr (location_t, tree, tree, tree);
184 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
185 static tree fold_builtin_strcat (location_t, tree, tree);
186 static tree fold_builtin_strncat (location_t, tree, tree, tree);
187 static tree fold_builtin_strspn (location_t, tree, tree);
188 static tree fold_builtin_strcspn (location_t, tree, tree);
189 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
190 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
191
192 static rtx expand_builtin_object_size (tree);
193 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
194 enum built_in_function);
195 static void maybe_emit_chk_warning (tree, enum built_in_function);
196 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_free_warning (tree);
198 static tree fold_builtin_object_size (tree, tree);
199 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
200 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
201 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
205 static bool init_target_chars (void);
206
207 static unsigned HOST_WIDE_INT target_newline;
208 static unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 static char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
226 static void expand_builtin_sync_synchronize (void);
227
 228 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
229
230 static bool
231 is_builtin_name (const char *name)
232 {
233 if (strncmp (name, "__builtin_", 10) == 0)
234 return true;
235 if (strncmp (name, "__sync_", 7) == 0)
236 return true;
237 if (strncmp (name, "__atomic_", 9) == 0)
238 return true;
239 if (flag_enable_cilkplus
240 && (!strcmp (name, "__cilkrts_detach")
241 || !strcmp (name, "__cilkrts_pop_frame")))
242 return true;
243 return false;
244 }
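
/* Illustrative examples of the check above (editorial sketch, not GCC code):
   is_builtin_name ("__builtin_memcpy") and is_builtin_name ("__atomic_load_n")
   return true, while is_builtin_name ("memcpy") returns false; only the
   literal name prefix is inspected, never the declaration it resolves to. */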
245
246
247 /* Return true if DECL is a function symbol representing a built-in. */
248
249 bool
250 is_builtin_fn (tree decl)
251 {
252 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 }
254
 255 /* By default we assume that C99 functions are present at run time,
 256 but sincos is not. */
257 bool
258 default_libc_has_function (enum function_class fn_class)
259 {
260 if (fn_class == function_c94
261 || fn_class == function_c99_misc
262 || fn_class == function_c99_math_complex)
263 return true;
264
265 return false;
266 }
267
268 bool
269 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
270 {
271 return true;
272 }
273
274 bool
275 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
276 {
277 return false;
278 }
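
/* Illustrative note (an assumption about typical target configuration, not
   code in this file): glibc-based targets generally point their
   libc_has_function hook at gnu_libc_has_function, so every query succeeds,
   while bare-metal targets use no_c99_libc_has_function so that GCC never
   synthesizes calls to C99 routines the runtime does not provide. */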
279
280 /* Return true if NODE should be considered for inline expansion regardless
 281 of the optimization level. This is the case whenever a function is invoked
 282 under its "internal" name, which normally begins with the prefix "__builtin". */
283
284 static bool
285 called_as_built_in (tree node)
286 {
287 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
288 we want the name used to call the function, not the name it
289 will have. */
290 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
291 return is_builtin_name (name);
292 }
293
294 /* Compute values M and N such that M divides (address of EXP - N) and such
 295 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
 296 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
 297 *ALIGNP and any bit-offset to *BITPOSP.
298
299 Note that the address (and thus the alignment) computed here is based
300 on the address to which a symbol resolves, whereas DECL_ALIGN is based
301 on the address at which an object is actually located. These two
302 addresses are not always the same. For example, on ARM targets,
303 the address &foo of a Thumb function foo() has the lowest bit set,
304 whereas foo() itself starts on an even address.
305
306 If ADDR_P is true we are taking the address of the memory reference EXP
307 and thus cannot rely on the access taking place. */
308
309 static bool
310 get_object_alignment_2 (tree exp, unsigned int *alignp,
311 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
312 {
313 HOST_WIDE_INT bitsize, bitpos;
314 tree offset;
315 enum machine_mode mode;
316 int unsignedp, volatilep;
317 unsigned int align = BITS_PER_UNIT;
318 bool known_alignment = false;
319
320 /* Get the innermost object and the constant (bitpos) and possibly
321 variable (offset) offset of the access. */
322 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
323 &mode, &unsignedp, &volatilep, true);
324
325 /* Extract alignment information from the innermost object and
326 possibly adjust bitpos and offset. */
327 if (TREE_CODE (exp) == FUNCTION_DECL)
328 {
329 /* Function addresses can encode extra information besides their
330 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
331 allows the low bit to be used as a virtual bit, we know
332 that the address itself must be at least 2-byte aligned. */
333 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
334 align = 2 * BITS_PER_UNIT;
335 }
336 else if (TREE_CODE (exp) == LABEL_DECL)
337 ;
338 else if (TREE_CODE (exp) == CONST_DECL)
339 {
340 /* The alignment of a CONST_DECL is determined by its initializer. */
341 exp = DECL_INITIAL (exp);
342 align = TYPE_ALIGN (TREE_TYPE (exp));
343 #ifdef CONSTANT_ALIGNMENT
344 if (CONSTANT_CLASS_P (exp))
345 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
346 #endif
347 known_alignment = true;
348 }
349 else if (DECL_P (exp))
350 {
351 align = DECL_ALIGN (exp);
352 known_alignment = true;
353 }
354 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
355 {
356 align = TYPE_ALIGN (TREE_TYPE (exp));
357 }
358 else if (TREE_CODE (exp) == INDIRECT_REF
359 || TREE_CODE (exp) == MEM_REF
360 || TREE_CODE (exp) == TARGET_MEM_REF)
361 {
362 tree addr = TREE_OPERAND (exp, 0);
363 unsigned ptr_align;
364 unsigned HOST_WIDE_INT ptr_bitpos;
365
366 if (TREE_CODE (addr) == BIT_AND_EXPR
367 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
368 {
369 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
370 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
371 align *= BITS_PER_UNIT;
372 addr = TREE_OPERAND (addr, 0);
373 }
374
375 known_alignment
376 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
377 align = MAX (ptr_align, align);
378
379 /* The alignment of the pointer operand in a TARGET_MEM_REF
380 has to take the variable offset parts into account. */
381 if (TREE_CODE (exp) == TARGET_MEM_REF)
382 {
383 if (TMR_INDEX (exp))
384 {
385 unsigned HOST_WIDE_INT step = 1;
386 if (TMR_STEP (exp))
387 step = TREE_INT_CST_LOW (TMR_STEP (exp));
388 align = MIN (align, (step & -step) * BITS_PER_UNIT);
389 }
390 if (TMR_INDEX2 (exp))
391 align = BITS_PER_UNIT;
392 known_alignment = false;
393 }
394
395 /* When EXP is an actual memory reference then we can use
396 TYPE_ALIGN of a pointer indirection to derive alignment.
397 Do so only if get_pointer_alignment_1 did not reveal absolute
398 alignment knowledge and if using that alignment would
399 improve the situation. */
400 if (!addr_p && !known_alignment
401 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
402 align = TYPE_ALIGN (TREE_TYPE (exp));
403 else
404 {
405 /* Else adjust bitpos accordingly. */
406 bitpos += ptr_bitpos;
407 if (TREE_CODE (exp) == MEM_REF
408 || TREE_CODE (exp) == TARGET_MEM_REF)
409 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
410 }
411 }
412 else if (TREE_CODE (exp) == STRING_CST)
413 {
 414 /* STRING_CSTs are the only constant objects we allow not to be
 415 wrapped inside a CONST_DECL. */
416 align = TYPE_ALIGN (TREE_TYPE (exp));
417 #ifdef CONSTANT_ALIGNMENT
418 if (CONSTANT_CLASS_P (exp))
419 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
420 #endif
421 known_alignment = true;
422 }
423
424 /* If there is a non-constant offset part extract the maximum
425 alignment that can prevail. */
426 if (offset)
427 {
428 int trailing_zeros = tree_ctz (offset);
429 if (trailing_zeros < HOST_BITS_PER_INT)
430 {
431 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
432 if (inner)
433 align = MIN (align, inner);
434 }
435 }
436
437 *alignp = align;
438 *bitposp = bitpos & (*alignp - 1);
439 return known_alignment;
440 }
441
442 /* For a memory reference expression EXP compute values M and N such that M
443 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 444 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
 445 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
446
447 bool
448 get_object_alignment_1 (tree exp, unsigned int *alignp,
449 unsigned HOST_WIDE_INT *bitposp)
450 {
451 return get_object_alignment_2 (exp, alignp, bitposp, false);
452 }
453
454 /* Return the alignment in bits of EXP, an object. */
455
456 unsigned int
457 get_object_alignment (tree exp)
458 {
459 unsigned HOST_WIDE_INT bitpos = 0;
460 unsigned int align;
461
462 get_object_alignment_1 (exp, &align, &bitpos);
463
464 /* align and bitpos now specify known low bits of the pointer.
465 ptr & (align - 1) == bitpos. */
466
467 if (bitpos != 0)
468 align = (bitpos & -bitpos);
469 return align;
470 }
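
/* Worked example for the functions above (illustrative): if
   get_object_alignment_1 reports align == 128 and bitpos == 32, i.e. the
   address is a multiple of 16 bytes plus 4 bytes, then the largest power
   of two known to divide the address is bitpos & -bitpos == 32 bits, and
   get_object_alignment returns that 4-byte alignment. */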
471
472 /* For a pointer valued expression EXP compute values M and N such that M
473 divides (EXP - N) and such that N < M. If these numbers can be determined,
 474 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
475 the results are just a conservative approximation.
476
477 If EXP is not a pointer, false is returned too. */
478
479 bool
480 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
481 unsigned HOST_WIDE_INT *bitposp)
482 {
483 STRIP_NOPS (exp);
484
485 if (TREE_CODE (exp) == ADDR_EXPR)
486 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
487 alignp, bitposp, true);
488 else if (TREE_CODE (exp) == SSA_NAME
489 && POINTER_TYPE_P (TREE_TYPE (exp)))
490 {
491 unsigned int ptr_align, ptr_misalign;
492 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
493
494 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
495 {
496 *bitposp = ptr_misalign * BITS_PER_UNIT;
497 *alignp = ptr_align * BITS_PER_UNIT;
498 /* We cannot really tell whether this result is an approximation. */
499 return true;
500 }
501 else
502 {
503 *bitposp = 0;
504 *alignp = BITS_PER_UNIT;
505 return false;
506 }
507 }
508 else if (TREE_CODE (exp) == INTEGER_CST)
509 {
510 *alignp = BIGGEST_ALIGNMENT;
511 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
512 & (BIGGEST_ALIGNMENT - 1));
513 return true;
514 }
515
516 *bitposp = 0;
517 *alignp = BITS_PER_UNIT;
518 return false;
519 }
520
521 /* Return the alignment in bits of EXP, a pointer valued expression.
522 The alignment returned is, by default, the alignment of the thing that
523 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
524
525 Otherwise, look at the expression to see if we can do better, i.e., if the
526 expression is actually pointing at an object whose alignment is tighter. */
527
528 unsigned int
529 get_pointer_alignment (tree exp)
530 {
531 unsigned HOST_WIDE_INT bitpos = 0;
532 unsigned int align;
533
534 get_pointer_alignment_1 (exp, &align, &bitpos);
535
536 /* align and bitpos now specify known low bits of the pointer.
537 ptr & (align - 1) == bitpos. */
538
539 if (bitpos != 0)
540 align = (bitpos & -bitpos);
541
542 return align;
543 }
544
545 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
 546 way, because the string could contain a zero byte in the middle.
547 TREE_STRING_LENGTH is the size of the character array, not the string.
548
549 ONLY_VALUE should be nonzero if the result is not going to be emitted
550 into the instruction stream and zero if it is going to be expanded.
551 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
552 is returned, otherwise NULL, since
553 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
554 evaluate the side-effects.
555
556 The value returned is of type `ssizetype'.
557
558 Unfortunately, string_constant can't access the values of const char
559 arrays with initializers, so neither can we do so here. */
560
561 tree
562 c_strlen (tree src, int only_value)
563 {
564 tree offset_node;
565 HOST_WIDE_INT offset;
566 int max;
567 const char *ptr;
568 location_t loc;
569
570 STRIP_NOPS (src);
571 if (TREE_CODE (src) == COND_EXPR
572 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
573 {
574 tree len1, len2;
575
576 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
577 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
578 if (tree_int_cst_equal (len1, len2))
579 return len1;
580 }
581
582 if (TREE_CODE (src) == COMPOUND_EXPR
583 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
584 return c_strlen (TREE_OPERAND (src, 1), only_value);
585
586 loc = EXPR_LOC_OR_HERE (src);
587
588 src = string_constant (src, &offset_node);
589 if (src == 0)
590 return NULL_TREE;
591
592 max = TREE_STRING_LENGTH (src) - 1;
593 ptr = TREE_STRING_POINTER (src);
594
595 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
596 {
597 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
598 compute the offset to the following null if we don't know where to
599 start searching for it. */
600 int i;
601
602 for (i = 0; i < max; i++)
603 if (ptr[i] == 0)
604 return NULL_TREE;
605
606 /* We don't know the starting offset, but we do know that the string
607 has no internal zero bytes. We can assume that the offset falls
608 within the bounds of the string; otherwise, the programmer deserves
609 what he gets. Subtract the offset from the length of the string,
610 and return that. This would perhaps not be valid if we were dealing
611 with named arrays in addition to literal string constants. */
612
613 return size_diffop_loc (loc, size_int (max), offset_node);
614 }
615
616 /* We have a known offset into the string. Start searching there for
617 a null character if we can represent it as a single HOST_WIDE_INT. */
618 if (offset_node == 0)
619 offset = 0;
620 else if (! tree_fits_shwi_p (offset_node))
621 offset = -1;
622 else
623 offset = tree_low_cst (offset_node, 0);
624
625 /* If the offset is known to be out of bounds, warn, and call strlen at
626 runtime. */
627 if (offset < 0 || offset > max)
628 {
629 /* Suppress multiple warnings for propagated constant strings. */
630 if (! TREE_NO_WARNING (src))
631 {
632 warning_at (loc, 0, "offset outside bounds of constant string");
633 TREE_NO_WARNING (src) = 1;
634 }
635 return NULL_TREE;
636 }
637
638 /* Use strlen to search for the first zero byte. Since any strings
639 constructed with build_string will have nulls appended, we win even
640 if we get handed something like (char[4])"abcd".
641
642 Since OFFSET is our starting index into the string, no further
643 calculation is needed. */
644 return ssize_int (strlen (ptr + offset));
645 }
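
/* Illustrative results for c_strlen (hypothetical trees): for "hello" with
   a zero offset it returns ssize_int (5); for "foo\0bar" with a constant
   offset of 4 it returns ssize_int (3), searching from the offset; for
   "foo\0bar" with a non-constant offset it returns NULL_TREE, since the
   internal zero byte makes the length depend on the runtime offset. */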
646
647 /* Return a char pointer for a C string if it is a string constant
648 or sum of string constant and integer constant. */
649
650 static const char *
651 c_getstr (tree src)
652 {
653 tree offset_node;
654
655 src = string_constant (src, &offset_node);
656 if (src == 0)
657 return 0;
658
659 if (offset_node == 0)
660 return TREE_STRING_POINTER (src);
661 else if (!host_integerp (offset_node, 1)
662 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
663 return 0;
664
665 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
666 }
667
668 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
669 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
670
671 static rtx
672 c_readstr (const char *str, enum machine_mode mode)
673 {
674 HOST_WIDE_INT c[2];
675 HOST_WIDE_INT ch;
676 unsigned int i, j;
677
678 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
679
680 c[0] = 0;
681 c[1] = 0;
682 ch = 1;
683 for (i = 0; i < GET_MODE_SIZE (mode); i++)
684 {
685 j = i;
686 if (WORDS_BIG_ENDIAN)
687 j = GET_MODE_SIZE (mode) - i - 1;
688 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
689 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
690 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
691 j *= BITS_PER_UNIT;
692 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);
693
694 if (ch)
695 ch = (unsigned char) str[i];
696 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
697 }
698 return immed_double_const (c[0], c[1], mode);
699 }
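
/* Worked example (illustrative): c_readstr ("abcd", SImode) on a
   little-endian target places 'a' in the least significant byte, giving
   the constant 0x64636261; a big-endian target gives 0x61626364. Once a
   terminating NUL is read, CH sticks at zero, so any remaining bytes of
   the mode are zero-filled. */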
700
 701 /* Cast a target constant CST to target CHAR and, if that value fits into
 702 the host char type, return zero and put that value into the variable
 703 pointed to by P. */
704
705 static int
706 target_char_cast (tree cst, char *p)
707 {
708 unsigned HOST_WIDE_INT val, hostval;
709
710 if (TREE_CODE (cst) != INTEGER_CST
711 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
712 return 1;
713
714 val = TREE_INT_CST_LOW (cst);
715 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
716 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
717
718 hostval = val;
719 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
720 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
721
722 if (val != hostval)
723 return 1;
724
725 *p = hostval;
726 return 0;
727 }
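
/* Sketch of a typical caller of target_char_cast (hypothetical):

     char c;
     if (target_char_cast (arg, &c))
       return NULL_RTX;    (the constant does not fit in a host char)
     ... otherwise C now holds the target character value ...  */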
728
729 /* Similar to save_expr, but assumes that arbitrary code is not executed
730 in between the multiple evaluations. In particular, we assume that a
731 non-addressable local variable will not be modified. */
732
733 static tree
734 builtin_save_expr (tree exp)
735 {
736 if (TREE_CODE (exp) == SSA_NAME
737 || (TREE_ADDRESSABLE (exp) == 0
738 && (TREE_CODE (exp) == PARM_DECL
739 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
740 return exp;
741
742 return save_expr (exp);
743 }
744
745 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
746 times to get the address of either a higher stack frame, or a return
747 address located within it (depending on FNDECL_CODE). */
748
749 static rtx
750 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
751 {
752 int i;
753
754 #ifdef INITIAL_FRAME_ADDRESS_RTX
755 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
756 #else
757 rtx tem;
758
759 /* For a zero count with __builtin_return_address, we don't care what
760 frame address we return, because target-specific definitions will
761 override us. Therefore frame pointer elimination is OK, and using
762 the soft frame pointer is OK.
763
764 For a nonzero count, or a zero count with __builtin_frame_address,
765 we require a stable offset from the current frame pointer to the
766 previous one, so we must use the hard frame pointer, and
767 we must disable frame pointer elimination. */
768 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
769 tem = frame_pointer_rtx;
770 else
771 {
772 tem = hard_frame_pointer_rtx;
773
774 /* Tell reload not to eliminate the frame pointer. */
775 crtl->accesses_prior_frames = 1;
776 }
777 #endif
778
779 /* Some machines need special handling before we can access
780 arbitrary frames. For example, on the SPARC, we must first flush
781 all register windows to the stack. */
782 #ifdef SETUP_FRAME_ADDRESSES
783 if (count > 0)
784 SETUP_FRAME_ADDRESSES ();
785 #endif
786
787 /* On the SPARC, the return address is not in the frame, it is in a
788 register. There is no way to access it off of the current frame
789 pointer, but it can be accessed off the previous frame pointer by
790 reading the value from the register window save area. */
791 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
792 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
793 count--;
794 #endif
795
796 /* Scan back COUNT frames to the specified frame. */
797 for (i = 0; i < count; i++)
798 {
799 /* Assume the dynamic chain pointer is in the word that the
800 frame address points to, unless otherwise specified. */
801 #ifdef DYNAMIC_CHAIN_ADDRESS
802 tem = DYNAMIC_CHAIN_ADDRESS (tem);
803 #endif
804 tem = memory_address (Pmode, tem);
805 tem = gen_frame_mem (Pmode, tem);
806 tem = copy_to_reg (tem);
807 }
808
809 /* For __builtin_frame_address, return what we've got. But, on
810 the SPARC for example, we may have to add a bias. */
811 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
812 #ifdef FRAME_ADDR_RTX
813 return FRAME_ADDR_RTX (tem);
814 #else
815 return tem;
816 #endif
817
818 /* For __builtin_return_address, get the return address from that frame. */
819 #ifdef RETURN_ADDR_RTX
820 tem = RETURN_ADDR_RTX (count, tem);
821 #else
822 tem = memory_address (Pmode,
823 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
824 tem = gen_frame_mem (Pmode, tem);
825 #endif
826 return tem;
827 }
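
/* Source-level view of the function above (illustrative):
   __builtin_return_address (0) and __builtin_frame_address (0) reach it
   with COUNT == 0; a nonzero COUNT first walks that many links of the
   dynamic chain, which is only reliable when every intervening frame
   keeps a frame pointer. */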
828
829 /* Alias set used for setjmp buffer. */
830 static alias_set_type setjmp_alias_set = -1;
831
832 /* Construct the leading half of a __builtin_setjmp call. Control will
833 return to RECEIVER_LABEL. This is also called directly by the SJLJ
834 exception handling code. */
835
836 void
837 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
838 {
839 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
840 rtx stack_save;
841 rtx mem;
842
843 if (setjmp_alias_set == -1)
844 setjmp_alias_set = new_alias_set ();
845
846 buf_addr = convert_memory_address (Pmode, buf_addr);
847
848 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
849
850 /* We store the frame pointer and the address of receiver_label in
851 the buffer and use the rest of it for the stack save area, which
852 is machine-dependent. */
853
854 mem = gen_rtx_MEM (Pmode, buf_addr);
855 set_mem_alias_set (mem, setjmp_alias_set);
856 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
857
858 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
 859 GET_MODE_SIZE (Pmode)));
860 set_mem_alias_set (mem, setjmp_alias_set);
861
862 emit_move_insn (validize_mem (mem),
863 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
864
865 stack_save = gen_rtx_MEM (sa_mode,
866 plus_constant (Pmode, buf_addr,
867 2 * GET_MODE_SIZE (Pmode)));
868 set_mem_alias_set (stack_save, setjmp_alias_set);
869 emit_stack_save (SAVE_NONLOCAL, &stack_save);
870
871 /* If there is further processing to do, do it. */
872 #ifdef HAVE_builtin_setjmp_setup
873 if (HAVE_builtin_setjmp_setup)
874 emit_insn (gen_builtin_setjmp_setup (buf_addr));
875 #endif
876
877 /* We have a nonlocal label. */
878 cfun->has_nonlocal_label = 1;
879 }
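
/* Summary of the buffer layout established above, in Pmode-sized words
   (illustrative): buf[0] holds the frame pointer, buf[1] the address of
   the receiver label, and buf[2] onward the machine-dependent stack save
   area. expand_builtin_longjmp and expand_builtin_update_setjmp_buf
   below rely on this same layout. */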
880
881 /* Construct the trailing part of a __builtin_setjmp call. This is
882 also called directly by the SJLJ exception handling code.
 883 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
884
885 void
886 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
887 {
888 rtx chain;
889
890 /* Mark the FP as used when we get here, so we have to make sure it's
891 marked as used by this function. */
892 emit_use (hard_frame_pointer_rtx);
893
894 /* Mark the static chain as clobbered here so life information
895 doesn't get messed up for it. */
896 chain = targetm.calls.static_chain (current_function_decl, true);
897 if (chain && REG_P (chain))
898 emit_clobber (chain);
899
900 /* Now put in the code to restore the frame pointer, and argument
901 pointer, if needed. */
902 #ifdef HAVE_nonlocal_goto
903 if (! HAVE_nonlocal_goto)
904 #endif
905 /* First adjust our frame pointer to its actual value. It was
906 previously set to the start of the virtual area corresponding to
907 the stacked variables when we branched here and now needs to be
908 adjusted to the actual hardware fp value.
909
910 Assignments to virtual registers are converted by
911 instantiate_virtual_regs into the corresponding assignment
912 to the underlying register (fp in this case) that makes
913 the original assignment true.
914 So the following insn will actually be decrementing fp by
915 STARTING_FRAME_OFFSET. */
916 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
917
918 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
919 if (fixed_regs[ARG_POINTER_REGNUM])
920 {
921 #ifdef ELIMINABLE_REGS
922 /* If the argument pointer can be eliminated in favor of the
923 frame pointer, we don't need to restore it. We assume here
924 that if such an elimination is present, it can always be used.
925 This is the case on all known machines; if we don't make this
926 assumption, we do unnecessary saving on many machines. */
927 size_t i;
928 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
929
930 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
931 if (elim_regs[i].from == ARG_POINTER_REGNUM
932 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
933 break;
934
935 if (i == ARRAY_SIZE (elim_regs))
936 #endif
937 {
938 /* Now restore our arg pointer from the address at which it
939 was saved in our stack frame. */
940 emit_move_insn (crtl->args.internal_arg_pointer,
941 copy_to_reg (get_arg_pointer_save_area ()));
942 }
943 }
944 #endif
945
946 #ifdef HAVE_builtin_setjmp_receiver
947 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
948 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
949 else
950 #endif
951 #ifdef HAVE_nonlocal_goto_receiver
952 if (HAVE_nonlocal_goto_receiver)
953 emit_insn (gen_nonlocal_goto_receiver ());
954 else
955 #endif
956 { /* Nothing */ }
957
958 /* We must not allow the code we just generated to be reordered by
959 scheduling. Specifically, the update of the frame pointer must
960 happen immediately, not later. Similarly, we must block
961 (frame-related) register values to be used across this code. */
962 emit_insn (gen_blockage ());
963 }
964
965 /* __builtin_longjmp is passed a pointer to an array of five words (not
966 all will be used on all machines). It operates similarly to the C
967 library function of the same name, but is more efficient. Much of
968 the code below is copied from the handling of non-local gotos. */
969
970 static void
971 expand_builtin_longjmp (rtx buf_addr, rtx value)
972 {
973 rtx fp, lab, stack, insn, last;
974 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
975
 976 /* DRAP is needed for stack realignment if longjmp is expanded in the
 977 current function. */
978 if (SUPPORTS_STACK_ALIGNMENT)
979 crtl->need_drap = true;
980
981 if (setjmp_alias_set == -1)
982 setjmp_alias_set = new_alias_set ();
983
984 buf_addr = convert_memory_address (Pmode, buf_addr);
985
986 buf_addr = force_reg (Pmode, buf_addr);
987
 988 /* We require that the user pass a second argument of 1, because
989 that is what builtin_setjmp will return. */
990 gcc_assert (value == const1_rtx);
991
992 last = get_last_insn ();
993 #ifdef HAVE_builtin_longjmp
994 if (HAVE_builtin_longjmp)
995 emit_insn (gen_builtin_longjmp (buf_addr));
996 else
997 #endif
998 {
999 fp = gen_rtx_MEM (Pmode, buf_addr);
1000 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1001 GET_MODE_SIZE (Pmode)));
1002
1003 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1004 2 * GET_MODE_SIZE (Pmode)));
1005 set_mem_alias_set (fp, setjmp_alias_set);
1006 set_mem_alias_set (lab, setjmp_alias_set);
1007 set_mem_alias_set (stack, setjmp_alias_set);
1008
1009 /* Pick up FP, label, and SP from the block and jump. This code is
1010 from expand_goto in stmt.c; see there for detailed comments. */
1011 #ifdef HAVE_nonlocal_goto
1012 if (HAVE_nonlocal_goto)
1013 /* We have to pass a value to the nonlocal_goto pattern that will
1014 get copied into the static_chain pointer, but it does not matter
1015 what that value is, because builtin_setjmp does not use it. */
1016 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1017 else
1018 #endif
1019 {
1020 lab = copy_to_reg (lab);
1021
1022 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1023 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1024
1025 emit_move_insn (hard_frame_pointer_rtx, fp);
1026 emit_stack_restore (SAVE_NONLOCAL, stack);
1027
1028 emit_use (hard_frame_pointer_rtx);
1029 emit_use (stack_pointer_rtx);
1030 emit_indirect_jump (lab);
1031 }
1032 }
1033
1034 /* Search backwards and mark the jump insn as a non-local goto.
1035 Note that this precludes the use of __builtin_longjmp to a
1036 __builtin_setjmp target in the same function. However, we've
1037 already cautioned the user that these functions are for
1038 internal exception handling use only. */
1039 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1040 {
1041 gcc_assert (insn != last);
1042
1043 if (JUMP_P (insn))
1044 {
1045 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1046 break;
1047 }
1048 else if (CALL_P (insn))
1049 break;
1050 }
1051 }
1052
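/* Return true if more call arguments remain to be visited in ITER. */
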
1053 static inline bool
1054 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1055 {
1056 return (iter->i < iter->n);
1057 }
1058
1059 /* This function validates the types of a function call argument list
1060 against a specified list of tree_codes. If the last specifier is a 0,
 1061 that represents an ellipsis; otherwise the last specifier must be a
1062 VOID_TYPE. */
1063
1064 static bool
1065 validate_arglist (const_tree callexpr, ...)
1066 {
1067 enum tree_code code;
 1068 bool res = false;
1069 va_list ap;
1070 const_call_expr_arg_iterator iter;
1071 const_tree arg;
1072
1073 va_start (ap, callexpr);
1074 init_const_call_expr_arg_iterator (callexpr, &iter);
1075
1076 do
1077 {
1078 code = (enum tree_code) va_arg (ap, int);
1079 switch (code)
1080 {
1081 case 0:
 1082 /* This signifies an ellipsis; any further arguments are all OK. */
1083 res = true;
1084 goto end;
1085 case VOID_TYPE:
 1086 /* This signifies the end of the list; if no arguments remain,
 1087 return true, otherwise return false. */
1088 res = !more_const_call_expr_args_p (&iter);
1089 goto end;
1090 default:
1091 /* If no parameters remain or the parameter's code does not
1092 match the specified code, return false. Otherwise continue
1093 checking any remaining arguments. */
1094 arg = next_const_call_expr_arg (&iter);
1095 if (!validate_arg (arg, code))
1096 goto end;
1097 break;
1098 }
1099 }
1100 while (1);
1101
1102 /* We need gotos here since we can only have one VA_CLOSE in a
1103 function. */
1104 end: ;
1105 va_end (ap);
1106
1107 return res;
1108 }
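
/* Examples mirroring calls made later in this file:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts one pointer argument followed by any number of others. */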
1109
1110 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1111 and the address of the save area. */
1112
1113 static rtx
1114 expand_builtin_nonlocal_goto (tree exp)
1115 {
1116 tree t_label, t_save_area;
1117 rtx r_label, r_save_area, r_fp, r_sp, insn;
1118
1119 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1120 return NULL_RTX;
1121
1122 t_label = CALL_EXPR_ARG (exp, 0);
1123 t_save_area = CALL_EXPR_ARG (exp, 1);
1124
1125 r_label = expand_normal (t_label);
1126 r_label = convert_memory_address (Pmode, r_label);
1127 r_save_area = expand_normal (t_save_area);
1128 r_save_area = convert_memory_address (Pmode, r_save_area);
1129 /* Copy the address of the save location to a register just in case it was
1130 based on the frame pointer. */
1131 r_save_area = copy_to_reg (r_save_area);
1132 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1133 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1134 plus_constant (Pmode, r_save_area,
1135 GET_MODE_SIZE (Pmode)));
1136
1137 crtl->has_nonlocal_goto = 1;
1138
1139 #ifdef HAVE_nonlocal_goto
1140 /* ??? We no longer need to pass the static chain value, afaik. */
1141 if (HAVE_nonlocal_goto)
1142 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1143 else
1144 #endif
1145 {
1146 r_label = copy_to_reg (r_label);
1147
1148 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1149 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1150
1151 /* Restore frame pointer for containing function. */
1152 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1153 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1154
1155 /* USE of hard_frame_pointer_rtx added for consistency;
1156 not clear if really needed. */
1157 emit_use (hard_frame_pointer_rtx);
1158 emit_use (stack_pointer_rtx);
1159
1160 /* If the architecture is using a GP register, we must
1161 conservatively assume that the target function makes use of it.
1162 The prologue of functions with nonlocal gotos must therefore
1163 initialize the GP register to the appropriate value, and we
1164 must then make sure that this value is live at the point
1165 of the jump. (Note that this doesn't necessarily apply
1166 to targets with a nonlocal_goto pattern; they are free
1167 to implement it in their own way. Note also that this is
1168 a no-op if the GP register is a global invariant.) */
1169 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1170 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1171 emit_use (pic_offset_table_rtx);
1172
1173 emit_indirect_jump (r_label);
1174 }
1175
1176 /* Search backwards to the jump insn and mark it as a
1177 non-local goto. */
1178 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1179 {
1180 if (JUMP_P (insn))
1181 {
1182 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1183 break;
1184 }
1185 else if (CALL_P (insn))
1186 break;
1187 }
1188
1189 return const0_rtx;
1190 }
1191
1192 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1193 (not all will be used on all machines) that was passed to __builtin_setjmp.
1194 It updates the stack pointer in that block to correspond to the current
1195 stack pointer. */
1196
1197 static void
1198 expand_builtin_update_setjmp_buf (rtx buf_addr)
1199 {
1200 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1201 rtx stack_save
1202 = gen_rtx_MEM (sa_mode,
1203 memory_address
1204 (sa_mode,
1205 plus_constant (Pmode, buf_addr,
1206 2 * GET_MODE_SIZE (Pmode))));
1207
1208 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1209 }
1210
1211 /* Expand a call to __builtin_prefetch. For a target that does not support
1212 data prefetch, evaluate the memory address argument in case it has side
1213 effects. */
1214
1215 static void
1216 expand_builtin_prefetch (tree exp)
1217 {
1218 tree arg0, arg1, arg2;
1219 int nargs;
1220 rtx op0, op1, op2;
1221
1222 if (!validate_arglist (exp, POINTER_TYPE, 0))
1223 return;
1224
1225 arg0 = CALL_EXPR_ARG (exp, 0);
1226
1227 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1228 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1229 locality). */
1230 nargs = call_expr_nargs (exp);
1231 if (nargs > 1)
1232 arg1 = CALL_EXPR_ARG (exp, 1);
1233 else
1234 arg1 = integer_zero_node;
1235 if (nargs > 2)
1236 arg2 = CALL_EXPR_ARG (exp, 2);
1237 else
1238 arg2 = integer_three_node;
1239
1240 /* Argument 0 is an address. */
1241 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1242
1243 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1244 if (TREE_CODE (arg1) != INTEGER_CST)
1245 {
1246 error ("second argument to %<__builtin_prefetch%> must be a constant");
1247 arg1 = integer_zero_node;
1248 }
1249 op1 = expand_normal (arg1);
1250 /* Argument 1 must be either zero or one. */
1251 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1252 {
1253 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1254 " using zero");
1255 op1 = const0_rtx;
1256 }
1257
1258 /* Argument 2 (locality) must be a compile-time constant int. */
1259 if (TREE_CODE (arg2) != INTEGER_CST)
1260 {
1261 error ("third argument to %<__builtin_prefetch%> must be a constant");
1262 arg2 = integer_zero_node;
1263 }
1264 op2 = expand_normal (arg2);
1265 /* Argument 2 must be 0, 1, 2, or 3. */
1266 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1267 {
1268 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1269 op2 = const0_rtx;
1270 }
1271
1272 #ifdef HAVE_prefetch
1273 if (HAVE_prefetch)
1274 {
1275 struct expand_operand ops[3];
1276
1277 create_address_operand (&ops[0], op0);
1278 create_integer_operand (&ops[1], INTVAL (op1));
1279 create_integer_operand (&ops[2], INTVAL (op2));
1280 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1281 return;
1282 }
1283 #endif
1284
1285 /* Don't do anything with direct references to volatile memory, but
1286 generate code to handle other side effects. */
1287 if (!MEM_P (op0) && side_effects_p (op0))
1288 emit_insn (op0);
1289 }
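
/* The source-level contract enforced above, illustrated:

     __builtin_prefetch (p);          read, locality 3 (the defaults)
     __builtin_prefetch (p, 1, 1);    prefetch for write, low locality
     __builtin_prefetch (p, x, 1);    error unless X folds to a constant */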
1290
1291 /* Get a MEM rtx for expression EXP which is the address of an operand
 1292 to be used in a string instruction (cmpstrsi, movmemsi, ...). LEN is
1293 the maximum length of the block of memory that might be accessed or
1294 NULL if unknown. */
1295
1296 static rtx
1297 get_memory_rtx (tree exp, tree len)
1298 {
1299 tree orig_exp = exp;
1300 rtx addr, mem;
1301
 1302 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
 1303 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1304 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1305 exp = TREE_OPERAND (exp, 0);
1306
1307 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1308 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1309
1310 /* Get an expression we can use to find the attributes to assign to MEM.
1311 First remove any nops. */
1312 while (CONVERT_EXPR_P (exp)
1313 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1314 exp = TREE_OPERAND (exp, 0);
1315
 1316 /* Build a MEM_REF representing the whole accessed area as a byte blob
1317 (as builtin stringops may alias with anything). */
1318 exp = fold_build2 (MEM_REF,
1319 build_array_type (char_type_node,
1320 build_range_type (sizetype,
1321 size_one_node, len)),
1322 exp, build_int_cst (ptr_type_node, 0));
1323
1324 /* If the MEM_REF has no acceptable address, try to get the base object
1325 from the original address we got, and build an all-aliasing
1326 unknown-sized access to that one. */
1327 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1328 set_mem_attributes (mem, exp, 0);
1329 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1330 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1331 0))))
1332 {
1333 exp = build_fold_addr_expr (exp);
1334 exp = fold_build2 (MEM_REF,
1335 build_array_type (char_type_node,
1336 build_range_type (sizetype,
1337 size_zero_node,
1338 NULL)),
1339 exp, build_int_cst (ptr_type_node, 0));
1340 set_mem_attributes (mem, exp, 0);
1341 }
1342 set_mem_alias_set (mem, 0);
1343 return mem;
1344 }
1345 \f
1346 /* Built-in functions to perform an untyped call and return. */
1347
1348 #define apply_args_mode \
1349 (this_target_builtins->x_apply_args_mode)
1350 #define apply_result_mode \
1351 (this_target_builtins->x_apply_result_mode)
1352
1353 /* Return the size required for the block returned by __builtin_apply_args,
1354 and initialize apply_args_mode. */
1355
1356 static int
1357 apply_args_size (void)
1358 {
1359 static int size = -1;
1360 int align;
1361 unsigned int regno;
1362 enum machine_mode mode;
1363
1364 /* The values computed by this function never change. */
1365 if (size < 0)
1366 {
1367 /* The first value is the incoming arg-pointer. */
1368 size = GET_MODE_SIZE (Pmode);
1369
1370 /* The second value is the structure value address unless this is
1371 passed as an "invisible" first argument. */
1372 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1373 size += GET_MODE_SIZE (Pmode);
1374
1375 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1376 if (FUNCTION_ARG_REGNO_P (regno))
1377 {
1378 mode = targetm.calls.get_raw_arg_mode (regno);
1379
1380 gcc_assert (mode != VOIDmode);
1381
1382 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1383 if (size % align != 0)
1384 size = CEIL (size, align) * align;
1385 size += GET_MODE_SIZE (mode);
1386 apply_args_mode[regno] = mode;
1387 }
1388 else
1389 {
1390 apply_args_mode[regno] = VOIDmode;
1391 }
1392 }
1393 return size;
1394 }
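
/* Worked example of the rounding above (illustrative): with size == 4 and
   a mode requiring 8-byte alignment, CEIL (4, 8) * 8 == 8, so the slot
   starts at offset 8 and SIZE then advances past it by the mode's size. */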
1395
1396 /* Return the size required for the block returned by __builtin_apply,
1397 and initialize apply_result_mode. */
1398
1399 static int
1400 apply_result_size (void)
1401 {
1402 static int size = -1;
1403 int align, regno;
1404 enum machine_mode mode;
1405
1406 /* The values computed by this function never change. */
1407 if (size < 0)
1408 {
1409 size = 0;
1410
1411 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1412 if (targetm.calls.function_value_regno_p (regno))
1413 {
1414 mode = targetm.calls.get_raw_result_mode (regno);
1415
1416 gcc_assert (mode != VOIDmode);
1417
1418 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1419 if (size % align != 0)
1420 size = CEIL (size, align) * align;
1421 size += GET_MODE_SIZE (mode);
1422 apply_result_mode[regno] = mode;
1423 }
1424 else
1425 apply_result_mode[regno] = VOIDmode;
1426
1427 /* Allow targets that use untyped_call and untyped_return to override
1428 the size so that machine-specific information can be stored here. */
1429 #ifdef APPLY_RESULT_SIZE
1430 size = APPLY_RESULT_SIZE;
1431 #endif
1432 }
1433 return size;
1434 }
1435
1436 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1437 /* Create a vector describing the result block RESULT. If SAVEP is true,
1438 the result block is used to save the values; otherwise it is used to
1439 restore the values. */
1440
1441 static rtx
1442 result_vector (int savep, rtx result)
1443 {
1444 int regno, size, align, nelts;
1445 enum machine_mode mode;
1446 rtx reg, mem;
1447 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1448
1449 size = nelts = 0;
1450 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1451 if ((mode = apply_result_mode[regno]) != VOIDmode)
1452 {
1453 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1454 if (size % align != 0)
1455 size = CEIL (size, align) * align;
1456 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1457 mem = adjust_address (result, mode, size);
1458 savevec[nelts++] = (savep
1459 ? gen_rtx_SET (VOIDmode, mem, reg)
1460 : gen_rtx_SET (VOIDmode, reg, mem));
1461 size += GET_MODE_SIZE (mode);
1462 }
1463 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1464 }
1465 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1466
1467 /* Save the state required to perform an untyped call with the same
1468 arguments as were passed to the current function. */
1469
1470 static rtx
1471 expand_builtin_apply_args_1 (void)
1472 {
1473 rtx registers, tem;
1474 int size, align, regno;
1475 enum machine_mode mode;
1476 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1477
1478 /* Create a block where the arg-pointer, structure value address,
1479 and argument registers can be saved. */
1480 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1481
1482 /* Walk past the arg-pointer and structure value address. */
1483 size = GET_MODE_SIZE (Pmode);
1484 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1485 size += GET_MODE_SIZE (Pmode);
1486
1487 /* Save each register used in calling a function to the block. */
1488 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1489 if ((mode = apply_args_mode[regno]) != VOIDmode)
1490 {
1491 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1492 if (size % align != 0)
1493 size = CEIL (size, align) * align;
1494
1495 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1496
1497 emit_move_insn (adjust_address (registers, mode, size), tem);
1498 size += GET_MODE_SIZE (mode);
1499 }
1500
1501 /* Save the arg pointer to the block. */
1502 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1503 #ifdef STACK_GROWS_DOWNWARD
 1504 /* We need the pointer as the caller actually passed it to us, not
 1505 as we might have pretended it was passed. Make sure it's a valid
1506 operand, as emit_move_insn isn't expected to handle a PLUS. */
1507 tem
1508 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1509 NULL_RTX);
1510 #endif
1511 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1512
1513 size = GET_MODE_SIZE (Pmode);
1514
1515 /* Save the structure value address unless this is passed as an
1516 "invisible" first argument. */
1517 if (struct_incoming_value)
1518 {
1519 emit_move_insn (adjust_address (registers, Pmode, size),
1520 copy_to_reg (struct_incoming_value));
1521 size += GET_MODE_SIZE (Pmode);
1522 }
1523
1524 /* Return the address of the block. */
1525 return copy_addr_to_reg (XEXP (registers, 0));
1526 }
1527
 1528 /* __builtin_apply_args returns a block of memory allocated on
1529 the stack into which is stored the arg pointer, structure
1530 value address, static chain, and all the registers that might
1531 possibly be used in performing a function call. The code is
1532 moved to the start of the function so the incoming values are
1533 saved. */
1534
1535 static rtx
1536 expand_builtin_apply_args (void)
1537 {
1538 /* Don't do __builtin_apply_args more than once in a function.
1539 Save the result of the first call and reuse it. */
1540 if (apply_args_value != 0)
1541 return apply_args_value;
1542 {
1543 /* When this function is called, it means that registers must be
1544 saved on entry to this function. So we migrate the
1545 call to the first insn of this function. */
1546 rtx temp;
1547 rtx seq;
1548
1549 start_sequence ();
1550 temp = expand_builtin_apply_args_1 ();
1551 seq = get_insns ();
1552 end_sequence ();
1553
1554 apply_args_value = temp;
1555
1556 /* Put the insns after the NOTE that starts the function.
1557 If this is inside a start_sequence, make the outer-level insn
1558 chain current, so the code is placed at the start of the
1559 function. If internal_arg_pointer is a non-virtual pseudo,
1560 it needs to be placed after the function that initializes
1561 that pseudo. */
1562 push_topmost_sequence ();
1563 if (REG_P (crtl->args.internal_arg_pointer)
1564 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1565 emit_insn_before (seq, parm_birth_insn);
1566 else
1567 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1568 pop_topmost_sequence ();
1569 return temp;
1570 }
1571 }
1572
1573 /* Perform an untyped call and save the state required to perform an
1574 untyped return of whatever value was returned by the given function. */
1575
1576 static rtx
1577 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1578 {
1579 int size, align, regno;
1580 enum machine_mode mode;
1581 rtx incoming_args, result, reg, dest, src, call_insn;
1582 rtx old_stack_level = 0;
1583 rtx call_fusage = 0;
1584 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1585
1586 arguments = convert_memory_address (Pmode, arguments);
1587
1588 /* Create a block where the return registers can be saved. */
1589 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1590
1591 /* Fetch the arg pointer from the ARGUMENTS block. */
1592 incoming_args = gen_reg_rtx (Pmode);
1593 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1594 #ifndef STACK_GROWS_DOWNWARD
1595 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1596 incoming_args, 0, OPTAB_LIB_WIDEN);
1597 #endif
1598
1599 /* Push a new argument block and copy the arguments. Do not allow
1600 the (potential) memcpy call below to interfere with our stack
1601 manipulations. */
1602 do_pending_stack_adjust ();
1603 NO_DEFER_POP;
1604
1605 /* Save the stack with nonlocal if available. */
1606 #ifdef HAVE_save_stack_nonlocal
1607 if (HAVE_save_stack_nonlocal)
1608 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1609 else
1610 #endif
1611 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1612
1613 /* Allocate a block of memory onto the stack and copy the memory
1614 arguments to the outgoing arguments address. We can pass TRUE
1615 as the 4th argument because we just saved the stack pointer
1616 and will restore it right after the call. */
1617 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1618
1619 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1620 may have already set current_function_calls_alloca to true.
1621 current_function_calls_alloca won't be set if argsize is zero,
1622 so we have to guarantee need_drap is true here. */
1623 if (SUPPORTS_STACK_ALIGNMENT)
1624 crtl->need_drap = true;
1625
1626 dest = virtual_outgoing_args_rtx;
1627 #ifndef STACK_GROWS_DOWNWARD
1628 if (CONST_INT_P (argsize))
1629 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1630 else
1631 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1632 #endif
1633 dest = gen_rtx_MEM (BLKmode, dest);
1634 set_mem_align (dest, PARM_BOUNDARY);
1635 src = gen_rtx_MEM (BLKmode, incoming_args);
1636 set_mem_align (src, PARM_BOUNDARY);
1637 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1638
1639 /* Refer to the argument block. */
1640 apply_args_size ();
1641 arguments = gen_rtx_MEM (BLKmode, arguments);
1642 set_mem_align (arguments, PARM_BOUNDARY);
1643
1644 /* Walk past the arg-pointer and structure value address. */
1645 size = GET_MODE_SIZE (Pmode);
1646 if (struct_value)
1647 size += GET_MODE_SIZE (Pmode);
1648
1649 /* Restore each of the registers previously saved. Make USE insns
1650 for each of these registers for use in making the call. */
1651 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1652 if ((mode = apply_args_mode[regno]) != VOIDmode)
1653 {
1654 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1655 if (size % align != 0)
1656 size = CEIL (size, align) * align;
1657 reg = gen_rtx_REG (mode, regno);
1658 emit_move_insn (reg, adjust_address (arguments, mode, size));
1659 use_reg (&call_fusage, reg);
1660 size += GET_MODE_SIZE (mode);
1661 }
1662
1663 /* Restore the structure value address unless this is passed as an
1664 "invisible" first argument. */
1665 size = GET_MODE_SIZE (Pmode);
1666 if (struct_value)
1667 {
1668 rtx value = gen_reg_rtx (Pmode);
1669 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1670 emit_move_insn (struct_value, value);
1671 if (REG_P (struct_value))
1672 use_reg (&call_fusage, struct_value);
1673 size += GET_MODE_SIZE (Pmode);
1674 }
1675
1676 /* All arguments and registers used for the call are set up by now! */
1677 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1678
1679 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1680 and we don't want to load it into a register as an optimization,
1681 because prepare_call_address already did it if it should be done. */
1682 if (GET_CODE (function) != SYMBOL_REF)
1683 function = memory_address (FUNCTION_MODE, function);
1684
1685 /* Generate the actual call instruction and save the return value. */
1686 #ifdef HAVE_untyped_call
1687 if (HAVE_untyped_call)
1688 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1689 result, result_vector (1, result)));
1690 else
1691 #endif
1692 #ifdef HAVE_call_value
1693 if (HAVE_call_value)
1694 {
1695 rtx valreg = 0;
1696
1697 /* Locate the unique return register. It is not possible to
1698 express a call that sets more than one return register using
1699 call_value; use untyped_call for that. In fact, untyped_call
1700 only needs to save the return registers in the given block. */
1701 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1702 if ((mode = apply_result_mode[regno]) != VOIDmode)
1703 {
1704 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1705
1706 valreg = gen_rtx_REG (mode, regno);
1707 }
1708
1709 emit_call_insn (GEN_CALL_VALUE (valreg,
1710 gen_rtx_MEM (FUNCTION_MODE, function),
1711 const0_rtx, NULL_RTX, const0_rtx));
1712
1713 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1714 }
1715 else
1716 #endif
1717 gcc_unreachable ();
1718
1719 /* Find the CALL insn we just emitted, and attach the register usage
1720 information. */
1721 call_insn = last_call_insn ();
1722 add_function_usage_to (call_insn, call_fusage);
1723
1724 /* Restore the stack. */
1725 #ifdef HAVE_save_stack_nonlocal
1726 if (HAVE_save_stack_nonlocal)
1727 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1728 else
1729 #endif
1730 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1731 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1732
1733 OK_DEFER_POP;
1734
1735 /* Return the address of the result block. */
1736 result = copy_addr_to_reg (XEXP (result, 0));
1737 return convert_memory_address (ptr_mode, result);
1738 }
1739
1740 /* Perform an untyped return. */
1741
1742 static void
1743 expand_builtin_return (rtx result)
1744 {
1745 int size, align, regno;
1746 enum machine_mode mode;
1747 rtx reg;
1748 rtx call_fusage = 0;
1749
1750 result = convert_memory_address (Pmode, result);
1751
1752 apply_result_size ();
1753 result = gen_rtx_MEM (BLKmode, result);
1754
1755 #ifdef HAVE_untyped_return
1756 if (HAVE_untyped_return)
1757 {
1758 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1759 emit_barrier ();
1760 return;
1761 }
1762 #endif
1763
1764 /* Restore the return value and note that each value is used. */
1765 size = 0;
1766 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1767 if ((mode = apply_result_mode[regno]) != VOIDmode)
1768 {
1769 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1770 if (size % align != 0)
1771 size = CEIL (size, align) * align;
1772 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1773 emit_move_insn (reg, adjust_address (result, mode, size));
1774
1775 push_to_sequence (call_fusage);
1776 emit_use (reg);
1777 call_fusage = get_insns ();
1778 end_sequence ();
1779 size += GET_MODE_SIZE (mode);
1780 }
1781
1782 /* Put the USE insns before the return. */
1783 emit_insn (call_fusage);
1784
1785 /* Return whatever values were restored by jumping directly to the end
1786 of the function. */
1787 expand_naked_return ();
1788 }
1789
1790 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1791
1792 static enum type_class
1793 type_to_class (tree type)
1794 {
1795 switch (TREE_CODE (type))
1796 {
1797 case VOID_TYPE: return void_type_class;
1798 case INTEGER_TYPE: return integer_type_class;
1799 case ENUMERAL_TYPE: return enumeral_type_class;
1800 case BOOLEAN_TYPE: return boolean_type_class;
1801 case POINTER_TYPE: return pointer_type_class;
1802 case REFERENCE_TYPE: return reference_type_class;
1803 case OFFSET_TYPE: return offset_type_class;
1804 case REAL_TYPE: return real_type_class;
1805 case COMPLEX_TYPE: return complex_type_class;
1806 case FUNCTION_TYPE: return function_type_class;
1807 case METHOD_TYPE: return method_type_class;
1808 case RECORD_TYPE: return record_type_class;
1809 case UNION_TYPE:
1810 case QUAL_UNION_TYPE: return union_type_class;
1811 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1812 ? string_type_class : array_type_class);
1813 case LANG_TYPE: return lang_type_class;
1814 default: return no_type_class;
1815 }
1816 }
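/* For example (illustrative), in C source code
   __builtin_classify_type (1) evaluates to integer_type_class,
   __builtin_classify_type (1.0) to real_type_class, and
   __builtin_classify_type ((char *) 0) to pointer_type_class,
   following the mapping above.  */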
1817
1818 /* Expand a call EXP to __builtin_classify_type. */
1819
1820 static rtx
1821 expand_builtin_classify_type (tree exp)
1822 {
1823 if (call_expr_nargs (exp))
1824 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1825 return GEN_INT (no_type_class);
1826 }
1827
1828 /* This helper macro, meant to be used in mathfn_built_in below,
1829 determines which among a set of three builtin math functions is
1830 appropriate for a given type mode. The `F' and `L' cases are
1831 automatically generated from the `double' case. */
1832 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1833 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1834 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1835 fcodel = BUILT_IN_MATHFN##L ; break;
1836 /* Similar to above, but appends _R after any F/L suffix. */
1837 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1838 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1839 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1840 fcodel = BUILT_IN_MATHFN##L_R ; break;
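/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */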
1841
1842 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1843 if available. If IMPLICIT is true, use the implicit builtin declaration,
1844 otherwise use the explicit declaration. If we can't do the conversion,
1845 return zero. */
1846
1847 static tree
1848 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1849 {
1850 enum built_in_function fcode, fcodef, fcodel, fcode2;
1851
1852 switch (fn)
1853 {
1854 CASE_MATHFN (BUILT_IN_ACOS)
1855 CASE_MATHFN (BUILT_IN_ACOSH)
1856 CASE_MATHFN (BUILT_IN_ASIN)
1857 CASE_MATHFN (BUILT_IN_ASINH)
1858 CASE_MATHFN (BUILT_IN_ATAN)
1859 CASE_MATHFN (BUILT_IN_ATAN2)
1860 CASE_MATHFN (BUILT_IN_ATANH)
1861 CASE_MATHFN (BUILT_IN_CBRT)
1862 CASE_MATHFN (BUILT_IN_CEIL)
1863 CASE_MATHFN (BUILT_IN_CEXPI)
1864 CASE_MATHFN (BUILT_IN_COPYSIGN)
1865 CASE_MATHFN (BUILT_IN_COS)
1866 CASE_MATHFN (BUILT_IN_COSH)
1867 CASE_MATHFN (BUILT_IN_DREM)
1868 CASE_MATHFN (BUILT_IN_ERF)
1869 CASE_MATHFN (BUILT_IN_ERFC)
1870 CASE_MATHFN (BUILT_IN_EXP)
1871 CASE_MATHFN (BUILT_IN_EXP10)
1872 CASE_MATHFN (BUILT_IN_EXP2)
1873 CASE_MATHFN (BUILT_IN_EXPM1)
1874 CASE_MATHFN (BUILT_IN_FABS)
1875 CASE_MATHFN (BUILT_IN_FDIM)
1876 CASE_MATHFN (BUILT_IN_FLOOR)
1877 CASE_MATHFN (BUILT_IN_FMA)
1878 CASE_MATHFN (BUILT_IN_FMAX)
1879 CASE_MATHFN (BUILT_IN_FMIN)
1880 CASE_MATHFN (BUILT_IN_FMOD)
1881 CASE_MATHFN (BUILT_IN_FREXP)
1882 CASE_MATHFN (BUILT_IN_GAMMA)
1883 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1884 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1885 CASE_MATHFN (BUILT_IN_HYPOT)
1886 CASE_MATHFN (BUILT_IN_ILOGB)
1887 CASE_MATHFN (BUILT_IN_ICEIL)
1888 CASE_MATHFN (BUILT_IN_IFLOOR)
1889 CASE_MATHFN (BUILT_IN_INF)
1890 CASE_MATHFN (BUILT_IN_IRINT)
1891 CASE_MATHFN (BUILT_IN_IROUND)
1892 CASE_MATHFN (BUILT_IN_ISINF)
1893 CASE_MATHFN (BUILT_IN_J0)
1894 CASE_MATHFN (BUILT_IN_J1)
1895 CASE_MATHFN (BUILT_IN_JN)
1896 CASE_MATHFN (BUILT_IN_LCEIL)
1897 CASE_MATHFN (BUILT_IN_LDEXP)
1898 CASE_MATHFN (BUILT_IN_LFLOOR)
1899 CASE_MATHFN (BUILT_IN_LGAMMA)
1900 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1901 CASE_MATHFN (BUILT_IN_LLCEIL)
1902 CASE_MATHFN (BUILT_IN_LLFLOOR)
1903 CASE_MATHFN (BUILT_IN_LLRINT)
1904 CASE_MATHFN (BUILT_IN_LLROUND)
1905 CASE_MATHFN (BUILT_IN_LOG)
1906 CASE_MATHFN (BUILT_IN_LOG10)
1907 CASE_MATHFN (BUILT_IN_LOG1P)
1908 CASE_MATHFN (BUILT_IN_LOG2)
1909 CASE_MATHFN (BUILT_IN_LOGB)
1910 CASE_MATHFN (BUILT_IN_LRINT)
1911 CASE_MATHFN (BUILT_IN_LROUND)
1912 CASE_MATHFN (BUILT_IN_MODF)
1913 CASE_MATHFN (BUILT_IN_NAN)
1914 CASE_MATHFN (BUILT_IN_NANS)
1915 CASE_MATHFN (BUILT_IN_NEARBYINT)
1916 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1917 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1918 CASE_MATHFN (BUILT_IN_POW)
1919 CASE_MATHFN (BUILT_IN_POWI)
1920 CASE_MATHFN (BUILT_IN_POW10)
1921 CASE_MATHFN (BUILT_IN_REMAINDER)
1922 CASE_MATHFN (BUILT_IN_REMQUO)
1923 CASE_MATHFN (BUILT_IN_RINT)
1924 CASE_MATHFN (BUILT_IN_ROUND)
1925 CASE_MATHFN (BUILT_IN_SCALB)
1926 CASE_MATHFN (BUILT_IN_SCALBLN)
1927 CASE_MATHFN (BUILT_IN_SCALBN)
1928 CASE_MATHFN (BUILT_IN_SIGNBIT)
1929 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1930 CASE_MATHFN (BUILT_IN_SIN)
1931 CASE_MATHFN (BUILT_IN_SINCOS)
1932 CASE_MATHFN (BUILT_IN_SINH)
1933 CASE_MATHFN (BUILT_IN_SQRT)
1934 CASE_MATHFN (BUILT_IN_TAN)
1935 CASE_MATHFN (BUILT_IN_TANH)
1936 CASE_MATHFN (BUILT_IN_TGAMMA)
1937 CASE_MATHFN (BUILT_IN_TRUNC)
1938 CASE_MATHFN (BUILT_IN_Y0)
1939 CASE_MATHFN (BUILT_IN_Y1)
1940 CASE_MATHFN (BUILT_IN_YN)
1941
1942 default:
1943 return NULL_TREE;
1944 }
1945
1946 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1947 fcode2 = fcode;
1948 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1949 fcode2 = fcodef;
1950 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1951 fcode2 = fcodel;
1952 else
1953 return NULL_TREE;
1954
1955 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1956 return NULL_TREE;
1957
1958 return builtin_decl_explicit (fcode2);
1959 }
1960
1961 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1962
1963 tree
1964 mathfn_built_in (tree type, enum built_in_function fn)
1965 {
1966 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1967 }
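/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields
   the declaration of sinf, provided the float variant is implicitly
   available; otherwise it returns NULL_TREE.  */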
1968
1969 /* If errno must be maintained, expand the RTL to check if the result,
1970 TARGET, of a built-in function call, EXP, is NaN, and if so set
1971 errno to EDOM. */
1972
1973 static void
1974 expand_errno_check (tree exp, rtx target)
1975 {
1976 rtx lab = gen_label_rtx ();
1977
1978 /* Test the result; if it is NaN, set errno=EDOM because
1979 the argument was not in the domain. */
1980 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1981 NULL_RTX, NULL_RTX, lab,
1982 /* The jump is very likely. */
1983 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1984
1985 #ifdef TARGET_EDOM
1986 /* If this built-in doesn't throw an exception, set errno directly. */
1987 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1988 {
1989 #ifdef GEN_ERRNO_RTX
1990 rtx errno_rtx = GEN_ERRNO_RTX;
1991 #else
1992 rtx errno_rtx
1993 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1994 #endif
1995 emit_move_insn (errno_rtx,
1996 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1997 emit_label (lab);
1998 return;
1999 }
2000 #endif
2001
2002 /* Make sure the library call isn't expanded as a tail call. */
2003 CALL_EXPR_TAILCALL (exp) = 0;
2004
2005 /* We can't set errno=EDOM directly; let the library call do it.
2006 Pop the arguments right away in case the call gets deleted. */
2007 NO_DEFER_POP;
2008 expand_call (exp, target, 0);
2009 OK_DEFER_POP;
2010 emit_label (lab);
2011 }
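/* The check emitted above exploits the fact that NaN is the only
   value that compares unequal to itself; in C terms the generated
   code behaves like this sketch:

     if (result == result)
       goto done;
     errno = EDOM;
   done:;

   except that when the builtin may throw, or TARGET_EDOM is not
   defined, the library function is re-called instead of assigning
   errno directly.  */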
2012
2013 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2014 Return NULL_RTX if a normal call should be emitted rather than expanding
2015 the function in-line. EXP is the expression that is a call to the builtin
2016 function; if convenient, the result should be placed in TARGET.
2017 SUBTARGET may be used as the target for computing one of EXP's operands. */
2018
2019 static rtx
2020 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2021 {
2022 optab builtin_optab;
2023 rtx op0, insns;
2024 tree fndecl = get_callee_fndecl (exp);
2025 enum machine_mode mode;
2026 bool errno_set = false;
2027 bool try_widening = false;
2028 tree arg;
2029
2030 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2031 return NULL_RTX;
2032
2033 arg = CALL_EXPR_ARG (exp, 0);
2034
2035 switch (DECL_FUNCTION_CODE (fndecl))
2036 {
2037 CASE_FLT_FN (BUILT_IN_SQRT):
2038 errno_set = ! tree_expr_nonnegative_p (arg);
2039 try_widening = true;
2040 builtin_optab = sqrt_optab;
2041 break;
2042 CASE_FLT_FN (BUILT_IN_EXP):
2043 errno_set = true; builtin_optab = exp_optab; break;
2044 CASE_FLT_FN (BUILT_IN_EXP10):
2045 CASE_FLT_FN (BUILT_IN_POW10):
2046 errno_set = true; builtin_optab = exp10_optab; break;
2047 CASE_FLT_FN (BUILT_IN_EXP2):
2048 errno_set = true; builtin_optab = exp2_optab; break;
2049 CASE_FLT_FN (BUILT_IN_EXPM1):
2050 errno_set = true; builtin_optab = expm1_optab; break;
2051 CASE_FLT_FN (BUILT_IN_LOGB):
2052 errno_set = true; builtin_optab = logb_optab; break;
2053 CASE_FLT_FN (BUILT_IN_LOG):
2054 errno_set = true; builtin_optab = log_optab; break;
2055 CASE_FLT_FN (BUILT_IN_LOG10):
2056 errno_set = true; builtin_optab = log10_optab; break;
2057 CASE_FLT_FN (BUILT_IN_LOG2):
2058 errno_set = true; builtin_optab = log2_optab; break;
2059 CASE_FLT_FN (BUILT_IN_LOG1P):
2060 errno_set = true; builtin_optab = log1p_optab; break;
2061 CASE_FLT_FN (BUILT_IN_ASIN):
2062 builtin_optab = asin_optab; break;
2063 CASE_FLT_FN (BUILT_IN_ACOS):
2064 builtin_optab = acos_optab; break;
2065 CASE_FLT_FN (BUILT_IN_TAN):
2066 builtin_optab = tan_optab; break;
2067 CASE_FLT_FN (BUILT_IN_ATAN):
2068 builtin_optab = atan_optab; break;
2069 CASE_FLT_FN (BUILT_IN_FLOOR):
2070 builtin_optab = floor_optab; break;
2071 CASE_FLT_FN (BUILT_IN_CEIL):
2072 builtin_optab = ceil_optab; break;
2073 CASE_FLT_FN (BUILT_IN_TRUNC):
2074 builtin_optab = btrunc_optab; break;
2075 CASE_FLT_FN (BUILT_IN_ROUND):
2076 builtin_optab = round_optab; break;
2077 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2078 builtin_optab = nearbyint_optab;
2079 if (flag_trapping_math)
2080 break;
2081 /* Else fall through and expand as rint. */
2082 CASE_FLT_FN (BUILT_IN_RINT):
2083 builtin_optab = rint_optab; break;
2084 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2085 builtin_optab = significand_optab; break;
2086 default:
2087 gcc_unreachable ();
2088 }
2089
2090 /* Make a suitable register to place result in. */
2091 mode = TYPE_MODE (TREE_TYPE (exp));
2092
2093 if (! flag_errno_math || ! HONOR_NANS (mode))
2094 errno_set = false;
2095
2096 /* Before working hard, check whether the instruction is available, but try
2097 to widen the mode for specific operations. */
2098 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2099 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2100 && (!errno_set || !optimize_insn_for_size_p ()))
2101 {
2102 rtx result = gen_reg_rtx (mode);
2103
2104 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2105 need to expand the argument again. This way, we will not perform
2106 side-effects more than once. */
2107 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2108
2109 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2110
2111 start_sequence ();
2112
2113 /* Compute into RESULT.
2114 Set RESULT to wherever the result comes back. */
2115 result = expand_unop (mode, builtin_optab, op0, result, 0);
2116
2117 if (result != 0)
2118 {
2119 if (errno_set)
2120 expand_errno_check (exp, result);
2121
2122 /* Output the entire sequence. */
2123 insns = get_insns ();
2124 end_sequence ();
2125 emit_insn (insns);
2126 return result;
2127 }
2128
2129 /* If we were unable to expand via the builtin, stop the sequence
2130 (without outputting the insns) and call the library function
2131 with the stabilized argument list. */
2132 end_sequence ();
2133 }
2134
2135 return expand_call (exp, target, target == const0_rtx);
2136 }
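/* As a concrete illustration: for sqrt the errno check is skipped
   whenever the argument is provably nonnegative, so a call such as
   sqrt (fabs (x)) may expand to a bare sqrt instruction with no
   errno handling at all (and with -fno-math-errno the check is
   skipped unconditionally).  */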
2137
2138 /* Expand a call to the builtin binary math functions (pow and atan2).
2139 Return NULL_RTX if a normal call should be emitted rather than expanding the
2140 function in-line. EXP is the expression that is a call to the builtin
2141 function; if convenient, the result should be placed in TARGET.
2142 SUBTARGET may be used as the target for computing one of EXP's
2143 operands. */
2144
2145 static rtx
2146 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2147 {
2148 optab builtin_optab;
2149 rtx op0, op1, insns, result;
2150 int op1_type = REAL_TYPE;
2151 tree fndecl = get_callee_fndecl (exp);
2152 tree arg0, arg1;
2153 enum machine_mode mode;
2154 bool errno_set = true;
2155
2156 switch (DECL_FUNCTION_CODE (fndecl))
2157 {
2158 CASE_FLT_FN (BUILT_IN_SCALBN):
2159 CASE_FLT_FN (BUILT_IN_SCALBLN):
2160 CASE_FLT_FN (BUILT_IN_LDEXP):
2161 op1_type = INTEGER_TYPE;
2162 default:
2163 break;
2164 }
2165
2166 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2167 return NULL_RTX;
2168
2169 arg0 = CALL_EXPR_ARG (exp, 0);
2170 arg1 = CALL_EXPR_ARG (exp, 1);
2171
2172 switch (DECL_FUNCTION_CODE (fndecl))
2173 {
2174 CASE_FLT_FN (BUILT_IN_POW):
2175 builtin_optab = pow_optab; break;
2176 CASE_FLT_FN (BUILT_IN_ATAN2):
2177 builtin_optab = atan2_optab; break;
2178 CASE_FLT_FN (BUILT_IN_SCALB):
2179 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2180 return 0;
2181 builtin_optab = scalb_optab; break;
2182 CASE_FLT_FN (BUILT_IN_SCALBN):
2183 CASE_FLT_FN (BUILT_IN_SCALBLN):
2184 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2185 return 0;
2186 /* Fall through... */
2187 CASE_FLT_FN (BUILT_IN_LDEXP):
2188 builtin_optab = ldexp_optab; break;
2189 CASE_FLT_FN (BUILT_IN_FMOD):
2190 builtin_optab = fmod_optab; break;
2191 CASE_FLT_FN (BUILT_IN_REMAINDER):
2192 CASE_FLT_FN (BUILT_IN_DREM):
2193 builtin_optab = remainder_optab; break;
2194 default:
2195 gcc_unreachable ();
2196 }
2197
2198 /* Make a suitable register to place result in. */
2199 mode = TYPE_MODE (TREE_TYPE (exp));
2200
2201 /* Before working hard, check whether the instruction is available. */
2202 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2203 return NULL_RTX;
2204
2205 result = gen_reg_rtx (mode);
2206
2207 if (! flag_errno_math || ! HONOR_NANS (mode))
2208 errno_set = false;
2209
2210 if (errno_set && optimize_insn_for_size_p ())
2211 return 0;
2212
2213 /* Always stabilize the argument list. */
2214 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2215 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2216
2217 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2218 op1 = expand_normal (arg1);
2219
2220 start_sequence ();
2221
2222 /* Compute into RESULT.
2223 Set RESULT to wherever the result comes back. */
2224 result = expand_binop (mode, builtin_optab, op0, op1,
2225 result, 0, OPTAB_DIRECT);
2226
2227 /* If we were unable to expand via the builtin, stop the sequence
2228 (without outputting the insns) and call the library function
2229 with the stabilized argument list. */
2230 if (result == 0)
2231 {
2232 end_sequence ();
2233 return expand_call (exp, target, target == const0_rtx);
2234 }
2235
2236 if (errno_set)
2237 expand_errno_check (exp, result);
2238
2239 /* Output the entire sequence. */
2240 insns = get_insns ();
2241 end_sequence ();
2242 emit_insn (insns);
2243
2244 return result;
2245 }
2246
2247 /* Expand a call to the builtin ternary math functions (fma).
2248 Return NULL_RTX if a normal call should be emitted rather than expanding the
2249 function in-line. EXP is the expression that is a call to the builtin
2250 function; if convenient, the result should be placed in TARGET.
2251 SUBTARGET may be used as the target for computing one of EXP's
2252 operands. */
2253
2254 static rtx
2255 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2256 {
2257 optab builtin_optab;
2258 rtx op0, op1, op2, insns, result;
2259 tree fndecl = get_callee_fndecl (exp);
2260 tree arg0, arg1, arg2;
2261 enum machine_mode mode;
2262
2263 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2264 return NULL_RTX;
2265
2266 arg0 = CALL_EXPR_ARG (exp, 0);
2267 arg1 = CALL_EXPR_ARG (exp, 1);
2268 arg2 = CALL_EXPR_ARG (exp, 2);
2269
2270 switch (DECL_FUNCTION_CODE (fndecl))
2271 {
2272 CASE_FLT_FN (BUILT_IN_FMA):
2273 builtin_optab = fma_optab; break;
2274 default:
2275 gcc_unreachable ();
2276 }
2277
2278 /* Make a suitable register to place result in. */
2279 mode = TYPE_MODE (TREE_TYPE (exp));
2280
2281 /* Before working hard, check whether the instruction is available. */
2282 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2283 return NULL_RTX;
2284
2285 result = gen_reg_rtx (mode);
2286
2287 /* Always stabilize the argument list. */
2288 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2289 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2290 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2291
2292 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2293 op1 = expand_normal (arg1);
2294 op2 = expand_normal (arg2);
2295
2296 start_sequence ();
2297
2298 /* Compute into RESULT.
2299 Set RESULT to wherever the result comes back. */
2300 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2301 result, 0);
2302
2303 /* If we were unable to expand via the builtin, stop the sequence
2304 (without outputting the insns) and call the library function
2305 with the stabilized argument list. */
2306 if (result == 0)
2307 {
2308 end_sequence ();
2309 return expand_call (exp, target, target == const0_rtx);
2310 }
2311
2312 /* Output the entire sequence. */
2313 insns = get_insns ();
2314 end_sequence ();
2315 emit_insn (insns);
2316
2317 return result;
2318 }
2319
2320 /* Expand a call to the builtin sin and cos math functions.
2321 Return NULL_RTX if a normal call should be emitted rather than expanding the
2322 function in-line. EXP is the expression that is a call to the builtin
2323 function; if convenient, the result should be placed in TARGET.
2324 SUBTARGET may be used as the target for computing one of EXP's
2325 operands. */
2326
2327 static rtx
2328 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2329 {
2330 optab builtin_optab;
2331 rtx op0, insns;
2332 tree fndecl = get_callee_fndecl (exp);
2333 enum machine_mode mode;
2334 tree arg;
2335
2336 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2337 return NULL_RTX;
2338
2339 arg = CALL_EXPR_ARG (exp, 0);
2340
2341 switch (DECL_FUNCTION_CODE (fndecl))
2342 {
2343 CASE_FLT_FN (BUILT_IN_SIN):
2344 CASE_FLT_FN (BUILT_IN_COS):
2345 builtin_optab = sincos_optab; break;
2346 default:
2347 gcc_unreachable ();
2348 }
2349
2350 /* Make a suitable register to place result in. */
2351 mode = TYPE_MODE (TREE_TYPE (exp));
2352
2353 /* Check if the sincos insn is available; otherwise fall back
2354 to the sin or cos insn. */
2355 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2356 switch (DECL_FUNCTION_CODE (fndecl))
2357 {
2358 CASE_FLT_FN (BUILT_IN_SIN):
2359 builtin_optab = sin_optab; break;
2360 CASE_FLT_FN (BUILT_IN_COS):
2361 builtin_optab = cos_optab; break;
2362 default:
2363 gcc_unreachable ();
2364 }
2365
2366 /* Before working hard, check whether the instruction is available. */
2367 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2368 {
2369 rtx result = gen_reg_rtx (mode);
2370
2371 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2372 need to expand the argument again. This way, we will not perform
2373 side-effects more than once. */
2374 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2375
2376 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2377
2378 start_sequence ();
2379
2380 /* Compute into RESULT.
2381 Set RESULT to wherever the result comes back. */
2382 if (builtin_optab == sincos_optab)
2383 {
2384 int ok;
2385
2386 switch (DECL_FUNCTION_CODE (fndecl))
2387 {
2388 CASE_FLT_FN (BUILT_IN_SIN):
2389 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2390 break;
2391 CASE_FLT_FN (BUILT_IN_COS):
2392 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2393 break;
2394 default:
2395 gcc_unreachable ();
2396 }
2397 gcc_assert (ok);
2398 }
2399 else
2400 result = expand_unop (mode, builtin_optab, op0, result, 0);
2401
2402 if (result != 0)
2403 {
2404 /* Output the entire sequence. */
2405 insns = get_insns ();
2406 end_sequence ();
2407 emit_insn (insns);
2408 return result;
2409 }
2410
2411 /* If we were unable to expand via the builtin, stop the sequence
2412 (without outputting the insns) and call the library function
2413 with the stabilized argument list. */
2414 end_sequence ();
2415 }
2416
2417 return expand_call (exp, target, target == const0_rtx);
2418 }
2419
2420 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2421 return an RTL instruction code that implements the functionality.
2422 If that isn't possible or available, return CODE_FOR_nothing. */
2423
2424 static enum insn_code
2425 interclass_mathfn_icode (tree arg, tree fndecl)
2426 {
2427 bool errno_set = false;
2428 optab builtin_optab = unknown_optab;
2429 enum machine_mode mode;
2430
2431 switch (DECL_FUNCTION_CODE (fndecl))
2432 {
2433 CASE_FLT_FN (BUILT_IN_ILOGB):
2434 errno_set = true; builtin_optab = ilogb_optab; break;
2435 CASE_FLT_FN (BUILT_IN_ISINF):
2436 builtin_optab = isinf_optab; break;
2437 case BUILT_IN_ISNORMAL:
2438 case BUILT_IN_ISFINITE:
2439 CASE_FLT_FN (BUILT_IN_FINITE):
2440 case BUILT_IN_FINITED32:
2441 case BUILT_IN_FINITED64:
2442 case BUILT_IN_FINITED128:
2443 case BUILT_IN_ISINFD32:
2444 case BUILT_IN_ISINFD64:
2445 case BUILT_IN_ISINFD128:
2446 /* These builtins have no optabs (yet). */
2447 break;
2448 default:
2449 gcc_unreachable ();
2450 }
2451
2452 /* There's no easy way to detect the case we need to set EDOM. */
2453 if (flag_errno_math && errno_set)
2454 return CODE_FOR_nothing;
2455
2456 /* Optab mode depends on the mode of the input argument. */
2457 mode = TYPE_MODE (TREE_TYPE (arg));
2458
2459 if (builtin_optab)
2460 return optab_handler (builtin_optab, mode);
2461 return CODE_FOR_nothing;
2462 }
2463
2464 /* Expand a call to one of the builtin math functions that operate on
2465 a floating point argument and output an integer result (ilogb, isinf,
2466 isnan, etc).
2467 Return 0 if a normal call should be emitted rather than expanding the
2468 function in-line. EXP is the expression that is a call to the builtin
2469 function; if convenient, the result should be placed in TARGET. */
2470
2471 static rtx
2472 expand_builtin_interclass_mathfn (tree exp, rtx target)
2473 {
2474 enum insn_code icode = CODE_FOR_nothing;
2475 rtx op0;
2476 tree fndecl = get_callee_fndecl (exp);
2477 enum machine_mode mode;
2478 tree arg;
2479
2480 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2481 return NULL_RTX;
2482
2483 arg = CALL_EXPR_ARG (exp, 0);
2484 icode = interclass_mathfn_icode (arg, fndecl);
2485 mode = TYPE_MODE (TREE_TYPE (arg));
2486
2487 if (icode != CODE_FOR_nothing)
2488 {
2489 struct expand_operand ops[1];
2490 rtx last = get_last_insn ();
2491 tree orig_arg = arg;
2492
2493 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2494 need to expand the argument again. This way, we will not perform
2495 side-effects more than once. */
2496 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2497
2498 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2499
2500 if (mode != GET_MODE (op0))
2501 op0 = convert_to_mode (mode, op0, 0);
2502
2503 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2504 if (maybe_legitimize_operands (icode, 0, 1, ops)
2505 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2506 return ops[0].value;
2507
2508 delete_insns_since (last);
2509 CALL_EXPR_ARG (exp, 0) = orig_arg;
2510 }
2511
2512 return NULL_RTX;
2513 }
2514
2515 /* Expand a call to the builtin sincos math function.
2516 Return NULL_RTX if a normal call should be emitted rather than expanding the
2517 function in-line. EXP is the expression that is a call to the builtin
2518 function. */
2519
2520 static rtx
2521 expand_builtin_sincos (tree exp)
2522 {
2523 rtx op0, op1, op2, target1, target2;
2524 enum machine_mode mode;
2525 tree arg, sinp, cosp;
2526 int result;
2527 location_t loc = EXPR_LOCATION (exp);
2528 tree alias_type, alias_off;
2529
2530 if (!validate_arglist (exp, REAL_TYPE,
2531 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2532 return NULL_RTX;
2533
2534 arg = CALL_EXPR_ARG (exp, 0);
2535 sinp = CALL_EXPR_ARG (exp, 1);
2536 cosp = CALL_EXPR_ARG (exp, 2);
2537
2538 /* Make a suitable register to place result in. */
2539 mode = TYPE_MODE (TREE_TYPE (arg));
2540
2541 /* Check if sincos insn is available, otherwise emit the call. */
2542 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2543 return NULL_RTX;
2544
2545 target1 = gen_reg_rtx (mode);
2546 target2 = gen_reg_rtx (mode);
2547
2548 op0 = expand_normal (arg);
2549 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2550 alias_off = build_int_cst (alias_type, 0);
2551 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2552 sinp, alias_off));
2553 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2554 cosp, alias_off));
2555
2556 /* Compute into target1 and target2.
2557 Set TARGET to wherever the result comes back. */
2558 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2559 gcc_assert (result);
2560
2561 /* Move target1 and target2 to the memory locations indicated
2562 by op1 and op2. */
2563 emit_move_insn (op1, target1);
2564 emit_move_insn (op2, target2);
2565
2566 return const0_rtx;
2567 }
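/* Illustrative effect: on a target that provides a sincos insn
   (e.g. x87 fsincos), a source-level call

     sincos (x, &s, &c);

   expands to a single instruction computing both results, followed
   by two stores to the locations pointed to by &s and &c; without
   such an insn the builtin is left as a library call.  */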
2568
2569 /* Expand a call to the internal cexpi builtin to the sincos math function.
2570 EXP is the expression that is a call to the builtin function; if convenient,
2571 the result should be placed in TARGET. */
2572
2573 static rtx
2574 expand_builtin_cexpi (tree exp, rtx target)
2575 {
2576 tree fndecl = get_callee_fndecl (exp);
2577 tree arg, type;
2578 enum machine_mode mode;
2579 rtx op0, op1, op2;
2580 location_t loc = EXPR_LOCATION (exp);
2581
2582 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2583 return NULL_RTX;
2584
2585 arg = CALL_EXPR_ARG (exp, 0);
2586 type = TREE_TYPE (arg);
2587 mode = TYPE_MODE (TREE_TYPE (arg));
2588
2589 /* Try expanding via a sincos optab; fall back to emitting a libcall
2590 to sincos or cexp. We are sure to have one of them because cexpi
2591 is only generated from sincos or cexp, or when either is available. */
2592 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2593 {
2594 op1 = gen_reg_rtx (mode);
2595 op2 = gen_reg_rtx (mode);
2596
2597 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2598
2599 /* Compute into op1 and op2. */
2600 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2601 }
2602 else if (targetm.libc_has_function (function_sincos))
2603 {
2604 tree call, fn = NULL_TREE;
2605 tree top1, top2;
2606 rtx op1a, op2a;
2607
2608 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2609 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2611 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2613 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2614 else
2615 gcc_unreachable ();
2616
2617 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2618 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2619 op1a = copy_addr_to_reg (XEXP (op1, 0));
2620 op2a = copy_addr_to_reg (XEXP (op2, 0));
2621 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2622 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2623
2624 /* Make sure not to fold the sincos call again. */
2625 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2626 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2627 call, 3, arg, top1, top2));
2628 }
2629 else
2630 {
2631 tree call, fn = NULL_TREE, narg;
2632 tree ctype = build_complex_type (type);
2633
2634 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2635 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2636 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2637 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2638 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2639 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2640 else
2641 gcc_unreachable ();
2642
2643 /* If we don't have a decl for cexp, create one. This is the
2644 friendliest fallback if the user calls __builtin_cexpi
2645 on a target without full C99 function support. */
2646 if (fn == NULL_TREE)
2647 {
2648 tree fntype;
2649 const char *name = NULL;
2650
2651 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2652 name = "cexpf";
2653 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2654 name = "cexp";
2655 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2656 name = "cexpl";
2657
2658 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2659 fn = build_fn_decl (name, fntype);
2660 }
2661
2662 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2663 build_real (type, dconst0), arg);
2664
2665 /* Make sure not to fold the cexp call again. */
2666 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2667 return expand_expr (build_call_nary (ctype, call, 1, narg),
2668 target, VOIDmode, EXPAND_NORMAL);
2669 }
2670
2671 /* Now build the proper return type. */
2672 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2673 make_tree (TREE_TYPE (arg), op2),
2674 make_tree (TREE_TYPE (arg), op1)),
2675 target, VOIDmode, EXPAND_NORMAL);
2676 }
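/* Note: __builtin_cexpi (x) computes cos (x) + i*sin (x), i.e.
   e**(i*x).  The three strategies above therefore agree: the sincos
   optab or sincos libcall produces the two components directly,
   while the cexp fallback evaluates cexp (0 + i*x).  */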
2677
2678 /* Conveniently construct a function call expression. FNDECL names the
2679 function to be called, N is the number of arguments, and the "..."
2680 parameters are the argument expressions. Unlike build_call_expr,
2681 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2682
2683 static tree
2684 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2685 {
2686 va_list ap;
2687 tree fntype = TREE_TYPE (fndecl);
2688 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2689
2690 va_start (ap, n);
2691 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2692 va_end (ap);
2693 SET_EXPR_LOCATION (fn, loc);
2694 return fn;
2695 }
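/* Typical use within this file (see expand_builtin_mempcpy_args
   below), as a sketch:

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree call = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                        dest, src, len);

   yielding a CALL_EXPR that is guaranteed not to have been folded
   away.  */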
2696
2697 /* Expand a call to one of the builtin rounding functions gcc defines
2698 as an extension (lfloor and lceil). As these are gcc extensions we
2699 do not need to worry about setting errno to EDOM.
2700 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2701 EXP is the expression that is a call to the builtin function;
2702 if convenient, the result should be placed in TARGET. */
2703
2704 static rtx
2705 expand_builtin_int_roundingfn (tree exp, rtx target)
2706 {
2707 convert_optab builtin_optab;
2708 rtx op0, insns, tmp;
2709 tree fndecl = get_callee_fndecl (exp);
2710 enum built_in_function fallback_fn;
2711 tree fallback_fndecl;
2712 enum machine_mode mode;
2713 tree arg;
2714
2715 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2716 gcc_unreachable ();
2717
2718 arg = CALL_EXPR_ARG (exp, 0);
2719
2720 switch (DECL_FUNCTION_CODE (fndecl))
2721 {
2722 CASE_FLT_FN (BUILT_IN_ICEIL):
2723 CASE_FLT_FN (BUILT_IN_LCEIL):
2724 CASE_FLT_FN (BUILT_IN_LLCEIL):
2725 builtin_optab = lceil_optab;
2726 fallback_fn = BUILT_IN_CEIL;
2727 break;
2728
2729 CASE_FLT_FN (BUILT_IN_IFLOOR):
2730 CASE_FLT_FN (BUILT_IN_LFLOOR):
2731 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2732 builtin_optab = lfloor_optab;
2733 fallback_fn = BUILT_IN_FLOOR;
2734 break;
2735
2736 default:
2737 gcc_unreachable ();
2738 }
2739
2740 /* Make a suitable register to place result in. */
2741 mode = TYPE_MODE (TREE_TYPE (exp));
2742
2743 target = gen_reg_rtx (mode);
2744
2745 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2746 need to expand the argument again. This way, we will not perform
2747 side-effects more than once. */
2748 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2749
2750 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2751
2752 start_sequence ();
2753
2754 /* Compute into TARGET. */
2755 if (expand_sfix_optab (target, op0, builtin_optab))
2756 {
2757 /* Output the entire sequence. */
2758 insns = get_insns ();
2759 end_sequence ();
2760 emit_insn (insns);
2761 return target;
2762 }
2763
2764 /* If we were unable to expand via the builtin, stop the sequence
2765 (without outputting the insns). */
2766 end_sequence ();
2767
2768 /* Fall back to floating point rounding optab. */
2769 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2770
2771 /* For non-C99 targets we may end up without a fallback fndecl here
2772 if the user called __builtin_lfloor directly. In this case emit
2773 a call to the floor/ceil variants nevertheless. This should result
2774 in the best user experience on targets without full C99 support. */
2775 if (fallback_fndecl == NULL_TREE)
2776 {
2777 tree fntype;
2778 const char *name = NULL;
2779
2780 switch (DECL_FUNCTION_CODE (fndecl))
2781 {
2782 case BUILT_IN_ICEIL:
2783 case BUILT_IN_LCEIL:
2784 case BUILT_IN_LLCEIL:
2785 name = "ceil";
2786 break;
2787 case BUILT_IN_ICEILF:
2788 case BUILT_IN_LCEILF:
2789 case BUILT_IN_LLCEILF:
2790 name = "ceilf";
2791 break;
2792 case BUILT_IN_ICEILL:
2793 case BUILT_IN_LCEILL:
2794 case BUILT_IN_LLCEILL:
2795 name = "ceill";
2796 break;
2797 case BUILT_IN_IFLOOR:
2798 case BUILT_IN_LFLOOR:
2799 case BUILT_IN_LLFLOOR:
2800 name = "floor";
2801 break;
2802 case BUILT_IN_IFLOORF:
2803 case BUILT_IN_LFLOORF:
2804 case BUILT_IN_LLFLOORF:
2805 name = "floorf";
2806 break;
2807 case BUILT_IN_IFLOORL:
2808 case BUILT_IN_LFLOORL:
2809 case BUILT_IN_LLFLOORL:
2810 name = "floorl";
2811 break;
2812 default:
2813 gcc_unreachable ();
2814 }
2815
2816 fntype = build_function_type_list (TREE_TYPE (arg),
2817 TREE_TYPE (arg), NULL_TREE);
2818 fallback_fndecl = build_fn_decl (name, fntype);
2819 }
2820
2821 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2822
2823 tmp = expand_normal (exp);
2824 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2825
2826 /* Truncate the result of floating point optab to integer
2827 via expand_fix (). */
2828 target = gen_reg_rtx (mode);
2829 expand_fix (target, tmp, 0);
2830
2831 return target;
2832 }
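/* For example, on a target without an lfloor<mode> pattern, a call
   long l = __builtin_lfloor (x) is lowered as sketched in the header
   comment: expand floor (x) (or a plain "floor" call on non-C99
   targets) and convert the result with expand_fix, i.e. effectively
   (long) floor (x).  */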
2833
2834 /* Expand a call to one of the builtin math functions doing integer
2835 conversion (lrint).
2836 Return 0 if a normal call should be emitted rather than expanding the
2837 function in-line. EXP is the expression that is a call to the builtin
2838 function; if convenient, the result should be placed in TARGET. */
2839
2840 static rtx
2841 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2842 {
2843 convert_optab builtin_optab;
2844 rtx op0, insns;
2845 tree fndecl = get_callee_fndecl (exp);
2846 tree arg;
2847 enum machine_mode mode;
2848 enum built_in_function fallback_fn = BUILT_IN_NONE;
2849
2850 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2851 gcc_unreachable ();
2852
2853 arg = CALL_EXPR_ARG (exp, 0);
2854
2855 switch (DECL_FUNCTION_CODE (fndecl))
2856 {
2857 CASE_FLT_FN (BUILT_IN_IRINT):
2858 fallback_fn = BUILT_IN_LRINT;
2859 /* FALLTHRU */
2860 CASE_FLT_FN (BUILT_IN_LRINT):
2861 CASE_FLT_FN (BUILT_IN_LLRINT):
2862 builtin_optab = lrint_optab;
2863 break;
2864
2865 CASE_FLT_FN (BUILT_IN_IROUND):
2866 fallback_fn = BUILT_IN_LROUND;
2867 /* FALLTHRU */
2868 CASE_FLT_FN (BUILT_IN_LROUND):
2869 CASE_FLT_FN (BUILT_IN_LLROUND):
2870 builtin_optab = lround_optab;
2871 break;
2872
2873 default:
2874 gcc_unreachable ();
2875 }
2876
2877 /* There's no easy way to detect the case we need to set EDOM. */
2878 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2879 return NULL_RTX;
2880
2881 /* Make a suitable register to place result in. */
2882 mode = TYPE_MODE (TREE_TYPE (exp));
2883
2884 /* Try expanding inline when errno does not need to be maintained. */
2885 if (!flag_errno_math)
2886 {
2887 rtx result = gen_reg_rtx (mode);
2888
2889 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2890 need to expand the argument again. This way, we will not perform
2891 side-effects more than once. */
2892 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2893
2894 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2895
2896 start_sequence ();
2897
2898 if (expand_sfix_optab (result, op0, builtin_optab))
2899 {
2900 /* Output the entire sequence. */
2901 insns = get_insns ();
2902 end_sequence ();
2903 emit_insn (insns);
2904 return result;
2905 }
2906
2907 /* If we were unable to expand via the builtin, stop the sequence
2908 (without outputting the insns) and call the library function
2909 with the stabilized argument list. */
2910 end_sequence ();
2911 }
2912
2913 if (fallback_fn != BUILT_IN_NONE)
2914 {
2915 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2916 targets, (int) round (x) should never be transformed into
2917 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2918 a call to lround in the hope that the target provides at least some
2919 C99 functions. This should result in the best user experience on
2920 targets without full C99 support. */
2921 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2922 fallback_fn, 0);
2923
2924 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2925 fallback_fndecl, 1, arg);
2926
2927 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2928 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2929 return convert_to_mode (mode, target, 0);
2930 }
2931
2932 return expand_call (exp, target, target == const0_rtx);
2933 }
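/* For example, when no lround<mode> insn is available, a call
   __builtin_iround (x) is emitted as a call to lround followed by a
   mode conversion, i.e. it behaves like (int) lround (x).  */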
2934
2935 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2936 a normal call should be emitted rather than expanding the function
2937 in-line. EXP is the expression that is a call to the builtin
2938 function; if convenient, the result should be placed in TARGET. */
2939
2940 static rtx
2941 expand_builtin_powi (tree exp, rtx target)
2942 {
2943 tree arg0, arg1;
2944 rtx op0, op1;
2945 enum machine_mode mode;
2946 enum machine_mode mode2;
2947
2948 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2949 return NULL_RTX;
2950
2951 arg0 = CALL_EXPR_ARG (exp, 0);
2952 arg1 = CALL_EXPR_ARG (exp, 1);
2953 mode = TYPE_MODE (TREE_TYPE (exp));
2954
2955 /* Emit a libcall to libgcc. */
2956
2957 /* Mode of the 2nd argument must match that of an int. */
2958 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2959
2960 if (target == NULL_RTX)
2961 target = gen_reg_rtx (mode);
2962
2963 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2964 if (GET_MODE (op0) != mode)
2965 op0 = convert_to_mode (mode, op0, 0);
2966 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2967 if (GET_MODE (op1) != mode2)
2968 op1 = convert_to_mode (mode2, op1, 0);
2969
2970 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2971 target, LCT_CONST, mode, 2,
2972 op0, mode, op1, mode2);
2973
2974 return target;
2975 }
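/* A sketch of the result: __builtin_powi (x, n) with double X and
   int N becomes a libgcc call along the lines of

     double __powidf2 (double x, int n);

   (the exact libfunc name comes from powi_optab and is target- and
   mode-dependent).  */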
2976
2977 /* Expand expression EXP, which is a call to the strlen builtin. Return
2978 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2979 try to get the result in TARGET, if convenient. */
2980
2981 static rtx
2982 expand_builtin_strlen (tree exp, rtx target,
2983 enum machine_mode target_mode)
2984 {
2985 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2986 return NULL_RTX;
2987 else
2988 {
2989 struct expand_operand ops[4];
2990 rtx pat;
2991 tree len;
2992 tree src = CALL_EXPR_ARG (exp, 0);
2993 rtx src_reg, before_strlen;
2994 enum machine_mode insn_mode = target_mode;
2995 enum insn_code icode = CODE_FOR_nothing;
2996 unsigned int align;
2997
2998 /* If the length can be computed at compile-time, return it. */
2999 len = c_strlen (src, 0);
3000 if (len)
3001 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3002
3003 /* If the length can be computed at compile-time and is a constant
3004 integer, but there are side-effects in src, evaluate
3005 src for side-effects, then return len.
3006 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3007 can be optimized into: i++; x = 3; */
3008 len = c_strlen (src, 1);
3009 if (len && TREE_CODE (len) == INTEGER_CST)
3010 {
3011 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3012 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3013 }
3014
3015 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3016
3017 /* If SRC is not a pointer type, don't do this operation inline. */
3018 if (align == 0)
3019 return NULL_RTX;
3020
3021 /* Bail out if we can't compute strlen in the right mode. */
3022 while (insn_mode != VOIDmode)
3023 {
3024 icode = optab_handler (strlen_optab, insn_mode);
3025 if (icode != CODE_FOR_nothing)
3026 break;
3027
3028 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3029 }
3030 if (insn_mode == VOIDmode)
3031 return NULL_RTX;
3032
3033 /* Make a place to hold the source address. We will not expand
3034 the actual source until we are sure that the expansion will
3035 not fail -- there are trees that cannot be expanded twice. */
3036 src_reg = gen_reg_rtx (Pmode);
3037
3038 /* Mark the beginning of the strlen sequence so we can emit the
3039 source operand later. */
3040 before_strlen = get_last_insn ();
3041
3042 create_output_operand (&ops[0], target, insn_mode);
3043 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3044 create_integer_operand (&ops[2], 0);
3045 create_integer_operand (&ops[3], align);
3046 if (!maybe_expand_insn (icode, 4, ops))
3047 return NULL_RTX;
3048
3049 /* Now that we are assured of success, expand the source. */
3050 start_sequence ();
3051 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3052 if (pat != src_reg)
3053 {
3054 #ifdef POINTERS_EXTEND_UNSIGNED
3055 if (GET_MODE (pat) != Pmode)
3056 pat = convert_to_mode (Pmode, pat,
3057 POINTERS_EXTEND_UNSIGNED);
3058 #endif
3059 emit_move_insn (src_reg, pat);
3060 }
3061 pat = get_insns ();
3062 end_sequence ();
3063
3064 if (before_strlen)
3065 emit_insn_after (pat, before_strlen);
3066 else
3067 emit_insn_before (pat, get_insns ());
3068
3069 /* Return the value in the proper mode for this function. */
3070 if (GET_MODE (ops[0].value) == target_mode)
3071 target = ops[0].value;
3072 else if (target != 0)
3073 convert_move (target, ops[0].value, 0);
3074 else
3075 target = convert_to_mode (target_mode, ops[0].value, 0);
3076
3077 return target;
3078 }
3079 }
3080
3081 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3082 bytes from constant string DATA + OFFSET and return it as target
3083 constant. */
3084
3085 static rtx
3086 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3087 enum machine_mode mode)
3088 {
3089 const char *str = (const char *) data;
3090
3091 gcc_assert (offset >= 0
3092 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3093 <= strlen (str) + 1));
3094
3095 return c_readstr (str + offset, mode);
3096 }
3097
3098 /* LEN specifies the length of the block for the memcpy/memset operation.
3099 Figure out its range and put it into MIN_SIZE/MAX_SIZE. */
3100
3101 static void
3102 determine_block_size (tree len, rtx len_rtx,
3103 unsigned HOST_WIDE_INT *min_size,
3104 unsigned HOST_WIDE_INT *max_size)
3105 {
3106 if (CONST_INT_P (len_rtx))
3107 {
3108 *min_size = *max_size = UINTVAL (len_rtx);
3109 return;
3110 }
3111 else
3112 {
3113 double_int min, max;
3114 if (TREE_CODE (len) == SSA_NAME
3115 && get_range_info (len, &min, &max) == VR_RANGE)
3116 {
3117 if (min.fits_uhwi ())
3118 *min_size = min.to_uhwi ();
3119 else
3120 *min_size = 0;
3121 if (max.fits_uhwi ())
3122 *max_size = max.to_uhwi ();
3123 else
3124 *max_size = (HOST_WIDE_INT)-1;
3125 }
3126 else
3127 {
3128 if (host_integerp (TYPE_MIN_VALUE (TREE_TYPE (len)), 1))
3129 *min_size = tree_low_cst (TYPE_MIN_VALUE (TREE_TYPE (len)), 1);
3130 else
3131 *min_size = 0;
3132 if (host_integerp (TYPE_MAX_VALUE (TREE_TYPE (len)), 1))
3133 *max_size = tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (len)), 1);
3134 else
3135 *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3136 }
3137 }
3138 gcc_checking_assert (*max_size <=
3139 (unsigned HOST_WIDE_INT)
3140 GET_MODE_MASK (GET_MODE (len_rtx)));
3141 }
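/* For instance, if value-range propagation has shown that the SSA
   name LEN of a memcpy call lies in [8, 24], this sets *MIN_SIZE = 8
   and *MAX_SIZE = 24, which can let the block-move expander commit
   to a small inline copy instead of a library call.  */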
3142
3143 /* Expand a call EXP to the memcpy builtin.
3144 Return NULL_RTX if we failed; the caller should emit a normal call.
3145 Otherwise try to get the result in TARGET, if convenient (and in
3146 mode MODE if that's convenient). */
3147
3148 static rtx
3149 expand_builtin_memcpy (tree exp, rtx target)
3150 {
3151 if (!validate_arglist (exp,
3152 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3153 return NULL_RTX;
3154 else
3155 {
3156 tree dest = CALL_EXPR_ARG (exp, 0);
3157 tree src = CALL_EXPR_ARG (exp, 1);
3158 tree len = CALL_EXPR_ARG (exp, 2);
3159 const char *src_str;
3160 unsigned int src_align = get_pointer_alignment (src);
3161 unsigned int dest_align = get_pointer_alignment (dest);
3162 rtx dest_mem, src_mem, dest_addr, len_rtx;
3163 HOST_WIDE_INT expected_size = -1;
3164 unsigned int expected_align = 0;
3165 unsigned HOST_WIDE_INT min_size;
3166 unsigned HOST_WIDE_INT max_size;
3167
3168 /* If DEST is not a pointer type, call the normal function. */
3169 if (dest_align == 0)
3170 return NULL_RTX;
3171
3172 /* If SRC is not a pointer type, don't do this
3173 operation in-line. */
3174 if (src_align == 0)
3175 return NULL_RTX;
3176
3177 if (currently_expanding_gimple_stmt)
3178 stringop_block_profile (currently_expanding_gimple_stmt,
3179 &expected_align, &expected_size);
3180
3181 if (expected_align < dest_align)
3182 expected_align = dest_align;
3183 dest_mem = get_memory_rtx (dest, len);
3184 set_mem_align (dest_mem, dest_align);
3185 len_rtx = expand_normal (len);
3186 determine_block_size (len, len_rtx, &min_size, &max_size);
3187 src_str = c_getstr (src);
3188
3189 /* If SRC is a string constant and block move would be done
3190 by pieces, we can avoid loading the string from memory
3191 and only store the computed constants. */
3192 if (src_str
3193 && CONST_INT_P (len_rtx)
3194 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3195 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3196 CONST_CAST (char *, src_str),
3197 dest_align, false))
3198 {
3199 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3200 builtin_memcpy_read_str,
3201 CONST_CAST (char *, src_str),
3202 dest_align, false, 0);
3203 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3204 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3205 return dest_mem;
3206 }
3207
3208 src_mem = get_memory_rtx (src, len);
3209 set_mem_align (src_mem, src_align);
3210
3211 /* Copy word part most expediently. */
3212 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3213 CALL_EXPR_TAILCALL (exp)
3214 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3215 expected_align, expected_size,
3216 min_size, max_size);
3217
3218 if (dest_addr == 0)
3219 {
3220 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3221 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3222 }
3223 return dest_addr;
3224 }
3225 }
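/* Example of the store-by-pieces path above: for
   memcpy (buf, "hi", 3) with a sufficiently aligned BUF, the three
   constant bytes (including the NUL) are stored as immediate data,
   and no load from the string constant's memory is emitted.  */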
3226
3227 /* Expand a call EXP to the mempcpy builtin.
3228 Return NULL_RTX if we failed; the caller should emit a normal call,
3229 otherwise try to get the result in TARGET, if convenient (and in
3230 mode MODE if that's convenient). If ENDP is 0 return the
3231 destination pointer, if ENDP is 1 return the end pointer ala
3232 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3233 stpcpy. */
3234
3235 static rtx
3236 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3237 {
3238 if (!validate_arglist (exp,
3239 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3240 return NULL_RTX;
3241 else
3242 {
3243 tree dest = CALL_EXPR_ARG (exp, 0);
3244 tree src = CALL_EXPR_ARG (exp, 1);
3245 tree len = CALL_EXPR_ARG (exp, 2);
3246 return expand_builtin_mempcpy_args (dest, src, len,
3247 target, mode, /*endp=*/ 1);
3248 }
3249 }
3250
3251 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3252 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3253 so that this can also be called without constructing an actual CALL_EXPR.
3254 The other arguments and return value are the same as for
3255 expand_builtin_mempcpy. */
3256
3257 static rtx
3258 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3259 rtx target, enum machine_mode mode, int endp)
3260 {
3261 /* If return value is ignored, transform mempcpy into memcpy. */
3262 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3263 {
3264 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3265 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3266 dest, src, len);
3267 return expand_expr (result, target, mode, EXPAND_NORMAL);
3268 }
3269 else
3270 {
3271 const char *src_str;
3272 unsigned int src_align = get_pointer_alignment (src);
3273 unsigned int dest_align = get_pointer_alignment (dest);
3274 rtx dest_mem, src_mem, len_rtx;
3275
3276 /* If either SRC or DEST is not a pointer type, don't do this
3277 operation in-line. */
3278 if (dest_align == 0 || src_align == 0)
3279 return NULL_RTX;
3280
3281 /* If LEN is not constant, call the normal function. */
3282 if (! host_integerp (len, 1))
3283 return NULL_RTX;
3284
3285 len_rtx = expand_normal (len);
3286 src_str = c_getstr (src);
3287
3288 /* If SRC is a string constant and block move would be done
3289 by pieces, we can avoid loading the string from memory
3290 and only store the computed constants. */
3291 if (src_str
3292 && CONST_INT_P (len_rtx)
3293 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3294 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3295 CONST_CAST (char *, src_str),
3296 dest_align, false))
3297 {
3298 dest_mem = get_memory_rtx (dest, len);
3299 set_mem_align (dest_mem, dest_align);
3300 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3301 builtin_memcpy_read_str,
3302 CONST_CAST (char *, src_str),
3303 dest_align, false, endp);
3304 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3305 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3306 return dest_mem;
3307 }
3308
3309 if (CONST_INT_P (len_rtx)
3310 && can_move_by_pieces (INTVAL (len_rtx),
3311 MIN (dest_align, src_align)))
3312 {
3313 dest_mem = get_memory_rtx (dest, len);
3314 set_mem_align (dest_mem, dest_align);
3315 src_mem = get_memory_rtx (src, len);
3316 set_mem_align (src_mem, src_align);
3317 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3318 MIN (dest_align, src_align), endp);
3319 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3320 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3321 return dest_mem;
3322 }
3323
3324 return NULL_RTX;
3325 }
3326 }
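
/* Sketch (not from the GCC sources) of the store-by-pieces path above:
   with a constant length and a string-literal source, the copy can be
   emitted as a few wide constant stores instead of a library call.
   The destination dst is hypothetical.  */
#if 0
/* On a little-endian target, mempcpy (dst, "hi!", 4) may become one
   4-byte store of the constant 0x00216968 ("hi!" plus its NUL), with
   the return value computed directly as dst + 4.  */
#endif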
3327
3328 #ifndef HAVE_movstr
3329 # define HAVE_movstr 0
3330 # define CODE_FOR_movstr CODE_FOR_nothing
3331 #endif
3332
3333 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3334 if we failed; the caller should emit a normal call, otherwise try to
3335 get the result in TARGET, if convenient. If ENDP is 0 return the
3336 destination pointer, if ENDP is 1 return the end pointer ala
3337 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3338 stpcpy. */
3339
3340 static rtx
3341 expand_movstr (tree dest, tree src, rtx target, int endp)
3342 {
3343 struct expand_operand ops[3];
3344 rtx dest_mem;
3345 rtx src_mem;
3346
3347 if (!HAVE_movstr)
3348 return NULL_RTX;
3349
3350 dest_mem = get_memory_rtx (dest, NULL);
3351 src_mem = get_memory_rtx (src, NULL);
3352 if (!endp)
3353 {
3354 target = force_reg (Pmode, XEXP (dest_mem, 0));
3355 dest_mem = replace_equiv_address (dest_mem, target);
3356 }
3357
3358 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3359 create_fixed_operand (&ops[1], dest_mem);
3360 create_fixed_operand (&ops[2], src_mem);
3361 expand_insn (CODE_FOR_movstr, 3, ops);
3362
3363 if (endp && target != const0_rtx)
3364 {
3365 target = ops[0].value;
3366 /* movstr is supposed to set end to the address of the NUL
3367 terminator. If the caller requested a mempcpy-like return value,
3368 adjust it. */
3369 if (endp == 1)
3370 {
3371 rtx tem = plus_constant (GET_MODE (target),
3372 gen_lowpart (GET_MODE (target), target), 1);
3373 emit_move_insn (target, force_operand (tem, NULL_RTX));
3374 }
3375 }
3376 return target;
3377 }
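
/* Illustration (not part of the original source) of the adjustment
   above: movstr yields the address of the copied NUL, which is the
   stpcpy (ENDP == 2) convention; mempcpy (ENDP == 1) wants the
   address one past it.  */
#if 0
char buf[8];
/* stpcpy (buf, "abc")     == buf + 3  -- what movstr produces.  */
/* mempcpy (buf, "abc", 4) == buf + 4  -- the movstr result plus 1.  */
#endif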
3378
3379 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3380 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3381 try to get the result in TARGET, if convenient (and in mode MODE if that's
3382 convenient). */
3383
3384 static rtx
3385 expand_builtin_strcpy (tree exp, rtx target)
3386 {
3387 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3388 {
3389 tree dest = CALL_EXPR_ARG (exp, 0);
3390 tree src = CALL_EXPR_ARG (exp, 1);
3391 return expand_builtin_strcpy_args (dest, src, target);
3392 }
3393 return NULL_RTX;
3394 }
3395
3396 /* Helper function to do the actual work for expand_builtin_strcpy. The
3397 arguments to the builtin_strcpy call DEST and SRC are broken out
3398 so that this can also be called without constructing an actual CALL_EXPR.
3399 The other arguments and return value are the same as for
3400 expand_builtin_strcpy. */
3401
3402 static rtx
3403 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3404 {
3405 return expand_movstr (dest, src, target, /*endp=*/0);
3406 }
3407
3408 /* Expand a call EXP to the stpcpy builtin.
3409 Return NULL_RTX if we failed; the caller should emit a normal call,
3410 otherwise try to get the result in TARGET, if convenient (and in
3411 mode MODE if that's convenient). */
3412
3413 static rtx
3414 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3415 {
3416 tree dst, src;
3417 location_t loc = EXPR_LOCATION (exp);
3418
3419 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3420 return NULL_RTX;
3421
3422 dst = CALL_EXPR_ARG (exp, 0);
3423 src = CALL_EXPR_ARG (exp, 1);
3424
3425 /* If return value is ignored, transform stpcpy into strcpy. */
3426 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3427 {
3428 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3429 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3430 return expand_expr (result, target, mode, EXPAND_NORMAL);
3431 }
3432 else
3433 {
3434 tree len, lenp1;
3435 rtx ret;
3436
3437 /* Ensure we get an actual string whose length can be evaluated at
3438 compile-time, not an expression containing a string. This is
3439 because the latter will potentially produce pessimized code
3440 when used to produce the return value. */
3441 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3442 return expand_movstr (dst, src, target, /*endp=*/2);
3443
3444 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3445 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3446 target, mode, /*endp=*/2);
3447
3448 if (ret)
3449 return ret;
3450
3451 if (TREE_CODE (len) == INTEGER_CST)
3452 {
3453 rtx len_rtx = expand_normal (len);
3454
3455 if (CONST_INT_P (len_rtx))
3456 {
3457 ret = expand_builtin_strcpy_args (dst, src, target);
3458
3459 if (ret)
3460 {
3461 if (! target)
3462 {
3463 if (mode != VOIDmode)
3464 target = gen_reg_rtx (mode);
3465 else
3466 target = gen_reg_rtx (GET_MODE (ret));
3467 }
3468 if (GET_MODE (target) != GET_MODE (ret))
3469 ret = gen_lowpart (GET_MODE (target), ret);
3470
3471 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3472 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3473 gcc_assert (ret);
3474
3475 return target;
3476 }
3477 }
3478 }
3479
3480 return expand_movstr (dst, src, target, /*endp=*/2);
3481 }
3482 }
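
/* Sketch (not from the GCC sources) of the transformation above when
   the source length is known at compile time; dst is hypothetical.  */
#if 0
char dst[16];
/* stpcpy (dst, "abcd") is expanded as mempcpy with length
   strlen ("abcd") + 1 == 5 and ENDP == 2, i.e. copy five bytes and
   return dst + 5 - 1 == dst + 4, the address of the NUL.  */
#endif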
3483
3484 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3485 bytes from constant string DATA + OFFSET and return it as target
3486 constant. */
3487
3488 rtx
3489 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3490 enum machine_mode mode)
3491 {
3492 const char *str = (const char *) data;
3493
3494 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3495 return const0_rtx;
3496
3497 return c_readstr (str + offset, mode);
3498 }
3499
3500 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3501 NULL_RTX if we failed; the caller should emit a normal call. */
3502
3503 static rtx
3504 expand_builtin_strncpy (tree exp, rtx target)
3505 {
3506 location_t loc = EXPR_LOCATION (exp);
3507
3508 if (validate_arglist (exp,
3509 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3510 {
3511 tree dest = CALL_EXPR_ARG (exp, 0);
3512 tree src = CALL_EXPR_ARG (exp, 1);
3513 tree len = CALL_EXPR_ARG (exp, 2);
3514 tree slen = c_strlen (src, 1);
3515
3516 /* We must be passed a constant len and src parameter. */
3517 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3518 return NULL_RTX;
3519
3520 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3521
3522 /* We're required to pad with trailing zeros if the requested
3523 len is greater than strlen(s2)+1. In that case try to
3524 use store_by_pieces; if that fails, punt. */
3525 if (tree_int_cst_lt (slen, len))
3526 {
3527 unsigned int dest_align = get_pointer_alignment (dest);
3528 const char *p = c_getstr (src);
3529 rtx dest_mem;
3530
3531 if (!p || dest_align == 0 || !host_integerp (len, 1)
3532 || !can_store_by_pieces (tree_low_cst (len, 1),
3533 builtin_strncpy_read_str,
3534 CONST_CAST (char *, p),
3535 dest_align, false))
3536 return NULL_RTX;
3537
3538 dest_mem = get_memory_rtx (dest, len);
3539 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3540 builtin_strncpy_read_str,
3541 CONST_CAST (char *, p), dest_align, false, 0);
3542 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3543 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3544 return dest_mem;
3545 }
3546 }
3547 return NULL_RTX;
3548 }
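
/* Illustration (not part of the original source) of the padding rule
   handled above: when the requested length exceeds strlen (src) + 1,
   the whole destination block is a compile-time constant, so
   store_by_pieces applies; buf is hypothetical.  */
#if 0
char buf[8];
/* strncpy (buf, "ab", 8) stores 'a', 'b', then six NUL bytes.  */
#endif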
3549
3550 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3551 bytes from constant string DATA + OFFSET and return it as target
3552 constant. */
3553
3554 rtx
3555 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3556 enum machine_mode mode)
3557 {
3558 const char *c = (const char *) data;
3559 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3560
3561 memset (p, *c, GET_MODE_SIZE (mode));
3562
3563 return c_readstr (p, mode);
3564 }
3565
3566 /* Callback routine for store_by_pieces. Return the RTL of a register
3567 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3568 char value given in the RTL register data. For example, if mode is
3569 4 bytes wide, return the RTL for 0x01010101*data. */
3570
3571 static rtx
3572 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3573 enum machine_mode mode)
3574 {
3575 rtx target, coeff;
3576 size_t size;
3577 char *p;
3578
3579 size = GET_MODE_SIZE (mode);
3580 if (size == 1)
3581 return (rtx) data;
3582
3583 p = XALLOCAVEC (char, size);
3584 memset (p, 1, size);
3585 coeff = c_readstr (p, mode);
3586
3587 target = convert_to_mode (mode, (rtx) data, 1);
3588 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3589 return force_reg (mode, target);
3590 }
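
/* Sketch (hypothetical helper, not part of GCC) of the replication
   trick above: multiplying the zero-extended byte by a constant with
   a 1 in every byte position broadcasts it across the word.  */
#if 0
static unsigned int
replicate_byte_4 (unsigned char c)
{
  return (unsigned int) c * 0x01010101u;   /* e.g. 0xAB -> 0xABABABAB */
}
#endif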
3591
3592 /* Expand expression EXP, which is a call to the memset builtin. Return
3593 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3594 try to get the result in TARGET, if convenient (and in mode MODE if that's
3595 convenient). */
3596
3597 static rtx
3598 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3599 {
3600 if (!validate_arglist (exp,
3601 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3602 return NULL_RTX;
3603 else
3604 {
3605 tree dest = CALL_EXPR_ARG (exp, 0);
3606 tree val = CALL_EXPR_ARG (exp, 1);
3607 tree len = CALL_EXPR_ARG (exp, 2);
3608 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3609 }
3610 }
3611
3612 /* Helper function to do the actual work for expand_builtin_memset. The
3613 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3614 so that this can also be called without constructing an actual CALL_EXPR.
3615 The other arguments and return value are the same as for
3616 expand_builtin_memset. */
3617
3618 static rtx
3619 expand_builtin_memset_args (tree dest, tree val, tree len,
3620 rtx target, enum machine_mode mode, tree orig_exp)
3621 {
3622 tree fndecl, fn;
3623 enum built_in_function fcode;
3624 enum machine_mode val_mode;
3625 char c;
3626 unsigned int dest_align;
3627 rtx dest_mem, dest_addr, len_rtx;
3628 HOST_WIDE_INT expected_size = -1;
3629 unsigned int expected_align = 0;
3630 unsigned HOST_WIDE_INT min_size;
3631 unsigned HOST_WIDE_INT max_size;
3632
3633 dest_align = get_pointer_alignment (dest);
3634
3635 /* If DEST is not a pointer type, don't do this operation in-line. */
3636 if (dest_align == 0)
3637 return NULL_RTX;
3638
3639 if (currently_expanding_gimple_stmt)
3640 stringop_block_profile (currently_expanding_gimple_stmt,
3641 &expected_align, &expected_size);
3642
3643 if (expected_align < dest_align)
3644 expected_align = dest_align;
3645
3646 /* If the LEN parameter is zero, return DEST. */
3647 if (integer_zerop (len))
3648 {
3649 /* Evaluate and ignore VAL in case it has side-effects. */
3650 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3651 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3652 }
3653
3654 /* Stabilize the arguments in case we fail. */
3655 dest = builtin_save_expr (dest);
3656 val = builtin_save_expr (val);
3657 len = builtin_save_expr (len);
3658
3659 len_rtx = expand_normal (len);
3660 determine_block_size (len, len_rtx, &min_size, &max_size);
3661 dest_mem = get_memory_rtx (dest, len);
3662 val_mode = TYPE_MODE (unsigned_char_type_node);
3663
3664 if (TREE_CODE (val) != INTEGER_CST)
3665 {
3666 rtx val_rtx;
3667
3668 val_rtx = expand_normal (val);
3669 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3670
3671 /* Assume that we can memset by pieces if we can store
3672 the coefficients by pieces (in the required modes).
3673 We can't pass builtin_memset_gen_str as that emits RTL. */
3674 c = 1;
3675 if (host_integerp (len, 1)
3676 && can_store_by_pieces (tree_low_cst (len, 1),
3677 builtin_memset_read_str, &c, dest_align,
3678 true))
3679 {
3680 val_rtx = force_reg (val_mode, val_rtx);
3681 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3682 builtin_memset_gen_str, val_rtx, dest_align,
3683 true, 0);
3684 }
3685 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3686 dest_align, expected_align,
3687 expected_size, min_size, max_size))
3688 goto do_libcall;
3689
3690 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3691 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3692 return dest_mem;
3693 }
3694
3695 if (target_char_cast (val, &c))
3696 goto do_libcall;
3697
3698 if (c)
3699 {
3700 if (host_integerp (len, 1)
3701 && can_store_by_pieces (tree_low_cst (len, 1),
3702 builtin_memset_read_str, &c, dest_align,
3703 true))
3704 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3705 builtin_memset_read_str, &c, dest_align, true, 0);
3706 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3707 gen_int_mode (c, val_mode),
3708 dest_align, expected_align,
3709 expected_size, min_size, max_size))
3710 goto do_libcall;
3711
3712 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3713 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3714 return dest_mem;
3715 }
3716
3717 set_mem_align (dest_mem, dest_align);
3718 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3719 CALL_EXPR_TAILCALL (orig_exp)
3720 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3721 expected_align, expected_size,
3722 min_size, max_size);
3723
3724 if (dest_addr == 0)
3725 {
3726 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3727 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3728 }
3729
3730 return dest_addr;
3731
3732 do_libcall:
3733 fndecl = get_callee_fndecl (orig_exp);
3734 fcode = DECL_FUNCTION_CODE (fndecl);
3735 if (fcode == BUILT_IN_MEMSET)
3736 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3737 dest, val, len);
3738 else if (fcode == BUILT_IN_BZERO)
3739 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3740 dest, len);
3741 else
3742 gcc_unreachable ();
3743 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3744 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3745 return expand_call (fn, target, target == const0_rtx);
3746 }
3747
3748 /* Expand expression EXP, which is a call to the bzero builtin. Return
3749 NULL_RTX if we failed; the caller should emit a normal call. */
3750
3751 static rtx
3752 expand_builtin_bzero (tree exp)
3753 {
3754 tree dest, size;
3755 location_t loc = EXPR_LOCATION (exp);
3756
3757 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3758 return NULL_RTX;
3759
3760 dest = CALL_EXPR_ARG (exp, 0);
3761 size = CALL_EXPR_ARG (exp, 1);
3762
3763 /* New argument list transforming bzero(ptr x, int y) to
3764 memset(ptr x, int 0, size_t y). This is done this way
3765 so that if it isn't expanded inline, we fall back to
3766 calling bzero instead of memset. */
3767
3768 return expand_builtin_memset_args (dest, integer_zero_node,
3769 fold_convert_loc (loc,
3770 size_type_node, size),
3771 const0_rtx, VOIDmode, exp);
3772 }
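
/* Illustration (not part of the original source) of the argument-list
   rewrite above.  */
#if 0
/* bzero (p, n) is expanded as if it were memset (p, 0, (size_t) n)
   with the result discarded; if inline expansion fails, the library
   call that is emitted is still bzero, not memset.  */
#endif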
3773
3774 /* Expand expression EXP, which is a call to the memcmp built-in function.
3775 Return NULL_RTX if we failed and the caller should emit a normal call,
3776 otherwise try to get the result in TARGET, if convenient (and in mode
3777 MODE, if that's convenient). */
3778
3779 static rtx
3780 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3781 ATTRIBUTE_UNUSED enum machine_mode mode)
3782 {
3783 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3784
3785 if (!validate_arglist (exp,
3786 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3787 return NULL_RTX;
3788
3789 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3790 implementing memcmp because it will stop if it encounters two
3791 zero bytes. */
3792 #if defined HAVE_cmpmemsi
3793 {
3794 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3795 rtx result;
3796 rtx insn;
3797 tree arg1 = CALL_EXPR_ARG (exp, 0);
3798 tree arg2 = CALL_EXPR_ARG (exp, 1);
3799 tree len = CALL_EXPR_ARG (exp, 2);
3800
3801 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3802 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3803 enum machine_mode insn_mode;
3804
3805 if (HAVE_cmpmemsi)
3806 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3807 else
3808 return NULL_RTX;
3809
3810 /* If we don't have POINTER_TYPE, call the function. */
3811 if (arg1_align == 0 || arg2_align == 0)
3812 return NULL_RTX;
3813
3814 /* Make a place to write the result of the instruction. */
3815 result = target;
3816 if (! (result != 0
3817 && REG_P (result) && GET_MODE (result) == insn_mode
3818 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3819 result = gen_reg_rtx (insn_mode);
3820
3821 arg1_rtx = get_memory_rtx (arg1, len);
3822 arg2_rtx = get_memory_rtx (arg2, len);
3823 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3824
3825 /* Set MEM_SIZE as appropriate. */
3826 if (CONST_INT_P (arg3_rtx))
3827 {
3828 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3829 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3830 }
3831
3832 if (HAVE_cmpmemsi)
3833 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3834 GEN_INT (MIN (arg1_align, arg2_align)));
3835 else
3836 gcc_unreachable ();
3837
3838 if (insn)
3839 emit_insn (insn);
3840 else
3841 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3842 TYPE_MODE (integer_type_node), 3,
3843 XEXP (arg1_rtx, 0), Pmode,
3844 XEXP (arg2_rtx, 0), Pmode,
3845 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3846 TYPE_UNSIGNED (sizetype)),
3847 TYPE_MODE (sizetype));
3848
3849 /* Return the value in the proper mode for this function. */
3850 mode = TYPE_MODE (TREE_TYPE (exp));
3851 if (GET_MODE (result) == mode)
3852 return result;
3853 else if (target != 0)
3854 {
3855 convert_move (target, result, 0);
3856 return target;
3857 }
3858 else
3859 return convert_to_mode (mode, result, 0);
3860 }
3861 #endif /* HAVE_cmpmemsi. */
3862
3863 return NULL_RTX;
3864 }
3865
3866 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3867 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3868 try to get the result in TARGET, if convenient. */
3869
3870 static rtx
3871 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3872 {
3873 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3874 return NULL_RTX;
3875
3876 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3877 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3878 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3879 {
3880 rtx arg1_rtx, arg2_rtx;
3881 rtx result, insn = NULL_RTX;
3882 tree fndecl, fn;
3883 tree arg1 = CALL_EXPR_ARG (exp, 0);
3884 tree arg2 = CALL_EXPR_ARG (exp, 1);
3885
3886 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3887 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3888
3889 /* If we don't have POINTER_TYPE, call the function. */
3890 if (arg1_align == 0 || arg2_align == 0)
3891 return NULL_RTX;
3892
3893 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3894 arg1 = builtin_save_expr (arg1);
3895 arg2 = builtin_save_expr (arg2);
3896
3897 arg1_rtx = get_memory_rtx (arg1, NULL);
3898 arg2_rtx = get_memory_rtx (arg2, NULL);
3899
3900 #ifdef HAVE_cmpstrsi
3901 /* Try to call cmpstrsi. */
3902 if (HAVE_cmpstrsi)
3903 {
3904 enum machine_mode insn_mode
3905 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3906
3907 /* Make a place to write the result of the instruction. */
3908 result = target;
3909 if (! (result != 0
3910 && REG_P (result) && GET_MODE (result) == insn_mode
3911 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3912 result = gen_reg_rtx (insn_mode);
3913
3914 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3915 GEN_INT (MIN (arg1_align, arg2_align)));
3916 }
3917 #endif
3918 #ifdef HAVE_cmpstrnsi
3919 /* Try to determine at least one length and call cmpstrnsi. */
3920 if (!insn && HAVE_cmpstrnsi)
3921 {
3922 tree len;
3923 rtx arg3_rtx;
3924
3925 enum machine_mode insn_mode
3926 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3927 tree len1 = c_strlen (arg1, 1);
3928 tree len2 = c_strlen (arg2, 1);
3929
3930 if (len1)
3931 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3932 if (len2)
3933 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3934
3935 /* If we don't have a constant length for the first, use the length
3936 of the second, if we know it. We don't require a constant for
3937 this case; some cost analysis could be done if both are available
3938 but neither is constant. For now, assume they're equally cheap,
3939 unless one has side effects. If both strings have constant lengths,
3940 use the smaller. */
3941
3942 if (!len1)
3943 len = len2;
3944 else if (!len2)
3945 len = len1;
3946 else if (TREE_SIDE_EFFECTS (len1))
3947 len = len2;
3948 else if (TREE_SIDE_EFFECTS (len2))
3949 len = len1;
3950 else if (TREE_CODE (len1) != INTEGER_CST)
3951 len = len2;
3952 else if (TREE_CODE (len2) != INTEGER_CST)
3953 len = len1;
3954 else if (tree_int_cst_lt (len1, len2))
3955 len = len1;
3956 else
3957 len = len2;
3958
3959 /* If both arguments have side effects, we cannot optimize. */
3960 if (!len || TREE_SIDE_EFFECTS (len))
3961 goto do_libcall;
3962
3963 arg3_rtx = expand_normal (len);
3964
3965 /* Make a place to write the result of the instruction. */
3966 result = target;
3967 if (! (result != 0
3968 && REG_P (result) && GET_MODE (result) == insn_mode
3969 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3970 result = gen_reg_rtx (insn_mode);
3971
3972 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3973 GEN_INT (MIN (arg1_align, arg2_align)));
3974 }
3975 #endif
3976
3977 if (insn)
3978 {
3979 enum machine_mode mode;
3980 emit_insn (insn);
3981
3982 /* Return the value in the proper mode for this function. */
3983 mode = TYPE_MODE (TREE_TYPE (exp));
3984 if (GET_MODE (result) == mode)
3985 return result;
3986 if (target == 0)
3987 return convert_to_mode (mode, result, 0);
3988 convert_move (target, result, 0);
3989 return target;
3990 }
3991
3992 /* Expand the library call ourselves using a stabilized argument
3993 list to avoid re-evaluating the function's arguments twice. */
3994 #ifdef HAVE_cmpstrnsi
3995 do_libcall:
3996 #endif
3997 fndecl = get_callee_fndecl (exp);
3998 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3999 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4000 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4001 return expand_call (fn, target, target == const0_rtx);
4002 }
4003 #endif
4004 return NULL_RTX;
4005 }
4006
4007 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4008 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4009 try to get the result in TARGET, if convenient. */
4010
4011 static rtx
4012 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4013 ATTRIBUTE_UNUSED enum machine_mode mode)
4014 {
4015 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4016
4017 if (!validate_arglist (exp,
4018 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4019 return NULL_RTX;
4020
4021 /* If c_strlen can determine an expression for one of the string
4022 lengths, and it doesn't have side effects, then emit cmpstrnsi
4023 using length MIN(strlen(string)+1, arg3). */
4024 #ifdef HAVE_cmpstrnsi
4025 if (HAVE_cmpstrnsi)
4026 {
4027 tree len, len1, len2;
4028 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4029 rtx result, insn;
4030 tree fndecl, fn;
4031 tree arg1 = CALL_EXPR_ARG (exp, 0);
4032 tree arg2 = CALL_EXPR_ARG (exp, 1);
4033 tree arg3 = CALL_EXPR_ARG (exp, 2);
4034
4035 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4036 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4037 enum machine_mode insn_mode
4038 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4039
4040 len1 = c_strlen (arg1, 1);
4041 len2 = c_strlen (arg2, 1);
4042
4043 if (len1)
4044 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4045 if (len2)
4046 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4047
4048 /* If we don't have a constant length for the first, use the length
4049 of the second, if we know it. We don't require a constant for
4050 this case; some cost analysis could be done if both are available
4051 but neither is constant. For now, assume they're equally cheap,
4052 unless one has side effects. If both strings have constant lengths,
4053 use the smaller. */
4054
4055 if (!len1)
4056 len = len2;
4057 else if (!len2)
4058 len = len1;
4059 else if (TREE_SIDE_EFFECTS (len1))
4060 len = len2;
4061 else if (TREE_SIDE_EFFECTS (len2))
4062 len = len1;
4063 else if (TREE_CODE (len1) != INTEGER_CST)
4064 len = len2;
4065 else if (TREE_CODE (len2) != INTEGER_CST)
4066 len = len1;
4067 else if (tree_int_cst_lt (len1, len2))
4068 len = len1;
4069 else
4070 len = len2;
4071
4072 /* If both arguments have side effects, we cannot optimize. */
4073 if (!len || TREE_SIDE_EFFECTS (len))
4074 return NULL_RTX;
4075
4076 /* The actual new length parameter is MIN(len,arg3). */
4077 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4078 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4079
4080 /* If we don't have POINTER_TYPE, call the function. */
4081 if (arg1_align == 0 || arg2_align == 0)
4082 return NULL_RTX;
4083
4084 /* Make a place to write the result of the instruction. */
4085 result = target;
4086 if (! (result != 0
4087 && REG_P (result) && GET_MODE (result) == insn_mode
4088 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4089 result = gen_reg_rtx (insn_mode);
4090
4091 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4092 arg1 = builtin_save_expr (arg1);
4093 arg2 = builtin_save_expr (arg2);
4094 len = builtin_save_expr (len);
4095
4096 arg1_rtx = get_memory_rtx (arg1, len);
4097 arg2_rtx = get_memory_rtx (arg2, len);
4098 arg3_rtx = expand_normal (len);
4099 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4100 GEN_INT (MIN (arg1_align, arg2_align)));
4101 if (insn)
4102 {
4103 emit_insn (insn);
4104
4105 /* Return the value in the proper mode for this function. */
4106 mode = TYPE_MODE (TREE_TYPE (exp));
4107 if (GET_MODE (result) == mode)
4108 return result;
4109 if (target == 0)
4110 return convert_to_mode (mode, result, 0);
4111 convert_move (target, result, 0);
4112 return target;
4113 }
4114
4115 /* Expand the library call ourselves using a stabilized argument
4116 list to avoid re-evaluating the function's arguments twice. */
4117 fndecl = get_callee_fndecl (exp);
4118 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4119 arg1, arg2, len);
4120 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4121 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4122 return expand_call (fn, target, target == const0_rtx);
4123 }
4124 #endif
4125 return NULL_RTX;
4126 }
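
/* Illustration (not from the GCC sources) of the length computation
   above: bytes past the first NUL cannot influence the result, so
   only MIN (strlen (s) + 1, n) bytes need to be compared.  */
#if 0
/* strncmp ("abc", s, 100) compares at most MIN (3 + 1, 100) == 4
   bytes, so the cmpstrnsi pattern is handed length 4, not 100.  */
#endif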
4127
4128 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4129 if that's convenient. */
4130
4131 rtx
4132 expand_builtin_saveregs (void)
4133 {
4134 rtx val, seq;
4135
4136 /* Don't do __builtin_saveregs more than once in a function.
4137 Save the result of the first call and reuse it. */
4138 if (saveregs_value != 0)
4139 return saveregs_value;
4140
4141 /* When this function is called, it means that registers must be
4142 saved on entry to this function. So we migrate the call to the
4143 first insn of this function. */
4144
4145 start_sequence ();
4146
4147 /* Do whatever the machine needs done in this case. */
4148 val = targetm.calls.expand_builtin_saveregs ();
4149
4150 seq = get_insns ();
4151 end_sequence ();
4152
4153 saveregs_value = val;
4154
4155 /* Put the insns after the NOTE that starts the function. If this
4156 is inside a start_sequence, make the outer-level insn chain current, so
4157 the code is placed at the start of the function. */
4158 push_topmost_sequence ();
4159 emit_insn_after (seq, entry_of_function ());
4160 pop_topmost_sequence ();
4161
4162 return val;
4163 }
4164
4165 /* Expand a call to __builtin_next_arg. */
4166
4167 static rtx
4168 expand_builtin_next_arg (void)
4169 {
4170 /* Checking arguments is already done in fold_builtin_next_arg
4171 that must be called before this function. */
4172 return expand_binop (ptr_mode, add_optab,
4173 crtl->args.internal_arg_pointer,
4174 crtl->args.arg_offset_rtx,
4175 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4176 }
4177
4178 /* Make it easier for the backends by protecting the valist argument
4179 from multiple evaluations. */
4180
4181 static tree
4182 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4183 {
4184 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4185
4186 /* The current way of determining the type of valist is completely
4187 bogus. We should have the information on the va builtin instead. */
4188 if (!vatype)
4189 vatype = targetm.fn_abi_va_list (cfun->decl);
4190
4191 if (TREE_CODE (vatype) == ARRAY_TYPE)
4192 {
4193 if (TREE_SIDE_EFFECTS (valist))
4194 valist = save_expr (valist);
4195
4196 /* For this case, the backends will be expecting a pointer to
4197 vatype, but it's possible we've actually been given an array
4198 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4199 So fix it. */
4200 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4201 {
4202 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4203 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4204 }
4205 }
4206 else
4207 {
4208 tree pt = build_pointer_type (vatype);
4209
4210 if (! needs_lvalue)
4211 {
4212 if (! TREE_SIDE_EFFECTS (valist))
4213 return valist;
4214
4215 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4216 TREE_SIDE_EFFECTS (valist) = 1;
4217 }
4218
4219 if (TREE_SIDE_EFFECTS (valist))
4220 valist = save_expr (valist);
4221 valist = fold_build2_loc (loc, MEM_REF,
4222 vatype, valist, build_int_cst (pt, 0));
4223 }
4224
4225 return valist;
4226 }
4227
4228 /* The "standard" definition of va_list is void*. */
4229
4230 tree
4231 std_build_builtin_va_list (void)
4232 {
4233 return ptr_type_node;
4234 }
4235
4236 /* The "standard" abi va_list is va_list_type_node. */
4237
4238 tree
4239 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4240 {
4241 return va_list_type_node;
4242 }
4243
4244 /* The "standard" type of va_list is va_list_type_node. */
4245
4246 tree
4247 std_canonical_va_list_type (tree type)
4248 {
4249 tree wtype, htype;
4250
4251 if (INDIRECT_REF_P (type))
4252 type = TREE_TYPE (type);
4253 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4254 type = TREE_TYPE (type);
4255 wtype = va_list_type_node;
4256 htype = type;
4257 /* Treat structure va_list types. */
4258 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4259 htype = TREE_TYPE (htype);
4260 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4261 {
4262 /* If va_list is an array type, the argument may have decayed
4263 to a pointer type, e.g. by being passed to another function.
4264 In that case, unwrap both types so that we can compare the
4265 underlying records. */
4266 if (TREE_CODE (htype) == ARRAY_TYPE
4267 || POINTER_TYPE_P (htype))
4268 {
4269 wtype = TREE_TYPE (wtype);
4270 htype = TREE_TYPE (htype);
4271 }
4272 }
4273 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4274 return va_list_type_node;
4275
4276 return NULL_TREE;
4277 }
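
/* Illustration (a target-specific detail, stated as an assumption):
   on targets such as x86-64, va_list is an array of one structure,
   so it decays to a pointer when passed to another function; the
   unwrapping above lets both forms compare equal.  The names below
   are hypothetical.  */
#if 0
struct tag { int gp_offset; /* ... */ };
typedef struct tag example_va_list[1];   /* array type ...  */
void f (example_va_list ap);             /* ... decays to struct tag *
                                            in a parameter list.  */
#endif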
4278
4279 /* The "standard" implementation of va_start: just assign `nextarg' to
4280 the variable. */
4281
4282 void
4283 std_expand_builtin_va_start (tree valist, rtx nextarg)
4284 {
4285 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4286 convert_move (va_r, nextarg, 0);
4287 }
4288
4289 /* Expand EXP, a call to __builtin_va_start. */
4290
4291 static rtx
4292 expand_builtin_va_start (tree exp)
4293 {
4294 rtx nextarg;
4295 tree valist;
4296 location_t loc = EXPR_LOCATION (exp);
4297
4298 if (call_expr_nargs (exp) < 2)
4299 {
4300 error_at (loc, "too few arguments to function %<va_start%>");
4301 return const0_rtx;
4302 }
4303
4304 if (fold_builtin_next_arg (exp, true))
4305 return const0_rtx;
4306
4307 nextarg = expand_builtin_next_arg ();
4308 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4309
4310 if (targetm.expand_builtin_va_start)
4311 targetm.expand_builtin_va_start (valist, nextarg);
4312 else
4313 std_expand_builtin_va_start (valist, nextarg);
4314
4315 return const0_rtx;
4316 }
4317
4318 /* Expand EXP, a call to __builtin_va_end. */
4319
4320 static rtx
4321 expand_builtin_va_end (tree exp)
4322 {
4323 tree valist = CALL_EXPR_ARG (exp, 0);
4324
4325 /* Evaluate for side effects, if needed. I hate macros that don't
4326 do that. */
4327 if (TREE_SIDE_EFFECTS (valist))
4328 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4329
4330 return const0_rtx;
4331 }
4332
4333 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4334 builtin rather than just as an assignment in stdarg.h because of the
4335 nastiness of array-type va_list types. */
4336
4337 static rtx
4338 expand_builtin_va_copy (tree exp)
4339 {
4340 tree dst, src, t;
4341 location_t loc = EXPR_LOCATION (exp);
4342
4343 dst = CALL_EXPR_ARG (exp, 0);
4344 src = CALL_EXPR_ARG (exp, 1);
4345
4346 dst = stabilize_va_list_loc (loc, dst, 1);
4347 src = stabilize_va_list_loc (loc, src, 0);
4348
4349 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4350
4351 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4352 {
4353 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4354 TREE_SIDE_EFFECTS (t) = 1;
4355 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4356 }
4357 else
4358 {
4359 rtx dstb, srcb, size;
4360
4361 /* Evaluate to pointers. */
4362 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4363 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4364 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4365 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4366
4367 dstb = convert_memory_address (Pmode, dstb);
4368 srcb = convert_memory_address (Pmode, srcb);
4369
4370 /* "Dereference" to BLKmode memories. */
4371 dstb = gen_rtx_MEM (BLKmode, dstb);
4372 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4373 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4374 srcb = gen_rtx_MEM (BLKmode, srcb);
4375 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4376 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4377
4378 /* Copy. */
4379 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4380 }
4381
4382 return const0_rtx;
4383 }
4384
4385 /* Expand a call to one of the builtin functions __builtin_frame_address or
4386 __builtin_return_address. */
4387
4388 static rtx
4389 expand_builtin_frame_address (tree fndecl, tree exp)
4390 {
4391 /* The argument must be a nonnegative integer constant.
4392 It counts the number of frames to scan up the stack.
4393 The value is the return address saved in that frame. */
4394 if (call_expr_nargs (exp) == 0)
4395 /* Warning about missing arg was already issued. */
4396 return const0_rtx;
4397 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4398 {
4399 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4400 error ("invalid argument to %<__builtin_frame_address%>");
4401 else
4402 error ("invalid argument to %<__builtin_return_address%>");
4403 return const0_rtx;
4404 }
4405 else
4406 {
4407 rtx tem
4408 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4409 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4410
4411 /* Some ports cannot access arbitrary stack frames. */
4412 if (tem == NULL)
4413 {
4414 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4415 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4416 else
4417 warning (0, "unsupported argument to %<__builtin_return_address%>");
4418 return const0_rtx;
4419 }
4420
4421 /* For __builtin_frame_address, return what we've got. */
4422 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4423 return tem;
4424
4425 if (!REG_P (tem)
4426 && ! CONSTANT_P (tem))
4427 tem = copy_addr_to_reg (tem);
4428 return tem;
4429 }
4430 }
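
/* Usage sketch (not part of the original source): the argument is a
   constant frame count, with 0 meaning the current frame.  */
#if 0
void *caller_pc  = __builtin_return_address (0);  /* this frame's
                                                     return address  */
void *this_frame = __builtin_frame_address (0);
#endif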
4431
4432 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4433 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4434 is the same as for allocate_dynamic_stack_space. */
4435
4436 static rtx
4437 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4438 {
4439 rtx op0;
4440 rtx result;
4441 bool valid_arglist;
4442 unsigned int align;
4443 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4444 == BUILT_IN_ALLOCA_WITH_ALIGN);
4445
4446 valid_arglist
4447 = (alloca_with_align
4448 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4449 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4450
4451 if (!valid_arglist)
4452 return NULL_RTX;
4453
4454 /* Compute the argument. */
4455 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4456
4457 /* Compute the alignment. */
4458 align = (alloca_with_align
4459 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4460 : BIGGEST_ALIGNMENT);
4461
4462 /* Allocate the desired space. */
4463 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4464 result = convert_memory_address (ptr_mode, result);
4465
4466 return result;
4467 }
4468
4469 /* Expand a call to bswap builtin in EXP.
4470 Return NULL_RTX if a normal call should be emitted rather than expanding the
4471 function in-line. If convenient, the result should be placed in TARGET.
4472 SUBTARGET may be used as the target for computing one of EXP's operands. */
4473
4474 static rtx
4475 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4476 rtx subtarget)
4477 {
4478 tree arg;
4479 rtx op0;
4480
4481 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4482 return NULL_RTX;
4483
4484 arg = CALL_EXPR_ARG (exp, 0);
4485 op0 = expand_expr (arg,
4486 subtarget && GET_MODE (subtarget) == target_mode
4487 ? subtarget : NULL_RTX,
4488 target_mode, EXPAND_NORMAL);
4489 if (GET_MODE (op0) != target_mode)
4490 op0 = convert_to_mode (target_mode, op0, 1);
4491
4492 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4493
4494 gcc_assert (target);
4495
4496 return convert_to_mode (target_mode, target, 1);
4497 }
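
/* Usage sketch (not part of the original source) of the bswap_optab
   expansion above, which reverses the operand's byte order.  */
#if 0
/* __builtin_bswap32 (0x12345678) == 0x78563412  */
#endif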
4498
4499 /* Expand a call to a unary builtin in EXP.
4500 Return NULL_RTX if a normal call should be emitted rather than expanding the
4501 function in-line. If convenient, the result should be placed in TARGET.
4502 SUBTARGET may be used as the target for computing one of EXP's operands. */
4503
4504 static rtx
4505 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4506 rtx subtarget, optab op_optab)
4507 {
4508 rtx op0;
4509
4510 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4511 return NULL_RTX;
4512
4513 /* Compute the argument. */
4514 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4515 (subtarget
4516 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4517 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4518 VOIDmode, EXPAND_NORMAL);
4519 /* Compute op, into TARGET if possible.
4520 Set TARGET to wherever the result comes back. */
4521 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4522 op_optab, op0, target, op_optab != clrsb_optab);
4523 gcc_assert (target);
4524
4525 return convert_to_mode (target_mode, target, 0);
4526 }
4527
4528 /* Expand a call to __builtin_expect. We just return our argument
4529 as the builtin_expect semantics should already have been applied by
4530 the tree branch prediction pass. */
4531
4532 static rtx
4533 expand_builtin_expect (tree exp, rtx target)
4534 {
4535 tree arg;
4536
4537 if (call_expr_nargs (exp) < 2)
4538 return const0_rtx;
4539 arg = CALL_EXPR_ARG (exp, 0);
4540
4541 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4542 /* When guessing was done, the hints should be already stripped away. */
4543 gcc_assert (!flag_guess_branch_prob
4544 || optimize == 0 || seen_error ());
4545 return target;
4546 }
4547
4548 /* Expand a call to __builtin_assume_aligned. We just return our first
4549 argument, as the builtin_assume_aligned semantics should already have
4550 been applied by CCP. */
4551
4552 static rtx
4553 expand_builtin_assume_aligned (tree exp, rtx target)
4554 {
4555 if (call_expr_nargs (exp) < 2)
4556 return const0_rtx;
4557 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4558 EXPAND_NORMAL);
4559 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4560 && (call_expr_nargs (exp) < 3
4561 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4562 return target;
4563 }
4564
4565 void
4566 expand_builtin_trap (void)
4567 {
4568 #ifdef HAVE_trap
4569 if (HAVE_trap)
4570 {
4571 rtx insn = emit_insn (gen_trap ());
4572 /* For trap insns when not accumulating outgoing args force
4573 REG_ARGS_SIZE note to prevent crossjumping of calls with
4574 different args sizes. */
4575 if (!ACCUMULATE_OUTGOING_ARGS)
4576 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4577 }
4578 else
4579 #endif
4580 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4581 emit_barrier ();
4582 }
4583
4584 /* Expand a call to __builtin_unreachable. We do nothing except emit
4585 a barrier saying that control flow will not pass here.
4586
4587 It is the responsibility of the program being compiled to ensure
4588 that control flow never reaches __builtin_unreachable. */
4589 static void
4590 expand_builtin_unreachable (void)
4591 {
4592 emit_barrier ();
4593 }
4594
4595 /* Expand EXP, a call to fabs, fabsf or fabsl.
4596 Return NULL_RTX if a normal call should be emitted rather than expanding
4597 the function inline. If convenient, the result should be placed
4598 in TARGET. SUBTARGET may be used as the target for computing
4599 the operand. */
4600
4601 static rtx
4602 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4603 {
4604 enum machine_mode mode;
4605 tree arg;
4606 rtx op0;
4607
4608 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4609 return NULL_RTX;
4610
4611 arg = CALL_EXPR_ARG (exp, 0);
4612 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4613 mode = TYPE_MODE (TREE_TYPE (arg));
4614 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4615 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4616 }
4617
4618 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4619 Return NULL if a normal call should be emitted rather than expanding the
4620 function inline. If convenient, the result should be placed in TARGET.
4621 SUBTARGET may be used as the target for computing the operand. */
4622
4623 static rtx
4624 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4625 {
4626 rtx op0, op1;
4627 tree arg;
4628
4629 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4630 return NULL_RTX;
4631
4632 arg = CALL_EXPR_ARG (exp, 0);
4633 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4634
4635 arg = CALL_EXPR_ARG (exp, 1);
4636 op1 = expand_normal (arg);
4637
4638 return expand_copysign (op0, op1, target);
4639 }
4640
4641 /* Create a new constant string literal and return a char* pointer to it.
4642 The STRING_CST value is the LEN characters at STR. */
4643 tree
4644 build_string_literal (int len, const char *str)
4645 {
4646 tree t, elem, index, type;
4647
4648 t = build_string (len, str);
4649 elem = build_type_variant (char_type_node, 1, 0);
4650 index = build_index_type (size_int (len - 1));
4651 type = build_array_type (elem, index);
4652 TREE_TYPE (t) = type;
4653 TREE_CONSTANT (t) = 1;
4654 TREE_READONLY (t) = 1;
4655 TREE_STATIC (t) = 1;
4656
4657 type = build_pointer_type (elem);
4658 t = build1 (ADDR_EXPR, type,
4659 build4 (ARRAY_REF, elem,
4660 t, integer_zero_node, NULL_TREE, NULL_TREE));
4661 return t;
4662 }
4663
4664 /* Expand a call to __builtin___clear_cache. */
4665
4666 static rtx
4667 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4668 {
4669 #ifndef HAVE_clear_cache
4670 #ifdef CLEAR_INSN_CACHE
4671 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4672 does something. Just do the default expansion to a call to
4673 __clear_cache(). */
4674 return NULL_RTX;
4675 #else
4676 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4677 does nothing. There is no need to call it. Do nothing. */
4678 return const0_rtx;
4679 #endif /* CLEAR_INSN_CACHE */
4680 #else
4681 /* We have a "clear_cache" insn, and it will handle everything. */
4682 tree begin, end;
4683 rtx begin_rtx, end_rtx;
4684
4685 /* We must not expand to a library call. If we did, any
4686 fallback library function in libgcc that might contain a call to
4687 __builtin___clear_cache() would recurse infinitely. */
4688 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4689 {
4690 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4691 return const0_rtx;
4692 }
4693
4694 if (HAVE_clear_cache)
4695 {
4696 struct expand_operand ops[2];
4697
4698 begin = CALL_EXPR_ARG (exp, 0);
4699 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4700
4701 end = CALL_EXPR_ARG (exp, 1);
4702 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4703
4704 create_address_operand (&ops[0], begin_rtx);
4705 create_address_operand (&ops[1], end_rtx);
4706 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4707 return const0_rtx;
4708 }
4709 return const0_rtx;
4710 #endif /* HAVE_clear_cache */
4711 }
4712
4713 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4714
4715 static rtx
4716 round_trampoline_addr (rtx tramp)
4717 {
4718 rtx temp, addend, mask;
4719
4720 /* If we don't need too much alignment, we'll have been guaranteed
4721 proper alignment by get_trampoline_type. */
4722 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4723 return tramp;
4724
4725 /* Round address up to desired boundary. */
4726 temp = gen_reg_rtx (Pmode);
4727 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4728 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4729
4730 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4731 temp, 0, OPTAB_LIB_WIDEN);
4732 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4733 temp, 0, OPTAB_LIB_WIDEN);
4734
4735 return tramp;
4736 }
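
/* The two binops above implement the usual power-of-two round-up; in
   ordinary C (ALIGN is a hypothetical byte alignment, a power of two,
   and <stdint.h> is assumed):  */
#if 0
uintptr_t rounded = (addr + (ALIGN - 1)) & -(uintptr_t) ALIGN;
#endif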
4737
4738 static rtx
4739 expand_builtin_init_trampoline (tree exp, bool onstack)
4740 {
4741 tree t_tramp, t_func, t_chain;
4742 rtx m_tramp, r_tramp, r_chain, tmp;
4743
4744 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4745 POINTER_TYPE, VOID_TYPE))
4746 return NULL_RTX;
4747
4748 t_tramp = CALL_EXPR_ARG (exp, 0);
4749 t_func = CALL_EXPR_ARG (exp, 1);
4750 t_chain = CALL_EXPR_ARG (exp, 2);
4751
4752 r_tramp = expand_normal (t_tramp);
4753 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4754 MEM_NOTRAP_P (m_tramp) = 1;
4755
4756 /* If ONSTACK, the TRAMP argument should be the address of a field
4757 within the local function's FRAME decl. Either way, let's see if
4758 we can fill in the MEM_ATTRs for this memory. */
4759 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4760 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4761
4762 /* Creator of a heap trampoline is responsible for making sure the
4763 address is aligned to at least STACK_BOUNDARY. Normally malloc
4764 will ensure this anyhow. */
4765 tmp = round_trampoline_addr (r_tramp);
4766 if (tmp != r_tramp)
4767 {
4768 m_tramp = change_address (m_tramp, BLKmode, tmp);
4769 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4770 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4771 }
4772
4773 /* The FUNC argument should be the address of the nested function.
4774 Extract the actual function decl to pass to the hook. */
4775 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4776 t_func = TREE_OPERAND (t_func, 0);
4777 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4778
4779 r_chain = expand_normal (t_chain);
4780
4781 /* Generate insns to initialize the trampoline. */
4782 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4783
4784 if (onstack)
4785 {
4786 trampolines_created = 1;
4787
4788 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4789 "trampoline generated for nested function %qD", t_func);
4790 }
4791
4792 return const0_rtx;
4793 }
4794
4795 static rtx
4796 expand_builtin_adjust_trampoline (tree exp)
4797 {
4798 rtx tramp;
4799
4800 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4801 return NULL_RTX;
4802
4803 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4804 tramp = round_trampoline_addr (tramp);
4805 if (targetm.calls.trampoline_adjust_address)
4806 tramp = targetm.calls.trampoline_adjust_address (tramp);
4807
4808 return tramp;
4809 }
4810
4811 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4812 function. The function first checks whether the back end provides
4813 an insn to implement signbit for the respective mode. If not, it
4814 checks whether the floating point format of the value is such that
4815 the sign bit can be extracted. If that is not the case, the
4816 function returns NULL_RTX to indicate that a normal call should be
4817 emitted rather than expanding the function in-line. EXP is the
4818 expression that is a call to the builtin function; if convenient,
4819 the result should be placed in TARGET. */
4820 static rtx
4821 expand_builtin_signbit (tree exp, rtx target)
4822 {
4823 const struct real_format *fmt;
4824 enum machine_mode fmode, imode, rmode;
4825 tree arg;
4826 int word, bitpos;
4827 enum insn_code icode;
4828 rtx temp;
4829 location_t loc = EXPR_LOCATION (exp);
4830
4831 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4832 return NULL_RTX;
4833
4834 arg = CALL_EXPR_ARG (exp, 0);
4835 fmode = TYPE_MODE (TREE_TYPE (arg));
4836 rmode = TYPE_MODE (TREE_TYPE (exp));
4837 fmt = REAL_MODE_FORMAT (fmode);
4838
4839 arg = builtin_save_expr (arg);
4840
4841 /* Expand the argument yielding an RTX expression. */
4842 temp = expand_normal (arg);
4843
4844 /* Check if the back end provides an insn that handles signbit for the
4845 argument's mode. */
4846 icode = optab_handler (signbit_optab, fmode);
4847 if (icode != CODE_FOR_nothing)
4848 {
4849 rtx last = get_last_insn ();
4850 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4851 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4852 return target;
4853 delete_insns_since (last);
4854 }
4855
4856 /* For floating point formats without a sign bit, implement signbit
4857 as "ARG < 0.0". */
4858 bitpos = fmt->signbit_ro;
4859 if (bitpos < 0)
4860 {
4861 /* But we can't do this if the format supports signed zero. */
4862 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4863 return NULL_RTX;
4864
4865 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4866 build_real (TREE_TYPE (arg), dconst0));
4867 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4868 }
4869
4870 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4871 {
4872 imode = int_mode_for_mode (fmode);
4873 if (imode == BLKmode)
4874 return NULL_RTX;
4875 temp = gen_lowpart (imode, temp);
4876 }
4877 else
4878 {
4879 imode = word_mode;
4880 /* Handle targets with different FP word orders. */
4881 if (FLOAT_WORDS_BIG_ENDIAN)
4882 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4883 else
4884 word = bitpos / BITS_PER_WORD;
4885 temp = operand_subword_force (temp, word, fmode);
4886 bitpos = bitpos % BITS_PER_WORD;
4887 }
4888
4889 /* Force the intermediate word_mode (or narrower) result into a
4890 register. This avoids attempting to create paradoxical SUBREGs
4891 of floating point modes below. */
4892 temp = force_reg (imode, temp);
4893
4894 /* If the bitpos is within the "result mode" lowpart, the operation
4895 can be implemented with a single bitwise AND. Otherwise, we need
4896 a right shift and an AND. */
4897
4898 if (bitpos < GET_MODE_BITSIZE (rmode))
4899 {
4900 double_int mask = double_int_zero.set_bit (bitpos);
4901
4902 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4903 temp = gen_lowpart (rmode, temp);
4904 temp = expand_binop (rmode, and_optab, temp,
4905 immed_double_int_const (mask, rmode),
4906 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4907 }
4908 else
4909 {
4910 /* Perform a logical right shift to place the signbit in the least
4911 significant bit, then truncate the result to the desired mode
4912 and mask just this bit. */
4913 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4914 temp = gen_lowpart (rmode, temp);
4915 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4916 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4917 }
4918
4919 return temp;
4920 }
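
/* Sketch (not part of GCC; IEEE single precision and <string.h>
   assumed) of the bit-extraction path above: reinterpret the value as
   an integer and test the stored sign bit.  */
#if 0
static int
signbit_float (float x)
{
  unsigned int bits;
  memcpy (&bits, &x, sizeof bits);
  return (bits >> 31) & 1;   /* signbit_ro == 31 for IEEE float.  */
}
#endif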
4921
4922 /* Expand fork or exec calls. TARGET is the desired target of the
4923 call. EXP is the call. FN is the identifier of the actual
4924 function. IGNORE is nonzero if the value is to be ignored. */
4926
4927 static rtx
4928 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4929 {
4930 tree id, decl;
4931 tree call;
4932
4933 /* If we are not profiling, just call the function. */
4934 if (!profile_arc_flag)
4935 return NULL_RTX;
4936
4937 /* Otherwise call the wrapper. This should be equivalent for the rest of
4938 the compiler, so the code does not diverge, and the wrapper may run the
4939 code necessary for keeping the profiling sane. */
4940
4941 switch (DECL_FUNCTION_CODE (fn))
4942 {
4943 case BUILT_IN_FORK:
4944 id = get_identifier ("__gcov_fork");
4945 break;
4946
4947 case BUILT_IN_EXECL:
4948 id = get_identifier ("__gcov_execl");
4949 break;
4950
4951 case BUILT_IN_EXECV:
4952 id = get_identifier ("__gcov_execv");
4953 break;
4954
4955 case BUILT_IN_EXECLP:
4956 id = get_identifier ("__gcov_execlp");
4957 break;
4958
4959 case BUILT_IN_EXECLE:
4960 id = get_identifier ("__gcov_execle");
4961 break;
4962
4963 case BUILT_IN_EXECVP:
4964 id = get_identifier ("__gcov_execvp");
4965 break;
4966
4967 case BUILT_IN_EXECVE:
4968 id = get_identifier ("__gcov_execve");
4969 break;
4970
4971 default:
4972 gcc_unreachable ();
4973 }
4974
4975 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4976 FUNCTION_DECL, id, TREE_TYPE (fn));
4977 DECL_EXTERNAL (decl) = 1;
4978 TREE_PUBLIC (decl) = 1;
4979 DECL_ARTIFICIAL (decl) = 1;
4980 TREE_NOTHROW (decl) = 1;
4981 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4982 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4983 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4984 return expand_call (call, target, ignore);
4985 }
4986
4987
4988 \f
4989 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4990 the pointer in these functions is void*, the tree optimizers may remove
4991 casts. The mode computed in expand_builtin isn't reliable either, due
4992 to __sync_bool_compare_and_swap.
4993
4994 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4995 group of builtins. This gives us log2 of the mode size. */
4996
4997 static inline enum machine_mode
4998 get_builtin_sync_mode (int fcode_diff)
4999 {
5000 /* The size is not negotiable, so ask not to get BLKmode in return
5001 if the target indicates that a smaller size would be better. */
5002 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5003 }
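
/* Illustration (assuming the _1/_2/_4/_8/_16 codes are consecutive,
   as laid out in builtins.def) of the FCODE_DIFF encoding above.  */
#if 0
/* BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2,
   so the mode has 8 << 2 == 32 bits: SImode on most targets.  */
#endif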
5004
5005 /* Expand the memory expression LOC and return the appropriate memory operand
5006 for the builtin_sync operations. */
5007
5008 static rtx
5009 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5010 {
5011 rtx addr, mem;
5012
5013 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5014 addr = convert_memory_address (Pmode, addr);
5015
5016 /* Note that we explicitly do not want any alias information for this
5017 memory, so that we kill all other live memories. Otherwise we don't
5018 satisfy the full barrier semantics of the intrinsic. */
5019 mem = validize_mem (gen_rtx_MEM (mode, addr));
5020
5021 /* The alignment needs to be at least that of the mode. */
5022 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5023 get_pointer_alignment (loc)));
5024 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5025 MEM_VOLATILE_P (mem) = 1;
5026
5027 return mem;
5028 }
5029
5030 /* Make sure an argument is in the right mode.
5031 EXP is the tree argument.
5032 MODE is the mode it should be in. */
5033
5034 static rtx
5035 expand_expr_force_mode (tree exp, enum machine_mode mode)
5036 {
5037 rtx val;
5038 enum machine_mode old_mode;
5039
5040 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5041 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5042 of CONST_INTs, where we know the old_mode only from the call argument. */
5043
5044 old_mode = GET_MODE (val);
5045 if (old_mode == VOIDmode)
5046 old_mode = TYPE_MODE (TREE_TYPE (exp));
5047 val = convert_modes (mode, old_mode, val, 1);
5048 return val;
5049 }
5050
5051
5052 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5053 EXP is the CALL_EXPR. CODE is the rtx code
5054 that corresponds to the arithmetic or logical operation from the name;
5055 an exception here is that NOT actually means NAND. TARGET is an optional
5056 place for us to store the results; AFTER is true if this is the
5057 fetch_and_xxx form. */
5058
5059 static rtx
5060 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5061 enum rtx_code code, bool after,
5062 rtx target)
5063 {
5064 rtx val, mem;
5065 location_t loc = EXPR_LOCATION (exp);
5066
5067 if (code == NOT && warn_sync_nand)
5068 {
5069 tree fndecl = get_callee_fndecl (exp);
5070 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5071
5072 static bool warned_f_a_n, warned_n_a_f;
5073
5074 switch (fcode)
5075 {
5076 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5077 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5078 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5079 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5080 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5081 if (warned_f_a_n)
5082 break;
5083
5084 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5085 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5086 warned_f_a_n = true;
5087 break;
5088
5089 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5090 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5091 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5092 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5093 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5094 if (warned_n_a_f)
5095 break;
5096
5097 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5098 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5099 warned_n_a_f = true;
5100 break;
5101
5102 default:
5103 gcc_unreachable ();
5104 }
5105 }
5106
5107 /* Expand the operands. */
5108 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5109 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5110
5111 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5112 after);
5113 }
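
/* Editor's usage sketch (not part of this file): the AFTER flag selects
   between the two user-visible __sync forms expanded above.  */

static int
sync_fetch_op_sketch (int *counter)
{
  int old_val = __sync_fetch_and_add (counter, 1);  /* AFTER == false */
  int new_val = __sync_add_and_fetch (counter, 1);  /* AFTER == true */
  return old_val + new_val;
}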
5114
5115 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5116 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5117 true if this is the boolean form. TARGET is a place for us to store the
5118 results; this is NOT optional if IS_BOOL is true. */
5119
5120 static rtx
5121 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5122 bool is_bool, rtx target)
5123 {
5124 rtx old_val, new_val, mem;
5125 rtx *pbool, *poval;
5126
5127 /* Expand the operands. */
5128 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5129 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5130 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5131
5132 pbool = poval = NULL;
5133 if (target != const0_rtx)
5134 {
5135 if (is_bool)
5136 pbool = &target;
5137 else
5138 poval = &target;
5139 }
5140 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5141 false, MEMMODEL_SEQ_CST,
5142 MEMMODEL_SEQ_CST))
5143 return NULL_RTX;
5144
5145 return target;
5146 }
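
/* Editor's usage sketch (not part of this file): the boolean form only
   reports success, while the value form returns the prior contents;
   both expand through the same compare-and-swap path above.  */

static int
sync_cas_sketch (int *p)
{
  int ok = __sync_bool_compare_and_swap (p, 0, 1);     /* is_bool == true */
  int old_val = __sync_val_compare_and_swap (p, 1, 2); /* is_bool == false */
  return ok ? old_val : -1;
}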
5147
5148 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5149 general form is actually an atomic exchange, and some targets only
5150 support a reduced form with the second argument being a constant 1.
5151 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5152 the results. */
5153
5154 static rtx
5155 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5156 rtx target)
5157 {
5158 rtx val, mem;
5159
5160 /* Expand the operands. */
5161 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5162 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5163
5164 return expand_sync_lock_test_and_set (target, mem, val);
5165 }
5166
5167 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5168
5169 static void
5170 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5171 {
5172 rtx mem;
5173
5174 /* Expand the operands. */
5175 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5176
5177 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5178 }
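
/* Editor's usage sketch (not part of this file): the classic pairing of
   the two lock builtins expanded above as a simple spinlock.  */

static void
sync_spinlock_sketch (int *lock)
{
  while (__sync_lock_test_and_set (lock, 1)) /* acquire; returns old value */
    ;                                        /* spin while already held */
  /* ... critical section ... */
  __sync_lock_release (lock);                /* release store of 0 */
}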
5179
5180 /* Given an integer representing an ``enum memmodel'', verify its
5181 correctness and return the memory model enum. */
5182
5183 static enum memmodel
5184 get_memmodel (tree exp)
5185 {
5186 rtx op;
5187 unsigned HOST_WIDE_INT val;
5188
5189 /* If the parameter is not a constant, it's a run time value so we'll just
5190 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5191 if (TREE_CODE (exp) != INTEGER_CST)
5192 return MEMMODEL_SEQ_CST;
5193
5194 op = expand_normal (exp);
5195
5196 val = INTVAL (op);
5197 if (targetm.memmodel_check)
5198 val = targetm.memmodel_check (val);
5199 else if (val & ~MEMMODEL_MASK)
5200 {
5201 warning (OPT_Winvalid_memory_model,
5202 "Unknown architecture specifier in memory model to builtin.");
5203 return MEMMODEL_SEQ_CST;
5204 }
5205
5206 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5207 {
5208 warning (OPT_Winvalid_memory_model,
5209 "invalid memory model argument to builtin");
5210 return MEMMODEL_SEQ_CST;
5211 }
5212
5213 return (enum memmodel) val;
5214 }
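
/* Editor's usage sketch (not part of this file): the C11-style constants
   the front ends pass as the memmodel argument; per the code above, a
   non-constant argument is silently treated as MEMMODEL_SEQ_CST.  */

static int
memmodel_sketch (int *p)
{
  return __atomic_load_n (p, __ATOMIC_RELAXED)    /* weakest ordering */
         + __atomic_load_n (p, __ATOMIC_SEQ_CST); /* strongest ordering */
}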
5215
5216 /* Expand the __atomic_exchange intrinsic:
5217 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5218 EXP is the CALL_EXPR.
5219 TARGET is an optional place for us to store the results. */
5220
5221 static rtx
5222 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5223 {
5224 rtx val, mem;
5225 enum memmodel model;
5226
5227 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5228 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5229 {
5230 error ("invalid memory model for %<__atomic_exchange%>");
5231 return NULL_RTX;
5232 }
5233
5234 if (!flag_inline_atomics)
5235 return NULL_RTX;
5236
5237 /* Expand the operands. */
5238 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5239 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5240
5241 return expand_atomic_exchange (target, mem, val, model);
5242 }
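
/* Editor's usage sketch (not part of this file): the _n form the front
   end resolves to the sized exchange builtins expanded above; CONSUME
   is rejected as an exchange model.  */

static int
atomic_exchange_sketch (int *p)
{
  return __atomic_exchange_n (p, 42, __ATOMIC_ACQ_REL);
}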
5243
5244 /* Expand the __atomic_compare_exchange intrinsic:
5245 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5246 TYPE desired, BOOL weak,
5247 enum memmodel success,
5248 enum memmodel failure)
5249 EXP is the CALL_EXPR.
5250 TARGET is an optional place for us to store the results. */
5251
5252 static rtx
5253 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5254 rtx target)
5255 {
5256 rtx expect, desired, mem, oldval;
5257 enum memmodel success, failure;
5258 tree weak;
5259 bool is_weak;
5260
5261 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5262 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5263
5264 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5265 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5266 {
5267 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5268 return NULL_RTX;
5269 }
5270
5271 if (failure > success)
5272 {
5273 error ("failure memory model cannot be stronger than success "
5274 "memory model for %<__atomic_compare_exchange%>");
5275 return NULL_RTX;
5276 }
5277
5278 if (!flag_inline_atomics)
5279 return NULL_RTX;
5280
5281 /* Expand the operands. */
5282 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5283
5284 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5285 expect = convert_memory_address (Pmode, expect);
5286 expect = gen_rtx_MEM (mode, expect);
5287 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5288
5289 weak = CALL_EXPR_ARG (exp, 3);
5290 is_weak = false;
5291 if (tree_fits_shwi_p (weak) && tree_low_cst (weak, 0) != 0)
5292 is_weak = true;
5293
5294 oldval = expect;
5295 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5296 &oldval, mem, oldval, desired,
5297 is_weak, success, failure))
5298 return NULL_RTX;
5299
5300 if (oldval != expect)
5301 emit_move_insn (expect, oldval);
5302
5303 return target;
5304 }
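
/* Editor's usage sketch (not part of this file): on failure the builtin
   writes the observed value back through EXPECT, which corresponds to
   the emit_move_insn into EXPECT above.  */

static int
atomic_cas_sketch (int *p)
{
  int expected = 0;
  /* weak == 0; the failure model may not be RELEASE or ACQ_REL and may
     not be stronger than the success model.  */
  if (__atomic_compare_exchange_n (p, &expected, 1, 0,
                                   __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE))
    return 1;        /* swapped */
  return expected;   /* value observed on failure */
}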
5305
5306 /* Expand the __atomic_load intrinsic:
5307 TYPE __atomic_load (TYPE *object, enum memmodel)
5308 EXP is the CALL_EXPR.
5309 TARGET is an optional place for us to store the results. */
5310
5311 static rtx
5312 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5313 {
5314 rtx mem;
5315 enum memmodel model;
5316
5317 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5318 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5319 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5320 {
5321 error ("invalid memory model for %<__atomic_load%>");
5322 return NULL_RTX;
5323 }
5324
5325 if (!flag_inline_atomics)
5326 return NULL_RTX;
5327
5328 /* Expand the operand. */
5329 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5330
5331 return expand_atomic_load (target, mem, model);
5332 }
5333
5334
5335 /* Expand the __atomic_store intrinsic:
5336 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5337 EXP is the CALL_EXPR.
5338 TARGET is an optional place for us to store the results. */
5339
5340 static rtx
5341 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5342 {
5343 rtx mem, val;
5344 enum memmodel model;
5345
5346 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5347 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5348 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5349 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5350 {
5351 error ("invalid memory model for %<__atomic_store%>");
5352 return NULL_RTX;
5353 }
5354
5355 if (!flag_inline_atomics)
5356 return NULL_RTX;
5357
5358 /* Expand the operands. */
5359 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5360 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5361
5362 return expand_atomic_store (mem, val, model, false);
5363 }
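
/* Editor's usage sketch (not part of this file): the model checks above
   mirror C11 -- a load may not use RELEASE or ACQ_REL, and a store must
   use RELAXED, RELEASE or SEQ_CST.  */

static void
atomic_load_store_sketch (int *src, int *dst)
{
  int v = __atomic_load_n (src, __ATOMIC_ACQUIRE);
  __atomic_store_n (dst, v, __ATOMIC_RELEASE);
}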
5364
5365 /* Expand the __atomic_fetch_XXX intrinsic:
5366 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5367 EXP is the CALL_EXPR.
5368 TARGET is an optional place for us to store the results.
5369 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
5370 FETCH_AFTER is true if returning the result of the operation.
5371 FETCH_AFTER is false if returning the value before the operation.
5372 IGNORE is true if the result is not used.
5373 EXT_CALL is the correct builtin for an external call if this cannot be
5374 resolved to an instruction sequence. */
5375
5376 static rtx
5377 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5378 enum rtx_code code, bool fetch_after,
5379 bool ignore, enum built_in_function ext_call)
5380 {
5381 rtx val, mem, ret;
5382 enum memmodel model;
5383 tree fndecl;
5384 tree addr;
5385
5386 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5387
5388 /* Expand the operands. */
5389 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5390 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5391
5392 /* Only try generating instructions if inlining is turned on. */
5393 if (flag_inline_atomics)
5394 {
5395 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5396 if (ret)
5397 return ret;
5398 }
5399
5400 /* Return if a different routine isn't needed for the library call. */
5401 if (ext_call == BUILT_IN_NONE)
5402 return NULL_RTX;
5403
5404 /* Change the call to the specified function. */
5405 fndecl = get_callee_fndecl (exp);
5406 addr = CALL_EXPR_FN (exp);
5407 STRIP_NOPS (addr);
5408
5409 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5410 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5411
5412 /* Expand the call here so we can emit trailing code. */
5413 ret = expand_call (exp, target, ignore);
5414
5415 /* Replace the original function just in case it matters. */
5416 TREE_OPERAND (addr, 0) = fndecl;
5417
5418 /* Then issue the arithmetic correction to return the right result. */
5419 if (!ignore)
5420 {
5421 if (code == NOT)
5422 {
5423 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5424 OPTAB_LIB_WIDEN);
5425 ret = expand_simple_unop (mode, NOT, ret, target, true);
5426 }
5427 else
5428 ret = expand_simple_binop (mode, code, ret, val, target, true,
5429 OPTAB_LIB_WIDEN);
5430 }
5431 return ret;
5432 }
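
/* Editor's sketch (not part of this file) of the trailing correction
   emitted above when only the fetch_OP library call is available:
   reconstruct the OP_fetch result from the returned old value.  For
   NAND (code == NOT) that is an AND followed by a NOT, i.e.:  */

static unsigned
nand_fetch_correction_sketch (unsigned old_val, unsigned val)
{
  return ~(old_val & val);  /* other codes are simply old_val OP val */
}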
5433
5434
5435 #ifndef HAVE_atomic_clear
5436 # define HAVE_atomic_clear 0
5437 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5438 #endif
5439
5440 /* Expand an atomic clear operation.
5441 void __atomic_clear (BOOL *obj, enum memmodel)
5442 EXP is the call expression. */
5443
5444 static rtx
5445 expand_builtin_atomic_clear (tree exp)
5446 {
5447 enum machine_mode mode;
5448 rtx mem, ret;
5449 enum memmodel model;
5450
5451 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5452 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5453 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5454
5455 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5456 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5457 {
5458 error ("invalid memory model for %<__atomic_store%>");
5459 return const0_rtx;
5460 }
5461
5462 if (HAVE_atomic_clear)
5463 {
5464 emit_insn (gen_atomic_clear (mem, model));
5465 return const0_rtx;
5466 }
5467
5468 /* Try issuing an atomic store, allowing a fallback to
5469 __sync_lock_release if the target prefers it. The only way this can
5470 fail is if the bool type is larger than a word size. Unlikely, but
5471 handle it anyway for completeness. Assume a single threaded model since
5472 there is no atomic support in this case, and no barriers are required. */
5473 ret = expand_atomic_store (mem, const0_rtx, model, true);
5474 if (!ret)
5475 emit_move_insn (mem, const0_rtx);
5476 return const0_rtx;
5477 }
5478
5479 /* Expand an atomic test_and_set operation.
5480 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5481 EXP is the call expression. */
5482
5483 static rtx
5484 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5485 {
5486 rtx mem;
5487 enum memmodel model;
5488 enum machine_mode mode;
5489
5490 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5491 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5492 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5493
5494 return expand_atomic_test_and_set (target, mem, model);
5495 }
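
/* Editor's usage sketch (not part of this file): the __atomic
   counterparts of the __sync lock primitives, using the two builtins
   expanded above.  */

static void
atomic_flag_sketch (_Bool *flag)
{
  while (__atomic_test_and_set (flag, __ATOMIC_ACQUIRE))
    ;                                  /* spin until previously clear */
  /* ... critical section ... */
  __atomic_clear (flag, __ATOMIC_RELEASE);
}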
5496
5497
5498 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5499 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5500
5501 static tree
5502 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5503 {
5504 int size;
5505 enum machine_mode mode;
5506 unsigned int mode_align, type_align;
5507
5508 if (TREE_CODE (arg0) != INTEGER_CST)
5509 return NULL_TREE;
5510
5511 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5512 mode = mode_for_size (size, MODE_INT, 0);
5513 mode_align = GET_MODE_ALIGNMENT (mode);
5514
5515 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5516 type_align = mode_align;
5517 else
5518 {
5519 tree ttype = TREE_TYPE (arg1);
5520
5521 /* This function is usually invoked and folded immediately by the front
5522 end before anything else has a chance to look at it. The pointer
5523 parameter at this point is usually cast to a void *, so check for that
5524 and look past the cast. */
5525 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5526 && VOID_TYPE_P (TREE_TYPE (ttype)))
5527 arg1 = TREE_OPERAND (arg1, 0);
5528
5529 ttype = TREE_TYPE (arg1);
5530 gcc_assert (POINTER_TYPE_P (ttype));
5531
5532 /* Get the underlying type of the object. */
5533 ttype = TREE_TYPE (ttype);
5534 type_align = TYPE_ALIGN (ttype);
5535 }
5536
5537 /* If the object has smaller alignment, the lock free routines cannot
5538 be used. */
5539 if (type_align < mode_align)
5540 return boolean_false_node;
5541
5542 /* Check if a compare_and_swap pattern exists for the mode which represents
5543 the required size. The pattern is not allowed to fail, so the existence
5544 of the pattern indicates support is present. */
5545 if (can_compare_and_swap_p (mode, true))
5546 return boolean_true_node;
5547 else
5548 return boolean_false_node;
5549 }
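
/* Editor's usage sketch (not part of this file): with a constant size
   and a null object pointer this folds to a constant here, using the
   typical alignment for the size.  */

static int
always_lock_free_sketch (void)
{
  return __atomic_always_lock_free (4, 0);  /* folds to 1 or 0 */
}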
5550
5551 /* Return true if the parameters to call EXP represent an object which will
5552 always generate lock free instructions. The first argument represents the
5553 size of the object, and the second parameter is a pointer to the object
5554 itself. If NULL is passed for the object, then the result is based on
5555 typical alignment for an object of the specified size. Otherwise return
5556 false. */
5557
5558 static rtx
5559 expand_builtin_atomic_always_lock_free (tree exp)
5560 {
5561 tree size;
5562 tree arg0 = CALL_EXPR_ARG (exp, 0);
5563 tree arg1 = CALL_EXPR_ARG (exp, 1);
5564
5565 if (TREE_CODE (arg0) != INTEGER_CST)
5566 {
5567 error ("non-constant argument 1 to __atomic_always_lock_free");
5568 return const0_rtx;
5569 }
5570
5571 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5572 if (size == boolean_true_node)
5573 return const1_rtx;
5574 return const0_rtx;
5575 }
5576
5577 /* Return boolean_true_node if it can be determined that object ARG1 of
5578 size ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
5579
5580 static tree
5581 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5582 {
5583 if (!flag_inline_atomics)
5584 return NULL_TREE;
5585
5586 /* If it isn't always lock free, don't generate a result. */
5587 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5588 return boolean_true_node;
5589
5590 return NULL_TREE;
5591 }
5592
5593 /* Return true if the parameters to call EXP represent an object which will
5594 generate lock free instructions on this target. The first argument is the
5595 size of the object, and the second parameter is a pointer to the object
5596 itself. If NULL is passed for the object, then the result is based on
5597 typical alignment for an object of the specified size. Otherwise return
5598 NULL_RTX. */
5599
5600 static rtx
5601 expand_builtin_atomic_is_lock_free (tree exp)
5602 {
5603 tree size;
5604 tree arg0 = CALL_EXPR_ARG (exp, 0);
5605 tree arg1 = CALL_EXPR_ARG (exp, 1);
5606
5607 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5608 {
5609 error ("non-integer argument 1 to __atomic_is_lock_free");
5610 return NULL_RTX;
5611 }
5612
5613 if (!flag_inline_atomics)
5614 return NULL_RTX;
5615
5616 /* If the value is known at compile time, return the RTX for it. */
5617 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5618 if (size == boolean_true_node)
5619 return const1_rtx;
5620
5621 return NULL_RTX;
5622 }
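
/* Editor's usage sketch (not part of this file): unlike the "always"
   form, an answer that is not known at compile time here becomes a
   run-time libatomic call instead of folding to a constant.  */

static int
is_lock_free_sketch (void)
{
  return __atomic_is_lock_free (sizeof (long), 0);
}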
5623
5624 /* Expand the __atomic_thread_fence intrinsic:
5625 void __atomic_thread_fence (enum memmodel)
5626 EXP is the CALL_EXPR. */
5627
5628 static void
5629 expand_builtin_atomic_thread_fence (tree exp)
5630 {
5631 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5632 expand_mem_thread_fence (model);
5633 }
5634
5635 /* Expand the __atomic_signal_fence intrinsic:
5636 void __atomic_signal_fence (enum memmodel)
5637 EXP is the CALL_EXPR. */
5638
5639 static void
5640 expand_builtin_atomic_signal_fence (tree exp)
5641 {
5642 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5643 expand_mem_signal_fence (model);
5644 }
5645
5646 /* Expand the __sync_synchronize intrinsic. */
5647
5648 static void
5649 expand_builtin_sync_synchronize (void)
5650 {
5651 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5652 }
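
/* Editor's usage sketch (not part of this file): the three fence
   builtins expanded above.  */

static void
fence_sketch (void)
{
  __atomic_thread_fence (__ATOMIC_ACQUIRE);  /* inter-thread fence */
  __atomic_signal_fence (__ATOMIC_SEQ_CST);  /* compiler-only fence */
  __sync_synchronize ();                     /* full SEQ_CST barrier */
}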
5653
5654 static rtx
5655 expand_builtin_thread_pointer (tree exp, rtx target)
5656 {
5657 enum insn_code icode;
5658 if (!validate_arglist (exp, VOID_TYPE))
5659 return const0_rtx;
5660 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5661 if (icode != CODE_FOR_nothing)
5662 {
5663 struct expand_operand op;
5664 if (!REG_P (target) || GET_MODE (target) != Pmode)
5665 target = gen_reg_rtx (Pmode);
5666 create_output_operand (&op, target, Pmode);
5667 expand_insn (icode, 1, &op);
5668 return target;
5669 }
5670 error ("__builtin_thread_pointer is not supported on this target");
5671 return const0_rtx;
5672 }
5673
5674 static void
5675 expand_builtin_set_thread_pointer (tree exp)
5676 {
5677 enum insn_code icode;
5678 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5679 return;
5680 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5681 if (icode != CODE_FOR_nothing)
5682 {
5683 struct expand_operand op;
5684 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5685 Pmode, EXPAND_NORMAL);
5686 create_input_operand (&op, val, Pmode);
5687 expand_insn (icode, 1, &op);
5688 return;
5689 }
5690 error ("__builtin_set_thread_pointer is not supported on this target");
5691 }
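
/* Editor's usage sketch (not part of this file): on targets providing
   the optab, the thread pointer is exposed directly; the set form is
   analogous and takes the new pointer value.  */

static void *
thread_pointer_sketch (void)
{
  return __builtin_thread_pointer ();
}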
5692
5693 \f
5694 /* Emit code to restore the current value of stack. */
5695
5696 static void
5697 expand_stack_restore (tree var)
5698 {
5699 rtx prev, sa = expand_normal (var);
5700
5701 sa = convert_memory_address (Pmode, sa);
5702
5703 prev = get_last_insn ();
5704 emit_stack_restore (SAVE_BLOCK, sa);
5705 fixup_args_size_notes (prev, get_last_insn (), 0);
5706 }
5707
5708
5709 /* Emit code to save the current value of stack. */
5710
5711 static rtx
5712 expand_stack_save (void)
5713 {
5714 rtx ret = NULL_RTX;
5715
5716 do_pending_stack_adjust ();
5717 emit_stack_save (SAVE_BLOCK, &ret);
5718 return ret;
5719 }
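
/* Editor's sketch (not part of this file): these stub builtins are
   introduced by the compiler itself, e.g. around a variable-length
   array scope, roughly:  */

static void
vla_scope_sketch (int n)
{
  {
    char buf[n];   /* a stack_save is emitted on scope entry ...  */
    buf[0] = 0;
  }                /* ... and a stack_restore on scope exit */
}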
5720
5721 /* Expand an expression EXP that calls a built-in function,
5722 with result going to TARGET if that's convenient
5723 (and in mode MODE if that's convenient).
5724 SUBTARGET may be used as the target for computing one of EXP's operands.
5725 IGNORE is nonzero if the value is to be ignored. */
5726
5727 rtx
5728 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5729 int ignore)
5730 {
5731 tree fndecl = get_callee_fndecl (exp);
5732 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5733 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5734 int flags;
5735
5736 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5737 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5738
5739 /* When not optimizing, generate calls to library functions for a certain
5740 set of builtins. */
5741 if (!optimize
5742 && !called_as_built_in (fndecl)
5743 && fcode != BUILT_IN_FORK
5744 && fcode != BUILT_IN_EXECL
5745 && fcode != BUILT_IN_EXECV
5746 && fcode != BUILT_IN_EXECLP
5747 && fcode != BUILT_IN_EXECLE
5748 && fcode != BUILT_IN_EXECVP
5749 && fcode != BUILT_IN_EXECVE
5750 && fcode != BUILT_IN_ALLOCA
5751 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5752 && fcode != BUILT_IN_FREE
5753 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5754 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5755 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5756 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5757 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5758 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5759 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5760 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5761 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5762 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5763 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND)
5764 return expand_call (exp, target, ignore);
5765
5766 /* The built-in function expanders test for target == const0_rtx
5767 to determine whether the function's result will be ignored. */
5768 if (ignore)
5769 target = const0_rtx;
5770
5771 /* If the result of a pure or const built-in function is ignored, and
5772 none of its arguments are volatile, we can avoid expanding the
5773 built-in call and just evaluate the arguments for side-effects. */
5774 if (target == const0_rtx
5775 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5776 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5777 {
5778 bool volatilep = false;
5779 tree arg;
5780 call_expr_arg_iterator iter;
5781
5782 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5783 if (TREE_THIS_VOLATILE (arg))
5784 {
5785 volatilep = true;
5786 break;
5787 }
5788
5789 if (! volatilep)
5790 {
5791 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5792 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5793 return const0_rtx;
5794 }
5795 }
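
/* Editor's illustration (not part of this file): given a pure builtin
   whose result is unused, only the arguments' side effects survive the
   path above, e.g.

       __builtin_strlen (s[i++]);

   expands to just the increment of i (s and i being hypothetical
   names for this sketch). */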
5796
5797 switch (fcode)
5798 {
5799 CASE_FLT_FN (BUILT_IN_FABS):
5800 case BUILT_IN_FABSD32:
5801 case BUILT_IN_FABSD64:
5802 case BUILT_IN_FABSD128:
5803 target = expand_builtin_fabs (exp, target, subtarget);
5804 if (target)
5805 return target;
5806 break;
5807
5808 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5809 target = expand_builtin_copysign (exp, target, subtarget);
5810 if (target)
5811 return target;
5812 break;
5813
5814 /* Just do a normal library call if we were unable to fold
5815 the values. */
5816 CASE_FLT_FN (BUILT_IN_CABS):
5817 break;
5818
5819 CASE_FLT_FN (BUILT_IN_EXP):
5820 CASE_FLT_FN (BUILT_IN_EXP10):
5821 CASE_FLT_FN (BUILT_IN_POW10):
5822 CASE_FLT_FN (BUILT_IN_EXP2):
5823 CASE_FLT_FN (BUILT_IN_EXPM1):
5824 CASE_FLT_FN (BUILT_IN_LOGB):
5825 CASE_FLT_FN (BUILT_IN_LOG):
5826 CASE_FLT_FN (BUILT_IN_LOG10):
5827 CASE_FLT_FN (BUILT_IN_LOG2):
5828 CASE_FLT_FN (BUILT_IN_LOG1P):
5829 CASE_FLT_FN (BUILT_IN_TAN):
5830 CASE_FLT_FN (BUILT_IN_ASIN):
5831 CASE_FLT_FN (BUILT_IN_ACOS):
5832 CASE_FLT_FN (BUILT_IN_ATAN):
5833 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5834 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5835 because of possible accuracy problems. */
5836 if (! flag_unsafe_math_optimizations)
5837 break;
5838 CASE_FLT_FN (BUILT_IN_SQRT):
5839 CASE_FLT_FN (BUILT_IN_FLOOR):
5840 CASE_FLT_FN (BUILT_IN_CEIL):
5841 CASE_FLT_FN (BUILT_IN_TRUNC):
5842 CASE_FLT_FN (BUILT_IN_ROUND):
5843 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5844 CASE_FLT_FN (BUILT_IN_RINT):
5845 target = expand_builtin_mathfn (exp, target, subtarget);
5846 if (target)
5847 return target;
5848 break;
5849
5850 CASE_FLT_FN (BUILT_IN_FMA):
5851 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5852 if (target)
5853 return target;
5854 break;
5855
5856 CASE_FLT_FN (BUILT_IN_ILOGB):
5857 if (! flag_unsafe_math_optimizations)
5858 break;
5859 CASE_FLT_FN (BUILT_IN_ISINF):
5860 CASE_FLT_FN (BUILT_IN_FINITE):
5861 case BUILT_IN_ISFINITE:
5862 case BUILT_IN_ISNORMAL:
5863 target = expand_builtin_interclass_mathfn (exp, target);
5864 if (target)
5865 return target;
5866 break;
5867
5868 CASE_FLT_FN (BUILT_IN_ICEIL):
5869 CASE_FLT_FN (BUILT_IN_LCEIL):
5870 CASE_FLT_FN (BUILT_IN_LLCEIL):
5871 CASE_FLT_FN (BUILT_IN_LFLOOR):
5872 CASE_FLT_FN (BUILT_IN_IFLOOR):
5873 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5874 target = expand_builtin_int_roundingfn (exp, target);
5875 if (target)
5876 return target;
5877 break;
5878
5879 CASE_FLT_FN (BUILT_IN_IRINT):
5880 CASE_FLT_FN (BUILT_IN_LRINT):
5881 CASE_FLT_FN (BUILT_IN_LLRINT):
5882 CASE_FLT_FN (BUILT_IN_IROUND):
5883 CASE_FLT_FN (BUILT_IN_LROUND):
5884 CASE_FLT_FN (BUILT_IN_LLROUND):
5885 target = expand_builtin_int_roundingfn_2 (exp, target);
5886 if (target)
5887 return target;
5888 break;
5889
5890 CASE_FLT_FN (BUILT_IN_POWI):
5891 target = expand_builtin_powi (exp, target);
5892 if (target)
5893 return target;
5894 break;
5895
5896 CASE_FLT_FN (BUILT_IN_ATAN2):
5897 CASE_FLT_FN (BUILT_IN_LDEXP):
5898 CASE_FLT_FN (BUILT_IN_SCALB):
5899 CASE_FLT_FN (BUILT_IN_SCALBN):
5900 CASE_FLT_FN (BUILT_IN_SCALBLN):
5901 if (! flag_unsafe_math_optimizations)
5902 break;
5903
5904 CASE_FLT_FN (BUILT_IN_FMOD):
5905 CASE_FLT_FN (BUILT_IN_REMAINDER):
5906 CASE_FLT_FN (BUILT_IN_DREM):
5907 CASE_FLT_FN (BUILT_IN_POW):
5908 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5909 if (target)
5910 return target;
5911 break;
5912
5913 CASE_FLT_FN (BUILT_IN_CEXPI):
5914 target = expand_builtin_cexpi (exp, target);
5915 gcc_assert (target);
5916 return target;
5917
5918 CASE_FLT_FN (BUILT_IN_SIN):
5919 CASE_FLT_FN (BUILT_IN_COS):
5920 if (! flag_unsafe_math_optimizations)
5921 break;
5922 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5923 if (target)
5924 return target;
5925 break;
5926
5927 CASE_FLT_FN (BUILT_IN_SINCOS):
5928 if (! flag_unsafe_math_optimizations)
5929 break;
5930 target = expand_builtin_sincos (exp);
5931 if (target)
5932 return target;
5933 break;
5934
5935 case BUILT_IN_APPLY_ARGS:
5936 return expand_builtin_apply_args ();
5937
5938 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5939 FUNCTION with a copy of the parameters described by
5940 ARGUMENTS, and ARGSIZE. It returns a block of memory
5941 allocated on the stack into which is stored all the registers
5942 that might possibly be used for returning the result of a
5943 function. ARGUMENTS is the value returned by
5944 __builtin_apply_args. ARGSIZE is the number of bytes of
5945 arguments that must be copied. ??? How should this value be
5946 computed? We'll also need a safe worst case value for varargs
5947 functions. */
5948 case BUILT_IN_APPLY:
5949 if (!validate_arglist (exp, POINTER_TYPE,
5950 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5951 && !validate_arglist (exp, REFERENCE_TYPE,
5952 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5953 return const0_rtx;
5954 else
5955 {
5956 rtx ops[3];
5957
5958 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5959 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5960 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5961
5962 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5963 }
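
/* Editor's usage sketch (not part of this file): a forwarding thunk
   built from these builtins; 64 is an assumed worst-case argument
   block size in bytes, and target_fn a hypothetical callee with the
   same signature as the forwarder.

       double forwarder (double x)
       {
         void *args = __builtin_apply_args ();
         void *res = __builtin_apply ((void (*) ()) target_fn,
                                      args, 64);
         __builtin_return (res);
       }
*/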
5964
5965 /* __builtin_return (RESULT) causes the function to return the
5966 value described by RESULT. RESULT is address of the block of
5967 memory returned by __builtin_apply. */
5968 case BUILT_IN_RETURN:
5969 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5970 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5971 return const0_rtx;
5972
5973 case BUILT_IN_SAVEREGS:
5974 return expand_builtin_saveregs ();
5975
5976 case BUILT_IN_VA_ARG_PACK:
5977 /* All valid uses of __builtin_va_arg_pack () are removed during
5978 inlining. */
5979 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5980 return const0_rtx;
5981
5982 case BUILT_IN_VA_ARG_PACK_LEN:
5983 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5984 inlining. */
5985 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5986 return const0_rtx;
5987
5988 /* Return the address of the first anonymous stack arg. */
5989 case BUILT_IN_NEXT_ARG:
5990 if (fold_builtin_next_arg (exp, false))
5991 return const0_rtx;
5992 return expand_builtin_next_arg ();
5993
5994 case BUILT_IN_CLEAR_CACHE:
5995 target = expand_builtin___clear_cache (exp);
5996 if (target)
5997 return target;
5998 break;
5999
6000 case BUILT_IN_CLASSIFY_TYPE:
6001 return expand_builtin_classify_type (exp);
6002
6003 case BUILT_IN_CONSTANT_P:
6004 return const0_rtx;
6005
6006 case BUILT_IN_FRAME_ADDRESS:
6007 case BUILT_IN_RETURN_ADDRESS:
6008 return expand_builtin_frame_address (fndecl, exp);
6009
6010 /* Returns the address of the area where the structure is returned.
6011 0 otherwise. */
6012 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6013 if (call_expr_nargs (exp) != 0
6014 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6015 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6016 return const0_rtx;
6017 else
6018 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6019
6020 case BUILT_IN_ALLOCA:
6021 case BUILT_IN_ALLOCA_WITH_ALIGN:
6022 /* If the allocation stems from the declaration of a variable-sized
6023 object, it cannot accumulate. */
6024 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6025 if (target)
6026 return target;
6027 break;
6028
6029 case BUILT_IN_STACK_SAVE:
6030 return expand_stack_save ();
6031
6032 case BUILT_IN_STACK_RESTORE:
6033 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6034 return const0_rtx;
6035
6036 case BUILT_IN_BSWAP16:
6037 case BUILT_IN_BSWAP32:
6038 case BUILT_IN_BSWAP64:
6039 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6040 if (target)
6041 return target;
6042 break;
6043
6044 CASE_INT_FN (BUILT_IN_FFS):
6045 target = expand_builtin_unop (target_mode, exp, target,
6046 subtarget, ffs_optab);
6047 if (target)
6048 return target;
6049 break;
6050
6051 CASE_INT_FN (BUILT_IN_CLZ):
6052 target = expand_builtin_unop (target_mode, exp, target,
6053 subtarget, clz_optab);
6054 if (target)
6055 return target;
6056 break;
6057
6058 CASE_INT_FN (BUILT_IN_CTZ):
6059 target = expand_builtin_unop (target_mode, exp, target,
6060 subtarget, ctz_optab);
6061 if (target)
6062 return target;
6063 break;
6064
6065 CASE_INT_FN (BUILT_IN_CLRSB):
6066 target = expand_builtin_unop (target_mode, exp, target,
6067 subtarget, clrsb_optab);
6068 if (target)
6069 return target;
6070 break;
6071
6072 CASE_INT_FN (BUILT_IN_POPCOUNT):
6073 target = expand_builtin_unop (target_mode, exp, target,
6074 subtarget, popcount_optab);
6075 if (target)
6076 return target;
6077 break;
6078
6079 CASE_INT_FN (BUILT_IN_PARITY):
6080 target = expand_builtin_unop (target_mode, exp, target,
6081 subtarget, parity_optab);
6082 if (target)
6083 return target;
6084 break;
6085
6086 case BUILT_IN_STRLEN:
6087 target = expand_builtin_strlen (exp, target, target_mode);
6088 if (target)
6089 return target;
6090 break;
6091
6092 case BUILT_IN_STRCPY:
6093 target = expand_builtin_strcpy (exp, target);
6094 if (target)
6095 return target;
6096 break;
6097
6098 case BUILT_IN_STRNCPY:
6099 target = expand_builtin_strncpy (exp, target);
6100 if (target)
6101 return target;
6102 break;
6103
6104 case BUILT_IN_STPCPY:
6105 target = expand_builtin_stpcpy (exp, target, mode);
6106 if (target)
6107 return target;
6108 break;
6109
6110 case BUILT_IN_MEMCPY:
6111 target = expand_builtin_memcpy (exp, target);
6112 if (target)
6113 return target;
6114 break;
6115
6116 case BUILT_IN_MEMPCPY:
6117 target = expand_builtin_mempcpy (exp, target, mode);
6118 if (target)
6119 return target;
6120 break;
6121
6122 case BUILT_IN_MEMSET:
6123 target = expand_builtin_memset (exp, target, mode);
6124 if (target)
6125 return target;
6126 break;
6127
6128 case BUILT_IN_BZERO:
6129 target = expand_builtin_bzero (exp);
6130 if (target)
6131 return target;
6132 break;
6133
6134 case BUILT_IN_STRCMP:
6135 target = expand_builtin_strcmp (exp, target);
6136 if (target)
6137 return target;
6138 break;
6139
6140 case BUILT_IN_STRNCMP:
6141 target = expand_builtin_strncmp (exp, target, mode);
6142 if (target)
6143 return target;
6144 break;
6145
6146 case BUILT_IN_BCMP:
6147 case BUILT_IN_MEMCMP:
6148 target = expand_builtin_memcmp (exp, target, mode);
6149 if (target)
6150 return target;
6151 break;
6152
6153 case BUILT_IN_SETJMP:
6154 /* This should have been lowered to the builtins below. */
6155 gcc_unreachable ();
6156
6157 case BUILT_IN_SETJMP_SETUP:
6158 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6159 and the receiver label. */
6160 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6161 {
6162 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6163 VOIDmode, EXPAND_NORMAL);
6164 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6165 rtx label_r = label_rtx (label);
6166
6167 /* This is copied from the handling of non-local gotos. */
6168 expand_builtin_setjmp_setup (buf_addr, label_r);
6169 nonlocal_goto_handler_labels
6170 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6171 nonlocal_goto_handler_labels);
6172 /* ??? Do not let expand_label treat us as such since we would
6173 not want to be both on the list of non-local labels and on
6174 the list of forced labels. */
6175 FORCED_LABEL (label) = 0;
6176 return const0_rtx;
6177 }
6178 break;
6179
6180 case BUILT_IN_SETJMP_DISPATCHER:
6181 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6182 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6183 {
6184 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6185 rtx label_r = label_rtx (label);
6186
6187 /* Remove the dispatcher label from the list of non-local labels
6188 since the receiver labels have been added to it above. */
6189 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6190 return const0_rtx;
6191 }
6192 break;
6193
6194 case BUILT_IN_SETJMP_RECEIVER:
6195 /* __builtin_setjmp_receiver is passed the receiver label. */
6196 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6197 {
6198 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6199 rtx label_r = label_rtx (label);
6200
6201 expand_builtin_setjmp_receiver (label_r);
6202 return const0_rtx;
6203 }
6204 break;
6205
6206 /* __builtin_longjmp is passed a pointer to an array of five words.
6207 It's similar to the C library longjmp function but works with
6208 __builtin_setjmp above. */
6209 case BUILT_IN_LONGJMP:
6210 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6211 {
6212 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6213 VOIDmode, EXPAND_NORMAL);
6214 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6215
6216 if (value != const1_rtx)
6217 {
6218 error ("%<__builtin_longjmp%> second argument must be 1");
6219 return const0_rtx;
6220 }
6221
6222 expand_builtin_longjmp (buf_addr, value);
6223 return const0_rtx;
6224 }
6225 break;
6226
6227 case BUILT_IN_NONLOCAL_GOTO:
6228 target = expand_builtin_nonlocal_goto (exp);
6229 if (target)
6230 return target;
6231 break;
6232
6233 /* This updates the setjmp buffer that is its argument with the value
6234 of the current stack pointer. */
6235 case BUILT_IN_UPDATE_SETJMP_BUF:
6236 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6237 {
6238 rtx buf_addr
6239 = expand_normal (CALL_EXPR_ARG (exp, 0));
6240
6241 expand_builtin_update_setjmp_buf (buf_addr);
6242 return const0_rtx;
6243 }
6244 break;
6245
6246 case BUILT_IN_TRAP:
6247 expand_builtin_trap ();
6248 return const0_rtx;
6249
6250 case BUILT_IN_UNREACHABLE:
6251 expand_builtin_unreachable ();
6252 return const0_rtx;
6253
6254 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6255 case BUILT_IN_SIGNBITD32:
6256 case BUILT_IN_SIGNBITD64:
6257 case BUILT_IN_SIGNBITD128:
6258 target = expand_builtin_signbit (exp, target);
6259 if (target)
6260 return target;
6261 break;
6262
6263 /* Various hooks for the DWARF 2 __throw routine. */
6264 case BUILT_IN_UNWIND_INIT:
6265 expand_builtin_unwind_init ();
6266 return const0_rtx;
6267 case BUILT_IN_DWARF_CFA:
6268 return virtual_cfa_rtx;
6269 #ifdef DWARF2_UNWIND_INFO
6270 case BUILT_IN_DWARF_SP_COLUMN:
6271 return expand_builtin_dwarf_sp_column ();
6272 case BUILT_IN_INIT_DWARF_REG_SIZES:
6273 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6274 return const0_rtx;
6275 #endif
6276 case BUILT_IN_FROB_RETURN_ADDR:
6277 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6278 case BUILT_IN_EXTRACT_RETURN_ADDR:
6279 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6280 case BUILT_IN_EH_RETURN:
6281 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6282 CALL_EXPR_ARG (exp, 1));
6283 return const0_rtx;
6284 #ifdef EH_RETURN_DATA_REGNO
6285 case BUILT_IN_EH_RETURN_DATA_REGNO:
6286 return expand_builtin_eh_return_data_regno (exp);
6287 #endif
6288 case BUILT_IN_EXTEND_POINTER:
6289 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6290 case BUILT_IN_EH_POINTER:
6291 return expand_builtin_eh_pointer (exp);
6292 case BUILT_IN_EH_FILTER:
6293 return expand_builtin_eh_filter (exp);
6294 case BUILT_IN_EH_COPY_VALUES:
6295 return expand_builtin_eh_copy_values (exp);
6296
6297 case BUILT_IN_VA_START:
6298 return expand_builtin_va_start (exp);
6299 case BUILT_IN_VA_END:
6300 return expand_builtin_va_end (exp);
6301 case BUILT_IN_VA_COPY:
6302 return expand_builtin_va_copy (exp);
6303 case BUILT_IN_EXPECT:
6304 return expand_builtin_expect (exp, target);
6305 case BUILT_IN_ASSUME_ALIGNED:
6306 return expand_builtin_assume_aligned (exp, target);
6307 case BUILT_IN_PREFETCH:
6308 expand_builtin_prefetch (exp);
6309 return const0_rtx;
6310
6311 case BUILT_IN_INIT_TRAMPOLINE:
6312 return expand_builtin_init_trampoline (exp, true);
6313 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6314 return expand_builtin_init_trampoline (exp, false);
6315 case BUILT_IN_ADJUST_TRAMPOLINE:
6316 return expand_builtin_adjust_trampoline (exp);
6317
6318 case BUILT_IN_FORK:
6319 case BUILT_IN_EXECL:
6320 case BUILT_IN_EXECV:
6321 case BUILT_IN_EXECLP:
6322 case BUILT_IN_EXECLE:
6323 case BUILT_IN_EXECVP:
6324 case BUILT_IN_EXECVE:
6325 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6326 if (target)
6327 return target;
6328 break;
6329
6330 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6331 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6332 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6333 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6334 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6335 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6336 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6337 if (target)
6338 return target;
6339 break;
6340
6341 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6342 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6343 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6344 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6345 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6346 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6347 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6348 if (target)
6349 return target;
6350 break;
6351
6352 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6353 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6354 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6355 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6356 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6357 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6358 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6359 if (target)
6360 return target;
6361 break;
6362
6363 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6364 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6365 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6366 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6367 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6368 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6369 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6370 if (target)
6371 return target;
6372 break;
6373
6374 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6375 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6376 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6377 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6378 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6379 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6380 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6381 if (target)
6382 return target;
6383 break;
6384
6385 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6386 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6387 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6388 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6389 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6390 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6391 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6392 if (target)
6393 return target;
6394 break;
6395
6396 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6397 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6398 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6399 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6400 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6401 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6402 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6403 if (target)
6404 return target;
6405 break;
6406
6407 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6408 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6409 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6410 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6411 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6412 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6413 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6414 if (target)
6415 return target;
6416 break;
6417
6418 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6419 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6420 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6421 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6422 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6423 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6424 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6425 if (target)
6426 return target;
6427 break;
6428
6429 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6430 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6431 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6432 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6433 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6434 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6435 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6436 if (target)
6437 return target;
6438 break;
6439
6440 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6441 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6442 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6443 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6444 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6445 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6446 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6447 if (target)
6448 return target;
6449 break;
6450
6451 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6452 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6453 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6454 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6455 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6456 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6457 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6458 if (target)
6459 return target;
6460 break;
6461
6462 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6463 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6464 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6465 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6466 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6467 if (mode == VOIDmode)
6468 mode = TYPE_MODE (boolean_type_node);
6469 if (!target || !register_operand (target, mode))
6470 target = gen_reg_rtx (mode);
6471
6472 mode = get_builtin_sync_mode
6473 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6474 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6475 if (target)
6476 return target;
6477 break;
6478
6479 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6480 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6481 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6482 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6483 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6484 mode = get_builtin_sync_mode
6485 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6486 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6487 if (target)
6488 return target;
6489 break;
6490
6491 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6492 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6493 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6494 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6495 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6496 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6497 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6498 if (target)
6499 return target;
6500 break;
6501
6502 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6503 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6504 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6505 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6506 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6507 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6508 expand_builtin_sync_lock_release (mode, exp);
6509 return const0_rtx;
6510
6511 case BUILT_IN_SYNC_SYNCHRONIZE:
6512 expand_builtin_sync_synchronize ();
6513 return const0_rtx;
6514
6515 case BUILT_IN_ATOMIC_EXCHANGE_1:
6516 case BUILT_IN_ATOMIC_EXCHANGE_2:
6517 case BUILT_IN_ATOMIC_EXCHANGE_4:
6518 case BUILT_IN_ATOMIC_EXCHANGE_8:
6519 case BUILT_IN_ATOMIC_EXCHANGE_16:
6520 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6521 target = expand_builtin_atomic_exchange (mode, exp, target);
6522 if (target)
6523 return target;
6524 break;
6525
6526 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6527 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6528 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6529 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6530 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6531 {
6532 unsigned int nargs, z;
6533 vec<tree, va_gc> *vec;
6534
6535 mode =
6536 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6537 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6538 if (target)
6539 return target;
6540
6541 /* If this is turned into an external library call, the weak parameter
6542 must be dropped to match the expected parameter list. */
6543 nargs = call_expr_nargs (exp);
6544 vec_alloc (vec, nargs - 1);
6545 for (z = 0; z < 3; z++)
6546 vec->quick_push (CALL_EXPR_ARG (exp, z));
6547 /* Skip the boolean weak parameter. */
6548 for (z = 4; z < 6; z++)
6549 vec->quick_push (CALL_EXPR_ARG (exp, z));
6550 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6551 break;
6552 }
6553
6554 case BUILT_IN_ATOMIC_LOAD_1:
6555 case BUILT_IN_ATOMIC_LOAD_2:
6556 case BUILT_IN_ATOMIC_LOAD_4:
6557 case BUILT_IN_ATOMIC_LOAD_8:
6558 case BUILT_IN_ATOMIC_LOAD_16:
6559 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6560 target = expand_builtin_atomic_load (mode, exp, target);
6561 if (target)
6562 return target;
6563 break;
6564
6565 case BUILT_IN_ATOMIC_STORE_1:
6566 case BUILT_IN_ATOMIC_STORE_2:
6567 case BUILT_IN_ATOMIC_STORE_4:
6568 case BUILT_IN_ATOMIC_STORE_8:
6569 case BUILT_IN_ATOMIC_STORE_16:
6570 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6571 target = expand_builtin_atomic_store (mode, exp);
6572 if (target)
6573 return const0_rtx;
6574 break;
6575
6576 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6577 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6578 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6579 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6580 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6581 {
6582 enum built_in_function lib;
6583 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6584 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6585 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6586 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6587 ignore, lib);
6588 if (target)
6589 return target;
6590 break;
6591 }
6592 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6593 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6594 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6595 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6596 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6597 {
6598 enum built_in_function lib;
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6600 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6601 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6602 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6603 ignore, lib);
6604 if (target)
6605 return target;
6606 break;
6607 }
6608 case BUILT_IN_ATOMIC_AND_FETCH_1:
6609 case BUILT_IN_ATOMIC_AND_FETCH_2:
6610 case BUILT_IN_ATOMIC_AND_FETCH_4:
6611 case BUILT_IN_ATOMIC_AND_FETCH_8:
6612 case BUILT_IN_ATOMIC_AND_FETCH_16:
6613 {
6614 enum built_in_function lib;
6615 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6616 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6617 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6618 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6619 ignore, lib);
6620 if (target)
6621 return target;
6622 break;
6623 }
6624 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6625 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6626 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6627 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6628 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6629 {
6630 enum built_in_function lib;
6631 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6632 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6633 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6634 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6635 ignore, lib);
6636 if (target)
6637 return target;
6638 break;
6639 }
6640 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6641 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6642 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6643 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6644 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6645 {
6646 enum built_in_function lib;
6647 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6648 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6649 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6650 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6651 ignore, lib);
6652 if (target)
6653 return target;
6654 break;
6655 }
6656 case BUILT_IN_ATOMIC_OR_FETCH_1:
6657 case BUILT_IN_ATOMIC_OR_FETCH_2:
6658 case BUILT_IN_ATOMIC_OR_FETCH_4:
6659 case BUILT_IN_ATOMIC_OR_FETCH_8:
6660 case BUILT_IN_ATOMIC_OR_FETCH_16:
6661 {
6662 enum built_in_function lib;
6663 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6664 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6665 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6666 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6667 ignore, lib);
6668 if (target)
6669 return target;
6670 break;
6671 }
6672 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6673 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6674 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6675 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6676 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6677 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6678 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6679 ignore, BUILT_IN_NONE);
6680 if (target)
6681 return target;
6682 break;
6683
6684 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6685 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6686 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6687 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6688 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6689 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6690 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6691 ignore, BUILT_IN_NONE);
6692 if (target)
6693 return target;
6694 break;
6695
6696 case BUILT_IN_ATOMIC_FETCH_AND_1:
6697 case BUILT_IN_ATOMIC_FETCH_AND_2:
6698 case BUILT_IN_ATOMIC_FETCH_AND_4:
6699 case BUILT_IN_ATOMIC_FETCH_AND_8:
6700 case BUILT_IN_ATOMIC_FETCH_AND_16:
6701 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6702 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6703 ignore, BUILT_IN_NONE);
6704 if (target)
6705 return target;
6706 break;
6707
6708 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6709 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6710 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6711 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6712 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6713 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6714 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6715 ignore, BUILT_IN_NONE);
6716 if (target)
6717 return target;
6718 break;
6719
6720 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6721 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6722 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6723 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6724 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6725 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6726 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6727 ignore, BUILT_IN_NONE);
6728 if (target)
6729 return target;
6730 break;
6731
6732 case BUILT_IN_ATOMIC_FETCH_OR_1:
6733 case BUILT_IN_ATOMIC_FETCH_OR_2:
6734 case BUILT_IN_ATOMIC_FETCH_OR_4:
6735 case BUILT_IN_ATOMIC_FETCH_OR_8:
6736 case BUILT_IN_ATOMIC_FETCH_OR_16:
6737 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6738 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6739 ignore, BUILT_IN_NONE);
6740 if (target)
6741 return target;
6742 break;
6743
6744 case BUILT_IN_ATOMIC_TEST_AND_SET:
6745 return expand_builtin_atomic_test_and_set (exp, target);
6746
6747 case BUILT_IN_ATOMIC_CLEAR:
6748 return expand_builtin_atomic_clear (exp);
6749
6750 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6751 return expand_builtin_atomic_always_lock_free (exp);
6752
6753 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6754 target = expand_builtin_atomic_is_lock_free (exp);
6755 if (target)
6756 return target;
6757 break;
6758
6759 case BUILT_IN_ATOMIC_THREAD_FENCE:
6760 expand_builtin_atomic_thread_fence (exp);
6761 return const0_rtx;
6762
6763 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6764 expand_builtin_atomic_signal_fence (exp);
6765 return const0_rtx;
6766
6767 case BUILT_IN_OBJECT_SIZE:
6768 return expand_builtin_object_size (exp);
6769
6770 case BUILT_IN_MEMCPY_CHK:
6771 case BUILT_IN_MEMPCPY_CHK:
6772 case BUILT_IN_MEMMOVE_CHK:
6773 case BUILT_IN_MEMSET_CHK:
6774 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6775 if (target)
6776 return target;
6777 break;
6778
6779 case BUILT_IN_STRCPY_CHK:
6780 case BUILT_IN_STPCPY_CHK:
6781 case BUILT_IN_STRNCPY_CHK:
6782 case BUILT_IN_STPNCPY_CHK:
6783 case BUILT_IN_STRCAT_CHK:
6784 case BUILT_IN_STRNCAT_CHK:
6785 case BUILT_IN_SNPRINTF_CHK:
6786 case BUILT_IN_VSNPRINTF_CHK:
6787 maybe_emit_chk_warning (exp, fcode);
6788 break;
6789
6790 case BUILT_IN_SPRINTF_CHK:
6791 case BUILT_IN_VSPRINTF_CHK:
6792 maybe_emit_sprintf_chk_warning (exp, fcode);
6793 break;
6794
6795 case BUILT_IN_FREE:
6796 if (warn_free_nonheap_object)
6797 maybe_emit_free_warning (exp);
6798 break;
6799
6800 case BUILT_IN_THREAD_POINTER:
6801 return expand_builtin_thread_pointer (exp, target);
6802
6803 case BUILT_IN_SET_THREAD_POINTER:
6804 expand_builtin_set_thread_pointer (exp);
6805 return const0_rtx;
6806
6807 case BUILT_IN_CILK_DETACH:
6808 expand_builtin_cilk_detach (exp);
6809 return const0_rtx;
6810
6811 case BUILT_IN_CILK_POP_FRAME:
6812 expand_builtin_cilk_pop_frame (exp);
6813 return const0_rtx;
6814
6815 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6816 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6817 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6818 return expand_normal (CALL_EXPR_ARG (exp, 0));
6819
6820 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6821 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6822 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6823 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6824 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6825 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6826 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6827 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6828 /* We allow user CHKP builtins if the Pointer Bounds
6829 Checker is off. */
6830 if (!flag_check_pointer_bounds)
6831 {
6832 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6833 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
6834 return expand_normal (CALL_EXPR_ARG (exp, 0));
6835 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6836 return expand_normal (size_zero_node);
6837 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6838 return expand_normal (size_int (-1));
6839 else
6840 return const0_rtx;
6841 }
6842 /* FALLTHROUGH */
6843
6844 case BUILT_IN_CHKP_BNDMK:
6845 case BUILT_IN_CHKP_BNDSTX:
6846 case BUILT_IN_CHKP_BNDCL:
6847 case BUILT_IN_CHKP_BNDCU:
6848 case BUILT_IN_CHKP_BNDLDX:
6849 case BUILT_IN_CHKP_BNDRET:
6850 case BUILT_IN_CHKP_INTERSECT:
6851 case BUILT_IN_CHKP_ARG_BND:
6852 case BUILT_IN_CHKP_NARROW:
6853 case BUILT_IN_CHKP_EXTRACT_LOWER:
6854 case BUILT_IN_CHKP_EXTRACT_UPPER:
6855 /* A software implementation of the pointer bounds checker is not
6856 yet implemented; target support is required. */
6857 error ("your target platform does not support -fcheck-pointers");
6858 break;
6859
6860 default: /* Just do a library call for an unknown builtin. */
6861 break;
6862 }
6863
6864 /* The switch statement above can drop through to cause the function
6865 to be called normally. */
6866 return expand_call (exp, target, ignore);
6867 }
6868
6869 /* Determine whether a tree node represents a call to a built-in
6870 function. If the tree T is a call to a built-in function with
6871 the right number of arguments of the appropriate types, return
6872 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6873 Otherwise the return value is END_BUILTINS. */
6874
6875 enum built_in_function
6876 builtin_mathfn_code (const_tree t)
6877 {
6878 const_tree fndecl, arg, parmlist;
6879 const_tree argtype, parmtype;
6880 const_call_expr_arg_iterator iter;
6881
6882 if (TREE_CODE (t) != CALL_EXPR
6883 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6884 return END_BUILTINS;
6885
6886 fndecl = get_callee_fndecl (t);
6887 if (fndecl == NULL_TREE
6888 || TREE_CODE (fndecl) != FUNCTION_DECL
6889 || ! DECL_BUILT_IN (fndecl)
6890 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6891 return END_BUILTINS;
6892
6893 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6894 init_const_call_expr_arg_iterator (t, &iter);
6895 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6896 {
6897 /* If a function doesn't take a variable number of arguments,
6898 the last element in the list will have type `void'. */
6899 parmtype = TREE_VALUE (parmlist);
6900 if (VOID_TYPE_P (parmtype))
6901 {
6902 if (more_const_call_expr_args_p (&iter))
6903 return END_BUILTINS;
6904 return DECL_FUNCTION_CODE (fndecl);
6905 }
6906
6907 if (! more_const_call_expr_args_p (&iter))
6908 return END_BUILTINS;
6909
6910 arg = next_const_call_expr_arg (&iter);
6911 argtype = TREE_TYPE (arg);
6912
6913 if (SCALAR_FLOAT_TYPE_P (parmtype))
6914 {
6915 if (! SCALAR_FLOAT_TYPE_P (argtype))
6916 return END_BUILTINS;
6917 }
6918 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6919 {
6920 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6921 return END_BUILTINS;
6922 }
6923 else if (POINTER_TYPE_P (parmtype))
6924 {
6925 if (! POINTER_TYPE_P (argtype))
6926 return END_BUILTINS;
6927 }
6928 else if (INTEGRAL_TYPE_P (parmtype))
6929 {
6930 if (! INTEGRAL_TYPE_P (argtype))
6931 return END_BUILTINS;
6932 }
6933 else
6934 return END_BUILTINS;
6935 }
6936
6937 /* Variable-length argument list. */
6938 return DECL_FUNCTION_CODE (fndecl);
6939 }
6940
6941 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6942 evaluate to a constant. */
6943
6944 static tree
6945 fold_builtin_constant_p (tree arg)
6946 {
6947 /* We return 1 for a numeric type that's known to be a constant
6948 value at compile-time or for an aggregate type that's a
6949 literal constant. */
6950 STRIP_NOPS (arg);
6951
6952 /* If we know this is a constant, return the constant one. */
6953 if (CONSTANT_CLASS_P (arg)
6954 || (TREE_CODE (arg) == CONSTRUCTOR
6955 && TREE_CONSTANT (arg)))
6956 return integer_one_node;
6957 if (TREE_CODE (arg) == ADDR_EXPR)
6958 {
6959 tree op = TREE_OPERAND (arg, 0);
6960 if (TREE_CODE (op) == STRING_CST
6961 || (TREE_CODE (op) == ARRAY_REF
6962 && integer_zerop (TREE_OPERAND (op, 1))
6963 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6964 return integer_one_node;
6965 }
6966
6967 /* If this expression has side effects, show we don't know it to be a
6968 constant. Likewise if it's a pointer or aggregate type since in
6969 those cases we only want literals, since those are only optimized
6970 when generating RTL, not later.
6971 And finally, if we are compiling an initializer, not code, we
6972 need to return a definite result now; there's not going to be any
6973 more optimization done. */
6974 if (TREE_SIDE_EFFECTS (arg)
6975 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6976 || POINTER_TYPE_P (TREE_TYPE (arg))
6977 || cfun == 0
6978 || folding_initializer
6979 || force_folding_builtin_constant_p)
6980 return integer_zero_node;
6981
6982 return NULL_TREE;
6983 }
6984
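/* Editor's note: a minimal usage sketch, not part of builtins.c, showing
   what the folding above means at the source level.  Guarded out with
   #if 0 so the surrounding file is unaffected; the result for the last
   case depends on optimization level, so treat this as illustrative.  */
#if 0
#include <stdio.h>

static int
demo_constant_p (int runtime_value)
{
  int a = __builtin_constant_p (42);            /* literal: folds to 1 */
  int b = __builtin_constant_p ("abc");         /* ADDR_EXPR of STRING_CST: 1 */
  int c = __builtin_constant_p (runtime_value); /* not a constant here: 0 */
  printf ("%d %d %d\n", a, b, c);
  return a + b + c;
}
#endif
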
6985 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6986 return it as a truthvalue. */
6987
6988 static tree
6989 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6990 {
6991 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6992
6993 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6994 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6995 ret_type = TREE_TYPE (TREE_TYPE (fn));
6996 pred_type = TREE_VALUE (arg_types);
6997 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6998
6999 pred = fold_convert_loc (loc, pred_type, pred);
7000 expected = fold_convert_loc (loc, expected_type, expected);
7001 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7002
7003 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7004 build_int_cst (ret_type, 0));
7005 }
7006
7007 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7008 NULL_TREE if no simplification is possible. */
7009
7010 static tree
7011 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7012 {
7013 tree inner, fndecl, inner_arg0;
7014 enum tree_code code;
7015
7016 /* Distribute the expected value over short-circuiting operators.
7017 See through the cast from truthvalue_type_node to long. */
7018 inner_arg0 = arg0;
7019 while (TREE_CODE (inner_arg0) == NOP_EXPR
7020 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7021 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7022 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7023
7024 /* If this is a builtin_expect within a builtin_expect keep the
7025 inner one. See through a comparison against a constant. It
7026 might have been added to create a truthvalue. */
7027 inner = inner_arg0;
7028
7029 if (COMPARISON_CLASS_P (inner)
7030 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7031 inner = TREE_OPERAND (inner, 0);
7032
7033 if (TREE_CODE (inner) == CALL_EXPR
7034 && (fndecl = get_callee_fndecl (inner))
7035 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7036 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7037 return arg0;
7038
7039 inner = inner_arg0;
7040 code = TREE_CODE (inner);
7041 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7042 {
7043 tree op0 = TREE_OPERAND (inner, 0);
7044 tree op1 = TREE_OPERAND (inner, 1);
7045
7046 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7047 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7048 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7049
7050 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7051 }
7052
7053 /* If the argument isn't invariant then there's nothing else we can do. */
7054 if (!TREE_CONSTANT (inner_arg0))
7055 return NULL_TREE;
7056
7057 /* If we expect that a comparison against the argument will fold to
7058 a constant return the constant. In practice, this means a true
7059 constant or the address of a non-weak symbol. */
7060 inner = inner_arg0;
7061 STRIP_NOPS (inner);
7062 if (TREE_CODE (inner) == ADDR_EXPR)
7063 {
7064 do
7065 {
7066 inner = TREE_OPERAND (inner, 0);
7067 }
7068 while (TREE_CODE (inner) == COMPONENT_REF
7069 || TREE_CODE (inner) == ARRAY_REF);
7070 if ((TREE_CODE (inner) == VAR_DECL
7071 || TREE_CODE (inner) == FUNCTION_DECL)
7072 && DECL_WEAK (inner))
7073 return NULL_TREE;
7074 }
7075
7076 /* Otherwise, ARG0 already has the proper type for the return value. */
7077 return arg0;
7078 }
7079
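/* Editor's note: an illustrative source-level rendering, not part of
   builtins.c, of the short-circuit distribution performed above.  */
#if 0
extern long cond_a, cond_b;

static int
demo_expect (void)
{
  /* The fold rewrites this ...  */
  if (__builtin_expect (cond_a && cond_b, 1))
    return 1;
  /* ... roughly into this, so each subcondition carries the hint:  */
  if ((__builtin_expect (cond_a, 1) != 0)
      && (__builtin_expect (cond_b, 1) != 0))
    return 1;
  return 0;
}
#endif
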
7080 /* Fold a call to __builtin_classify_type with argument ARG. */
7081
7082 static tree
7083 fold_builtin_classify_type (tree arg)
7084 {
7085 if (arg == 0)
7086 return build_int_cst (integer_type_node, no_type_class);
7087
7088 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7089 }
7090
7091 /* Fold a call to __builtin_strlen with argument ARG. */
7092
7093 static tree
7094 fold_builtin_strlen (location_t loc, tree type, tree arg)
7095 {
7096 if (!validate_arg (arg, POINTER_TYPE))
7097 return NULL_TREE;
7098 else
7099 {
7100 tree len = c_strlen (arg, 0);
7101
7102 if (len)
7103 return fold_convert_loc (loc, type, len);
7104
7105 return NULL_TREE;
7106 }
7107 }
7108
7109 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7110
7111 static tree
7112 fold_builtin_inf (location_t loc, tree type, int warn)
7113 {
7114 REAL_VALUE_TYPE real;
7115
7116 /* __builtin_inff is intended to be usable to define INFINITY on all
7117 targets. If an infinity is not available, INFINITY expands "to a
7118 positive constant of type float that overflows at translation
7119 time", footnote "In this case, using INFINITY will violate the
7120 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7121 Thus we pedwarn to ensure this constraint violation is
7122 diagnosed. */
7123 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7124 pedwarn (loc, 0, "target format does not support infinity");
7125
7126 real_inf (&real);
7127 return build_real (type, real);
7128 }
7129
7130 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7131
7132 static tree
7133 fold_builtin_nan (tree arg, tree type, int quiet)
7134 {
7135 REAL_VALUE_TYPE real;
7136 const char *str;
7137
7138 if (!validate_arg (arg, POINTER_TYPE))
7139 return NULL_TREE;
7140 str = c_getstr (arg);
7141 if (!str)
7142 return NULL_TREE;
7143
7144 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7145 return NULL_TREE;
7146
7147 return build_real (type, real);
7148 }
7149
7150 /* Return true if the floating point expression T has an integer value.
7151 We also allow +Inf, -Inf and NaN to be considered integer values. */
7152
7153 static bool
7154 integer_valued_real_p (tree t)
7155 {
7156 switch (TREE_CODE (t))
7157 {
7158 case FLOAT_EXPR:
7159 return true;
7160
7161 case ABS_EXPR:
7162 case SAVE_EXPR:
7163 return integer_valued_real_p (TREE_OPERAND (t, 0));
7164
7165 case COMPOUND_EXPR:
7166 case MODIFY_EXPR:
7167 case BIND_EXPR:
7168 return integer_valued_real_p (TREE_OPERAND (t, 1));
7169
7170 case PLUS_EXPR:
7171 case MINUS_EXPR:
7172 case MULT_EXPR:
7173 case MIN_EXPR:
7174 case MAX_EXPR:
7175 return integer_valued_real_p (TREE_OPERAND (t, 0))
7176 && integer_valued_real_p (TREE_OPERAND (t, 1));
7177
7178 case COND_EXPR:
7179 return integer_valued_real_p (TREE_OPERAND (t, 1))
7180 && integer_valued_real_p (TREE_OPERAND (t, 2));
7181
7182 case REAL_CST:
7183 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7184
7185 case NOP_EXPR:
7186 {
7187 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7188 if (TREE_CODE (type) == INTEGER_TYPE)
7189 return true;
7190 if (TREE_CODE (type) == REAL_TYPE)
7191 return integer_valued_real_p (TREE_OPERAND (t, 0));
7192 break;
7193 }
7194
7195 case CALL_EXPR:
7196 switch (builtin_mathfn_code (t))
7197 {
7198 CASE_FLT_FN (BUILT_IN_CEIL):
7199 CASE_FLT_FN (BUILT_IN_FLOOR):
7200 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7201 CASE_FLT_FN (BUILT_IN_RINT):
7202 CASE_FLT_FN (BUILT_IN_ROUND):
7203 CASE_FLT_FN (BUILT_IN_TRUNC):
7204 return true;
7205
7206 CASE_FLT_FN (BUILT_IN_FMIN):
7207 CASE_FLT_FN (BUILT_IN_FMAX):
7208 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7209 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7210
7211 default:
7212 break;
7213 }
7214 break;
7215
7216 default:
7217 break;
7218 }
7219 return false;
7220 }
7221
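/* Editor's note: a standalone run-time analogue, not part of builtins.c,
   of the structural recursion above; it checks values where the code
   above reasons about expression shapes.  */
#if 0
#include <math.h>

static int
is_integer_valued (double x)
{
  /* +Inf, -Inf and NaN are accepted, matching the comment above.  */
  return !isfinite (x) || nearbyint (x) == x;
}

static int
demo_integer_valued (double x, int i)
{
  return is_integer_valued (floor (x))                       /* rounding call */
         && is_integer_valued ((double) i)                   /* FLOAT_EXPR */
         && is_integer_valued (fmin (trunc (x), round (x))); /* FMIN case */
}
#endif
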
7222 /* FNDECL is assumed to be a builtin where truncation can be propagated
7223 across (for instance floor((double)f) == (double)floorf (f)).
7224 Do the transformation for a call with argument ARG. */
7225
7226 static tree
7227 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7228 {
7229 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7230
7231 if (!validate_arg (arg, REAL_TYPE))
7232 return NULL_TREE;
7233
7234 /* Integer rounding functions are idempotent. */
7235 if (fcode == builtin_mathfn_code (arg))
7236 return arg;
7237
7238 /* If argument is already integer valued, and we don't need to worry
7239 about setting errno, there's no need to perform rounding. */
7240 if (! flag_errno_math && integer_valued_real_p (arg))
7241 return arg;
7242
7243 if (optimize)
7244 {
7245 tree arg0 = strip_float_extensions (arg);
7246 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7247 tree newtype = TREE_TYPE (arg0);
7248 tree decl;
7249
7250 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7251 && (decl = mathfn_built_in (newtype, fcode)))
7252 return fold_convert_loc (loc, ftype,
7253 build_call_expr_loc (loc, decl, 1,
7254 fold_convert_loc (loc,
7255 newtype,
7256 arg0)));
7257 }
7258 return NULL_TREE;
7259 }
7260
7261 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7262 the argument, for instance lround((double)f) -> lroundf (f).
7263 Do the transformation for a call with argument ARG. */
7264
7265 static tree
7266 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7267 {
7268 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7269
7270 if (!validate_arg (arg, REAL_TYPE))
7271 return NULL_TREE;
7272
7273 /* If argument is already integer valued, and we don't need to worry
7274 about setting errno, there's no need to perform rounding. */
7275 if (! flag_errno_math && integer_valued_real_p (arg))
7276 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7277 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7278
7279 if (optimize)
7280 {
7281 tree ftype = TREE_TYPE (arg);
7282 tree arg0 = strip_float_extensions (arg);
7283 tree newtype = TREE_TYPE (arg0);
7284 tree decl;
7285
7286 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7287 && (decl = mathfn_built_in (newtype, fcode)))
7288 return build_call_expr_loc (loc, decl, 1,
7289 fold_convert_loc (loc, newtype, arg0));
7290 }
7291
7292 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7293 sizeof (int) == sizeof (long). */
7294 if (TYPE_PRECISION (integer_type_node)
7295 == TYPE_PRECISION (long_integer_type_node))
7296 {
7297 tree newfn = NULL_TREE;
7298 switch (fcode)
7299 {
7300 CASE_FLT_FN (BUILT_IN_ICEIL):
7301 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7302 break;
7303
7304 CASE_FLT_FN (BUILT_IN_IFLOOR):
7305 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7306 break;
7307
7308 CASE_FLT_FN (BUILT_IN_IROUND):
7309 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7310 break;
7311
7312 CASE_FLT_FN (BUILT_IN_IRINT):
7313 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7314 break;
7315
7316 default:
7317 break;
7318 }
7319
7320 if (newfn)
7321 {
7322 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7323 return fold_convert_loc (loc,
7324 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7325 }
7326 }
7327
7328 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7329 sizeof (long long) == sizeof (long). */
7330 if (TYPE_PRECISION (long_long_integer_type_node)
7331 == TYPE_PRECISION (long_integer_type_node))
7332 {
7333 tree newfn = NULL_TREE;
7334 switch (fcode)
7335 {
7336 CASE_FLT_FN (BUILT_IN_LLCEIL):
7337 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7338 break;
7339
7340 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7341 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7342 break;
7343
7344 CASE_FLT_FN (BUILT_IN_LLROUND):
7345 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7346 break;
7347
7348 CASE_FLT_FN (BUILT_IN_LLRINT):
7349 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7350 break;
7351
7352 default:
7353 break;
7354 }
7355
7356 if (newfn)
7357 {
7358 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7359 return fold_convert_loc (loc,
7360 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7361 }
7362 }
7363
7364 return NULL_TREE;
7365 }
7366
7367 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7368 return type. Return NULL_TREE if no simplification can be made. */
7369
7370 static tree
7371 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7372 {
7373 tree res;
7374
7375 if (!validate_arg (arg, COMPLEX_TYPE)
7376 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7377 return NULL_TREE;
7378
7379 /* Calculate the result when the argument is a constant. */
7380 if (TREE_CODE (arg) == COMPLEX_CST
7381 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7382 type, mpfr_hypot)))
7383 return res;
7384
7385 if (TREE_CODE (arg) == COMPLEX_EXPR)
7386 {
7387 tree real = TREE_OPERAND (arg, 0);
7388 tree imag = TREE_OPERAND (arg, 1);
7389
7390 /* If either part is zero, cabs is fabs of the other. */
7391 if (real_zerop (real))
7392 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7393 if (real_zerop (imag))
7394 return fold_build1_loc (loc, ABS_EXPR, type, real);
7395
7396 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7397 if (flag_unsafe_math_optimizations
7398 && operand_equal_p (real, imag, OEP_PURE_SAME))
7399 {
7400 const REAL_VALUE_TYPE sqrt2_trunc
7401 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7402 STRIP_NOPS (real);
7403 return fold_build2_loc (loc, MULT_EXPR, type,
7404 fold_build1_loc (loc, ABS_EXPR, type, real),
7405 build_real (type, sqrt2_trunc));
7406 }
7407 }
7408
7409 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7410 if (TREE_CODE (arg) == NEGATE_EXPR
7411 || TREE_CODE (arg) == CONJ_EXPR)
7412 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7413
7414 /* Don't do this when optimizing for size. */
7415 if (flag_unsafe_math_optimizations
7416 && optimize && optimize_function_for_speed_p (cfun))
7417 {
7418 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7419
7420 if (sqrtfn != NULL_TREE)
7421 {
7422 tree rpart, ipart, result;
7423
7424 arg = builtin_save_expr (arg);
7425
7426 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7427 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7428
7429 rpart = builtin_save_expr (rpart);
7430 ipart = builtin_save_expr (ipart);
7431
7432 result = fold_build2_loc (loc, PLUS_EXPR, type,
7433 fold_build2_loc (loc, MULT_EXPR, type,
7434 rpart, rpart),
7435 fold_build2_loc (loc, MULT_EXPR, type,
7436 ipart, ipart));
7437
7438 return build_call_expr_loc (loc, sqrtfn, 1, result);
7439 }
7440 }
7441
7442 return NULL_TREE;
7443 }
7444
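/* Editor's note: the cabs identities above written as plain C, not part
   of builtins.c.  The sqrt(2) and speed-expansion cases assume
   -funsafe-math-optimizations, as the code checks.  */
#if 0
#include <complex.h>
#include <math.h>

static double
demo_cabs (double x, double y)
{
  double a = cabs (x + 0.0 * I);   /* imag part zero: fabs (x) */
  double b = cabs (x + x * I);     /* equal parts: fabs (x) * sqrt (2) */
  double c = cabs (-(x + y * I));  /* cabs (-z) == cabs (z) */
  double d = sqrt (x * x + y * y); /* the expansion used when optimizing
                                      for speed */
  return a + b + c + d;
}
#endif
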
7445 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7446 complex tree type of the result. If NEG is true, the imaginary
7447 zero is negative. */
7448
7449 static tree
7450 build_complex_cproj (tree type, bool neg)
7451 {
7452 REAL_VALUE_TYPE rinf, rzero = dconst0;
7453
7454 real_inf (&rinf);
7455 rzero.sign = neg;
7456 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7457 build_real (TREE_TYPE (type), rzero));
7458 }
7459
7460 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7461 return type. Return NULL_TREE if no simplification can be made. */
7462
7463 static tree
7464 fold_builtin_cproj (location_t loc, tree arg, tree type)
7465 {
7466 if (!validate_arg (arg, COMPLEX_TYPE)
7467 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7468 return NULL_TREE;
7469
7470 /* If there are no infinities, return arg. */
7471 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7472 return non_lvalue_loc (loc, arg);
7473
7474 /* Calculate the result when the argument is a constant. */
7475 if (TREE_CODE (arg) == COMPLEX_CST)
7476 {
7477 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7478 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7479
7480 if (real_isinf (real) || real_isinf (imag))
7481 return build_complex_cproj (type, imag->sign);
7482 else
7483 return arg;
7484 }
7485 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7486 {
7487 tree real = TREE_OPERAND (arg, 0);
7488 tree imag = TREE_OPERAND (arg, 1);
7489
7490 STRIP_NOPS (real);
7491 STRIP_NOPS (imag);
7492
7493 /* If the real part is inf and the imag part is known to be
7494 nonnegative, return (inf + 0i). Remember side-effects are
7495 possible in the imag part. */
7496 if (TREE_CODE (real) == REAL_CST
7497 && real_isinf (TREE_REAL_CST_PTR (real))
7498 && tree_expr_nonnegative_p (imag))
7499 return omit_one_operand_loc (loc, type,
7500 build_complex_cproj (type, false),
7501 arg);
7502
7503 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7504 Remember side-effects are possible in the real part. */
7505 if (TREE_CODE (imag) == REAL_CST
7506 && real_isinf (TREE_REAL_CST_PTR (imag)))
7507 return
7508 omit_one_operand_loc (loc, type,
7509 build_complex_cproj (type, TREE_REAL_CST_PTR
7510 (imag)->sign), arg);
7511 }
7512
7513 return NULL_TREE;
7514 }
7515
7516 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7517 Return NULL_TREE if no simplification can be made. */
7518
7519 static tree
7520 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7521 {
7522
7523 enum built_in_function fcode;
7524 tree res;
7525
7526 if (!validate_arg (arg, REAL_TYPE))
7527 return NULL_TREE;
7528
7529 /* Calculate the result when the argument is a constant. */
7530 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7531 return res;
7532
7533 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7534 fcode = builtin_mathfn_code (arg);
7535 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7536 {
7537 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7538 arg = fold_build2_loc (loc, MULT_EXPR, type,
7539 CALL_EXPR_ARG (arg, 0),
7540 build_real (type, dconsthalf));
7541 return build_call_expr_loc (loc, expfn, 1, arg);
7542 }
7543
7544 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7545 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7546 {
7547 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7548
7549 if (powfn)
7550 {
7551 tree arg0 = CALL_EXPR_ARG (arg, 0);
7552 tree tree_root;
7553 /* The inner root was either sqrt or cbrt. */
7554 /* This was a conditional expression but it triggered a bug
7555 in Sun C 5.5. */
7556 REAL_VALUE_TYPE dconstroot;
7557 if (BUILTIN_SQRT_P (fcode))
7558 dconstroot = dconsthalf;
7559 else
7560 dconstroot = dconst_third ();
7561
7562 /* Adjust for the outer root. */
7563 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7564 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7565 tree_root = build_real (type, dconstroot);
7566 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7567 }
7568 }
7569
7570 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7571 if (flag_unsafe_math_optimizations
7572 && (fcode == BUILT_IN_POW
7573 || fcode == BUILT_IN_POWF
7574 || fcode == BUILT_IN_POWL))
7575 {
7576 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7577 tree arg0 = CALL_EXPR_ARG (arg, 0);
7578 tree arg1 = CALL_EXPR_ARG (arg, 1);
7579 tree narg1;
7580 if (!tree_expr_nonnegative_p (arg0))
7581 arg0 = build1 (ABS_EXPR, type, arg0);
7582 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7583 build_real (type, dconsthalf));
7584 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7585 }
7586
7587 return NULL_TREE;
7588 }
7589
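/* Editor's note: the sqrt rewrites above in source form, not part of
   builtins.c; all require -funsafe-math-optimizations.  */
#if 0
#include <math.h>

static double
demo_sqrt (double x, double y)
{
  double a = sqrt (exp (x));    /* -> exp (x * 0.5) */
  double b = sqrt (sqrt (x));   /* -> pow (x, 0.25) */
  double c = sqrt (cbrt (x));   /* -> pow (x, 1.0 / 6.0) */
  double d = sqrt (pow (x, y)); /* -> pow (fabs (x), y * 0.5) */
  return a + b + c + d;
}
#endif
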
7590 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7591 Return NULL_TREE if no simplification can be made. */
7592
7593 static tree
7594 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7595 {
7596 const enum built_in_function fcode = builtin_mathfn_code (arg);
7597 tree res;
7598
7599 if (!validate_arg (arg, REAL_TYPE))
7600 return NULL_TREE;
7601
7602 /* Calculate the result when the argument is a constant. */
7603 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7604 return res;
7605
7606 if (flag_unsafe_math_optimizations)
7607 {
7608 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7609 if (BUILTIN_EXPONENT_P (fcode))
7610 {
7611 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7612 const REAL_VALUE_TYPE third_trunc =
7613 real_value_truncate (TYPE_MODE (type), dconst_third ());
7614 arg = fold_build2_loc (loc, MULT_EXPR, type,
7615 CALL_EXPR_ARG (arg, 0),
7616 build_real (type, third_trunc));
7617 return build_call_expr_loc (loc, expfn, 1, arg);
7618 }
7619
7620 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7621 if (BUILTIN_SQRT_P (fcode))
7622 {
7623 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7624
7625 if (powfn)
7626 {
7627 tree arg0 = CALL_EXPR_ARG (arg, 0);
7628 tree tree_root;
7629 REAL_VALUE_TYPE dconstroot = dconst_third ();
7630
7631 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7632 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7633 tree_root = build_real (type, dconstroot);
7634 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7635 }
7636 }
7637
7638 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7639 if (BUILTIN_CBRT_P (fcode))
7640 {
7641 tree arg0 = CALL_EXPR_ARG (arg, 0);
7642 if (tree_expr_nonnegative_p (arg0))
7643 {
7644 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7645
7646 if (powfn)
7647 {
7648 tree tree_root;
7649 REAL_VALUE_TYPE dconstroot;
7650
7651 real_arithmetic (&dconstroot, MULT_EXPR,
7652 dconst_third_ptr (), dconst_third_ptr ());
7653 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7654 tree_root = build_real (type, dconstroot);
7655 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7656 }
7657 }
7658 }
7659
7660 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7661 if (fcode == BUILT_IN_POW
7662 || fcode == BUILT_IN_POWF
7663 || fcode == BUILT_IN_POWL)
7664 {
7665 tree arg00 = CALL_EXPR_ARG (arg, 0);
7666 tree arg01 = CALL_EXPR_ARG (arg, 1);
7667 if (tree_expr_nonnegative_p (arg00))
7668 {
7669 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7670 const REAL_VALUE_TYPE dconstroot
7671 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7672 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7673 build_real (type, dconstroot));
7674 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7675 }
7676 }
7677 }
7678 return NULL_TREE;
7679 }
7680
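/* Editor's note: source-level forms of the cbrt rewrites above, not part
   of builtins.c; all are guarded by -funsafe-math-optimizations, and the
   last two additionally require a provably nonnegative operand.  */
#if 0
#include <math.h>

static double
demo_cbrt (double x, double y)
{
  double a = cbrt (exp (x));    /* -> exp (x / 3) */
  double b = cbrt (sqrt (x));   /* -> pow (x, 1.0 / 6.0) */
  double c = cbrt (cbrt (x));   /* -> pow (x, 1.0 / 9.0), x >= 0 */
  double d = cbrt (pow (x, y)); /* -> pow (x, y / 3), x >= 0 */
  return a + b + c + d;
}
#endif
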
7681 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7682 TYPE is the type of the return value. Return NULL_TREE if no
7683 simplification can be made. */
7684
7685 static tree
7686 fold_builtin_cos (location_t loc,
7687 tree arg, tree type, tree fndecl)
7688 {
7689 tree res, narg;
7690
7691 if (!validate_arg (arg, REAL_TYPE))
7692 return NULL_TREE;
7693
7694 /* Calculate the result when the argument is a constant. */
7695 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7696 return res;
7697
7698 /* Optimize cos(-x) into cos (x). */
7699 if ((narg = fold_strip_sign_ops (arg)))
7700 return build_call_expr_loc (loc, fndecl, 1, narg);
7701
7702 return NULL_TREE;
7703 }
7704
7705 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7706 Return NULL_TREE if no simplification can be made. */
7707
7708 static tree
7709 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7710 {
7711 if (validate_arg (arg, REAL_TYPE))
7712 {
7713 tree res, narg;
7714
7715 /* Calculate the result when the argument is a constant. */
7716 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7717 return res;
7718
7719 /* Optimize cosh(-x) into cosh (x). */
7720 if ((narg = fold_strip_sign_ops (arg)))
7721 return build_call_expr_loc (loc, fndecl, 1, narg);
7722 }
7723
7724 return NULL_TREE;
7725 }
7726
7727 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7728 argument ARG. TYPE is the type of the return value. Return
7729 NULL_TREE if no simplification can be made. */
7730
7731 static tree
7732 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7733 bool hyper)
7734 {
7735 if (validate_arg (arg, COMPLEX_TYPE)
7736 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7737 {
7738 tree tmp;
7739
7740 /* Calculate the result when the argument is a constant. */
7741 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7742 return tmp;
7743
7744 /* Optimize fn(-x) into fn(x). */
7745 if ((tmp = fold_strip_sign_ops (arg)))
7746 return build_call_expr_loc (loc, fndecl, 1, tmp);
7747 }
7748
7749 return NULL_TREE;
7750 }
7751
7752 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7753 Return NULL_TREE if no simplification can be made. */
7754
7755 static tree
7756 fold_builtin_tan (tree arg, tree type)
7757 {
7758 enum built_in_function fcode;
7759 tree res;
7760
7761 if (!validate_arg (arg, REAL_TYPE))
7762 return NULL_TREE;
7763
7764 /* Calculate the result when the argument is a constant. */
7765 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7766 return res;
7767
7768 /* Optimize tan(atan(x)) = x. */
7769 fcode = builtin_mathfn_code (arg);
7770 if (flag_unsafe_math_optimizations
7771 && (fcode == BUILT_IN_ATAN
7772 || fcode == BUILT_IN_ATANF
7773 || fcode == BUILT_IN_ATANL))
7774 return CALL_EXPR_ARG (arg, 0);
7775
7776 return NULL_TREE;
7777 }
7778
7779 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7780 NULL_TREE if no simplification can be made. */
7781
7782 static tree
7783 fold_builtin_sincos (location_t loc,
7784 tree arg0, tree arg1, tree arg2)
7785 {
7786 tree type;
7787 tree res, fn, call;
7788
7789 if (!validate_arg (arg0, REAL_TYPE)
7790 || !validate_arg (arg1, POINTER_TYPE)
7791 || !validate_arg (arg2, POINTER_TYPE))
7792 return NULL_TREE;
7793
7794 type = TREE_TYPE (arg0);
7795
7796 /* Calculate the result when the argument is a constant. */
7797 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7798 return res;
7799
7800 /* Canonicalize sincos to cexpi. */
7801 if (!targetm.libc_has_function (function_c99_math_complex))
7802 return NULL_TREE;
7803 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7804 if (!fn)
7805 return NULL_TREE;
7806
7807 call = build_call_expr_loc (loc, fn, 1, arg0);
7808 call = builtin_save_expr (call);
7809
7810 return build2 (COMPOUND_EXPR, void_type_node,
7811 build2 (MODIFY_EXPR, void_type_node,
7812 build_fold_indirect_ref_loc (loc, arg1),
7813 build1 (IMAGPART_EXPR, type, call)),
7814 build2 (MODIFY_EXPR, void_type_node,
7815 build_fold_indirect_ref_loc (loc, arg2),
7816 build1 (REALPART_EXPR, type, call)));
7817 }
7818
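/* Editor's note: the cexpi canonicalization above, sketched in user-level
   C (not part of builtins.c).  cexpi (x) is GCC's internal name for
   cexp (I * x); sin comes from the imaginary part and cos from the real
   part, mirroring the two MODIFY_EXPRs built above.  */
#if 0
#include <complex.h>

static void
demo_sincos (double x, double *sinp, double *cosp)
{
  double complex e = cexp (I * x); /* cexpi (x) */
  *sinp = cimag (e);               /* sin (x) */
  *cosp = creal (e);               /* cos (x) */
}
#endif
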
7819 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7820 NULL_TREE if no simplification can be made. */
7821
7822 static tree
7823 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7824 {
7825 tree rtype;
7826 tree realp, imagp, ifn;
7827 tree res;
7828
7829 if (!validate_arg (arg0, COMPLEX_TYPE)
7830 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7831 return NULL_TREE;
7832
7833 /* Calculate the result when the argument is a constant. */
7834 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7835 return res;
7836
7837 rtype = TREE_TYPE (TREE_TYPE (arg0));
7838
7839 /* If we can determine the real part of arg0 and it is constant zero,
7840 fold to cexpi. */
7841 if (!targetm.libc_has_function (function_c99_math_complex))
7842 return NULL_TREE;
7843 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7844 if (!ifn)
7845 return NULL_TREE;
7846
7847 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7848 && real_zerop (realp))
7849 {
7850 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7851 return build_call_expr_loc (loc, ifn, 1, narg);
7852 }
7853
7854 /* If we can easily decompose the real and imaginary parts, split cexp
7855 into exp (r) * cexpi (i). */
7856 if (flag_unsafe_math_optimizations
7857 && realp)
7858 {
7859 tree rfn, rcall, icall;
7860
7861 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7862 if (!rfn)
7863 return NULL_TREE;
7864
7865 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7866 if (!imagp)
7867 return NULL_TREE;
7868
7869 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7870 icall = builtin_save_expr (icall);
7871 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7872 rcall = builtin_save_expr (rcall);
7873 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7874 fold_build2_loc (loc, MULT_EXPR, rtype,
7875 rcall,
7876 fold_build1_loc (loc, REALPART_EXPR,
7877 rtype, icall)),
7878 fold_build2_loc (loc, MULT_EXPR, rtype,
7879 rcall,
7880 fold_build1_loc (loc, IMAGPART_EXPR,
7881 rtype, icall)));
7882 }
7883
7884 return NULL_TREE;
7885 }
7886
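/* Editor's note: a sketch, not part of builtins.c, of the decomposition
   above: with -funsafe-math-optimizations, cexp (r + i*I) becomes
   exp (r) * cexpi (i).  */
#if 0
#include <complex.h>
#include <math.h>

static double complex
demo_cexp (double r, double i)
{
  double complex rot = cexp (I * i); /* cexpi (i): unit-magnitude rotation */
  double scale = exp (r);            /* real exponential of the real part */
  return scale * creal (rot) + scale * cimag (rot) * I;
}
#endif
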
7887 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7888 Return NULL_TREE if no simplification can be made. */
7889
7890 static tree
7891 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7892 {
7893 if (!validate_arg (arg, REAL_TYPE))
7894 return NULL_TREE;
7895
7896 /* Optimize trunc of constant value. */
7897 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7898 {
7899 REAL_VALUE_TYPE r, x;
7900 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7901
7902 x = TREE_REAL_CST (arg);
7903 real_trunc (&r, TYPE_MODE (type), &x);
7904 return build_real (type, r);
7905 }
7906
7907 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7908 }
7909
7910 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7911 Return NULL_TREE if no simplification can be made. */
7912
7913 static tree
7914 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7915 {
7916 if (!validate_arg (arg, REAL_TYPE))
7917 return NULL_TREE;
7918
7919 /* Optimize floor of constant value. */
7920 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7921 {
7922 REAL_VALUE_TYPE x;
7923
7924 x = TREE_REAL_CST (arg);
7925 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7926 {
7927 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7928 REAL_VALUE_TYPE r;
7929
7930 real_floor (&r, TYPE_MODE (type), &x);
7931 return build_real (type, r);
7932 }
7933 }
7934
7935 /* Fold floor (x) where x is nonnegative to trunc (x). */
7936 if (tree_expr_nonnegative_p (arg))
7937 {
7938 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7939 if (truncfn)
7940 return build_call_expr_loc (loc, truncfn, 1, arg);
7941 }
7942
7943 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7944 }
7945
7946 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7947 Return NULL_TREE if no simplification can be made. */
7948
7949 static tree
7950 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7951 {
7952 if (!validate_arg (arg, REAL_TYPE))
7953 return NULL_TREE;
7954
7955 /* Optimize ceil of constant value. */
7956 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7957 {
7958 REAL_VALUE_TYPE x;
7959
7960 x = TREE_REAL_CST (arg);
7961 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7962 {
7963 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7964 REAL_VALUE_TYPE r;
7965
7966 real_ceil (&r, TYPE_MODE (type), &x);
7967 return build_real (type, r);
7968 }
7969 }
7970
7971 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7972 }
7973
7974 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7975 Return NULL_TREE if no simplification can be made. */
7976
7977 static tree
7978 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7979 {
7980 if (!validate_arg (arg, REAL_TYPE))
7981 return NULL_TREE;
7982
7983 /* Optimize round of constant value. */
7984 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7985 {
7986 REAL_VALUE_TYPE x;
7987
7988 x = TREE_REAL_CST (arg);
7989 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7990 {
7991 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7992 REAL_VALUE_TYPE r;
7993
7994 real_round (&r, TYPE_MODE (type), &x);
7995 return build_real (type, r);
7996 }
7997 }
7998
7999 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8000 }
8001
8002 /* Fold function call to builtin lround, lroundf or lroundl (or the
8003 corresponding long long versions) and other rounding functions. ARG
8004 is the argument to the call. Return NULL_TREE if no simplification
8005 can be made. */
8006
8007 static tree
8008 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8009 {
8010 if (!validate_arg (arg, REAL_TYPE))
8011 return NULL_TREE;
8012
8013 /* Optimize lround of constant value. */
8014 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8015 {
8016 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8017
8018 if (real_isfinite (&x))
8019 {
8020 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8021 tree ftype = TREE_TYPE (arg);
8022 double_int val;
8023 REAL_VALUE_TYPE r;
8024
8025 switch (DECL_FUNCTION_CODE (fndecl))
8026 {
8027 CASE_FLT_FN (BUILT_IN_IFLOOR):
8028 CASE_FLT_FN (BUILT_IN_LFLOOR):
8029 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8030 real_floor (&r, TYPE_MODE (ftype), &x);
8031 break;
8032
8033 CASE_FLT_FN (BUILT_IN_ICEIL):
8034 CASE_FLT_FN (BUILT_IN_LCEIL):
8035 CASE_FLT_FN (BUILT_IN_LLCEIL):
8036 real_ceil (&r, TYPE_MODE (ftype), &x);
8037 break;
8038
8039 CASE_FLT_FN (BUILT_IN_IROUND):
8040 CASE_FLT_FN (BUILT_IN_LROUND):
8041 CASE_FLT_FN (BUILT_IN_LLROUND):
8042 real_round (&r, TYPE_MODE (ftype), &x);
8043 break;
8044
8045 default:
8046 gcc_unreachable ();
8047 }
8048
8049 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8050 if (double_int_fits_to_tree_p (itype, val))
8051 return double_int_to_tree (itype, val);
8052 }
8053 }
8054
8055 switch (DECL_FUNCTION_CODE (fndecl))
8056 {
8057 CASE_FLT_FN (BUILT_IN_LFLOOR):
8058 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8059 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8060 if (tree_expr_nonnegative_p (arg))
8061 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8062 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8063 break;
8064 default:;
8065 }
8066
8067 return fold_fixed_mathfn (loc, fndecl, arg);
8068 }
8069
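/* Editor's note: user-visible effect of the folding above, not part of
   builtins.c.  With finite constant arguments these calls fold to integer
   constants at compile time when the value fits the result type.  */
#if 0
#include <math.h>

static long long
demo_round_fold (double x)
{
  long a = lround (2.5);        /* -> 3L, via real_round */
  long long b = llround (-2.5); /* -> -3LL */
  /* The lfloor shape: for a provably nonnegative operand, it folds to a
     plain truncating conversion, as in the FIX_TRUNC case above.  */
  long c = (long) floor (fabs (x));
  return a + b + c;
}
#endif
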
8070 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8071 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8072 the argument to the call. Return NULL_TREE if no simplification can
8073 be made. */
8074
8075 static tree
8076 fold_builtin_bitop (tree fndecl, tree arg)
8077 {
8078 if (!validate_arg (arg, INTEGER_TYPE))
8079 return NULL_TREE;
8080
8081 /* Optimize for constant argument. */
8082 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8083 {
8084 HOST_WIDE_INT hi, width, result;
8085 unsigned HOST_WIDE_INT lo;
8086 tree type;
8087
8088 type = TREE_TYPE (arg);
8089 width = TYPE_PRECISION (type);
8090 lo = TREE_INT_CST_LOW (arg);
8091
8092 /* Clear all the bits that are beyond the type's precision. */
8093 if (width > HOST_BITS_PER_WIDE_INT)
8094 {
8095 hi = TREE_INT_CST_HIGH (arg);
8096 if (width < HOST_BITS_PER_DOUBLE_INT)
8097 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8098 }
8099 else
8100 {
8101 hi = 0;
8102 if (width < HOST_BITS_PER_WIDE_INT)
8103 lo &= ~(HOST_WIDE_INT_M1U << width);
8104 }
8105
8106 switch (DECL_FUNCTION_CODE (fndecl))
8107 {
8108 CASE_INT_FN (BUILT_IN_FFS):
8109 if (lo != 0)
8110 result = ffs_hwi (lo);
8111 else if (hi != 0)
8112 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8113 else
8114 result = 0;
8115 break;
8116
8117 CASE_INT_FN (BUILT_IN_CLZ):
8118 if (hi != 0)
8119 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8120 else if (lo != 0)
8121 result = width - floor_log2 (lo) - 1;
8122 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8123 result = width;
8124 break;
8125
8126 CASE_INT_FN (BUILT_IN_CTZ):
8127 if (lo != 0)
8128 result = ctz_hwi (lo);
8129 else if (hi != 0)
8130 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8131 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8132 result = width;
8133 break;
8134
8135 CASE_INT_FN (BUILT_IN_CLRSB):
8136 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8137 return NULL_TREE;
8138 if (width > HOST_BITS_PER_WIDE_INT
8139 && (hi & ((unsigned HOST_WIDE_INT) 1
8140 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8141 {
8142 hi = ~hi & ~(HOST_WIDE_INT_M1U
8143 << (width - HOST_BITS_PER_WIDE_INT - 1));
8144 lo = ~lo;
8145 }
8146 else if (width <= HOST_BITS_PER_WIDE_INT
8147 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8148 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8149 if (hi != 0)
8150 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8151 else if (lo != 0)
8152 result = width - floor_log2 (lo) - 2;
8153 else
8154 result = width - 1;
8155 break;
8156
8157 CASE_INT_FN (BUILT_IN_POPCOUNT):
8158 result = 0;
8159 while (lo)
8160 result++, lo &= lo - 1;
8161 while (hi)
8162 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8163 break;
8164
8165 CASE_INT_FN (BUILT_IN_PARITY):
8166 result = 0;
8167 while (lo)
8168 result++, lo &= lo - 1;
8169 while (hi)
8170 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8171 result &= 1;
8172 break;
8173
8174 default:
8175 gcc_unreachable ();
8176 }
8177
8178 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8179 }
8180
8181 return NULL_TREE;
8182 }
8183
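/* Editor's note: a standalone sketch, not part of builtins.c, of the
   "clear lowest set bit" loop used for popcount and parity above.  */
#if 0
static int
popcount_demo (unsigned long long x)
{
  int n = 0;
  while (x)
    {
      x &= x - 1; /* clears exactly one set bit per iteration */
      n++;
    }
  return n;
}

/* Parity is then just the low bit of the count, as in the
   CASE_INT_FN (BUILT_IN_PARITY) arm above.  */
static int
parity_demo (unsigned long long x)
{
  return popcount_demo (x) & 1;
}
#endif
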
8184 /* Fold function call to builtin_bswap and the short, long and long long
8185 variants. Return NULL_TREE if no simplification can be made. */
8186 static tree
8187 fold_builtin_bswap (tree fndecl, tree arg)
8188 {
8189 if (! validate_arg (arg, INTEGER_TYPE))
8190 return NULL_TREE;
8191
8192 /* Optimize constant value. */
8193 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8194 {
8195 HOST_WIDE_INT hi, width, r_hi = 0;
8196 unsigned HOST_WIDE_INT lo, r_lo = 0;
8197 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8198
8199 width = TYPE_PRECISION (type);
8200 lo = TREE_INT_CST_LOW (arg);
8201 hi = TREE_INT_CST_HIGH (arg);
8202
8203 switch (DECL_FUNCTION_CODE (fndecl))
8204 {
8205 case BUILT_IN_BSWAP16:
8206 case BUILT_IN_BSWAP32:
8207 case BUILT_IN_BSWAP64:
8208 {
8209 int s;
8210
8211 for (s = 0; s < width; s += 8)
8212 {
8213 int d = width - s - 8;
8214 unsigned HOST_WIDE_INT byte;
8215
8216 if (s < HOST_BITS_PER_WIDE_INT)
8217 byte = (lo >> s) & 0xff;
8218 else
8219 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8220
8221 if (d < HOST_BITS_PER_WIDE_INT)
8222 r_lo |= byte << d;
8223 else
8224 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8225 }
8226 }
8227
8228 break;
8229
8230 default:
8231 gcc_unreachable ();
8232 }
8233
8234 if (width < HOST_BITS_PER_WIDE_INT)
8235 return build_int_cst (type, r_lo);
8236 else
8237 return build_int_cst_wide (type, r_lo, r_hi);
8238 }
8239
8240 return NULL_TREE;
8241 }
8242
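/* Editor's note: the byte-reversal loop above, restated over a single
   32-bit value (not part of builtins.c).  */
#if 0
#include <stdint.h>

static uint32_t
bswap32_demo (uint32_t x)
{
  uint32_t r = 0;
  int s;

  /* Move the byte at bit offset S to the mirrored offset D, exactly as
     the loop above does with its lo/hi word pair.  */
  for (s = 0; s < 32; s += 8)
    {
      int d = 32 - s - 8;
      r |= ((x >> s) & 0xff) << d;
    }
  return r;
}
#endif
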
8243 /* A subroutine of fold_builtin to fold the various logarithmic
8244 functions. Return NULL_TREE if no simplification can be made.
8245 FUNC is the corresponding MPFR logarithm function. */
8246
8247 static tree
8248 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8249 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8250 {
8251 if (validate_arg (arg, REAL_TYPE))
8252 {
8253 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8254 tree res;
8255 const enum built_in_function fcode = builtin_mathfn_code (arg);
8256
8257 /* Calculate the result when the argument is a constant. */
8258 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8259 return res;
8260
8261 /* Special case, optimize logN(expN(x)) = x. */
8262 if (flag_unsafe_math_optimizations
8263 && ((func == mpfr_log
8264 && (fcode == BUILT_IN_EXP
8265 || fcode == BUILT_IN_EXPF
8266 || fcode == BUILT_IN_EXPL))
8267 || (func == mpfr_log2
8268 && (fcode == BUILT_IN_EXP2
8269 || fcode == BUILT_IN_EXP2F
8270 || fcode == BUILT_IN_EXP2L))
8271 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8272 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8273
8274 /* Optimize logN(func()) for various exponential functions. We
8275 want to determine the value "x" and the power "exponent" in
8276 order to transform logN(x**exponent) into exponent*logN(x). */
8277 if (flag_unsafe_math_optimizations)
8278 {
8279 tree exponent = 0, x = 0;
8280
8281 switch (fcode)
8282 {
8283 CASE_FLT_FN (BUILT_IN_EXP):
8284 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8285 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8286 dconst_e ()));
8287 exponent = CALL_EXPR_ARG (arg, 0);
8288 break;
8289 CASE_FLT_FN (BUILT_IN_EXP2):
8290 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8291 x = build_real (type, dconst2);
8292 exponent = CALL_EXPR_ARG (arg, 0);
8293 break;
8294 CASE_FLT_FN (BUILT_IN_EXP10):
8295 CASE_FLT_FN (BUILT_IN_POW10):
8296 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8297 {
8298 REAL_VALUE_TYPE dconst10;
8299 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8300 x = build_real (type, dconst10);
8301 }
8302 exponent = CALL_EXPR_ARG (arg, 0);
8303 break;
8304 CASE_FLT_FN (BUILT_IN_SQRT):
8305 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8306 x = CALL_EXPR_ARG (arg, 0);
8307 exponent = build_real (type, dconsthalf);
8308 break;
8309 CASE_FLT_FN (BUILT_IN_CBRT):
8310 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8311 x = CALL_EXPR_ARG (arg, 0);
8312 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8313 dconst_third ()));
8314 break;
8315 CASE_FLT_FN (BUILT_IN_POW):
8316 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8317 x = CALL_EXPR_ARG (arg, 0);
8318 exponent = CALL_EXPR_ARG (arg, 1);
8319 break;
8320 default:
8321 break;
8322 }
8323
8324 /* Now perform the optimization. */
8325 if (x && exponent)
8326 {
8327 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8328 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8329 }
8330 }
8331 }
8332
8333 return NULL_TREE;
8334 }
8335
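/* Editor's note: the logarithm rewrites above in source form, not part of
   builtins.c; all require -funsafe-math-optimizations.  */
#if 0
#include <math.h>

static double
demo_log (double x, double y)
{
  double a = log (exp (x));    /* -> x */
  double b = log2 (exp2 (x));  /* -> x */
  double c = log (pow (x, y)); /* -> y * log (x) */
  double d = log (sqrt (x));   /* -> 0.5 * log (x) */
  return a + b + c + d;
}
#endif
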
8336 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8337 NULL_TREE if no simplification can be made. */
8338
8339 static tree
8340 fold_builtin_hypot (location_t loc, tree fndecl,
8341 tree arg0, tree arg1, tree type)
8342 {
8343 tree res, narg0, narg1;
8344
8345 if (!validate_arg (arg0, REAL_TYPE)
8346 || !validate_arg (arg1, REAL_TYPE))
8347 return NULL_TREE;
8348
8349 /* Calculate the result when the argument is a constant. */
8350 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8351 return res;
8352
8353 /* If either argument to hypot has a negate or abs, strip that off.
8354 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8355 narg0 = fold_strip_sign_ops (arg0);
8356 narg1 = fold_strip_sign_ops (arg1);
8357 if (narg0 || narg1)
8358 {
8359 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8360 narg1 ? narg1 : arg1);
8361 }
8362
8363 /* If either argument is zero, hypot is fabs of the other. */
8364 if (real_zerop (arg0))
8365 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8366 else if (real_zerop (arg1))
8367 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8368
8369 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8370 if (flag_unsafe_math_optimizations
8371 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8372 {
8373 const REAL_VALUE_TYPE sqrt2_trunc
8374 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8375 return fold_build2_loc (loc, MULT_EXPR, type,
8376 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8377 build_real (type, sqrt2_trunc));
8378 }
8379
8380 return NULL_TREE;
8381 }
8382
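/* Editor's note: source-level forms of the hypot rewrites above, not part
   of builtins.c; the sqrt(2) case assumes -funsafe-math-optimizations.  */
#if 0
#include <math.h>

static double
demo_hypot (double x, double y)
{
  double a = hypot (-x, fabs (y)); /* -> hypot (x, y): sign ops stripped */
  double b = hypot (x, 0.0);       /* -> fabs (x) */
  double c = hypot (x, x);         /* -> fabs (x) * sqrt (2) */
  return a + b + c;
}
#endif
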
8383
8384 /* Fold a builtin function call to pow, powf, or powl. Return
8385 NULL_TREE if no simplification can be made. */
8386 static tree
8387 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8388 {
8389 tree res;
8390
8391 if (!validate_arg (arg0, REAL_TYPE)
8392 || !validate_arg (arg1, REAL_TYPE))
8393 return NULL_TREE;
8394
8395 /* Calculate the result when the argument is a constant. */
8396 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8397 return res;
8398
8399 /* Optimize pow(1.0,y) = 1.0. */
8400 if (real_onep (arg0))
8401 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8402
8403 if (TREE_CODE (arg1) == REAL_CST
8404 && !TREE_OVERFLOW (arg1))
8405 {
8406 REAL_VALUE_TYPE cint;
8407 REAL_VALUE_TYPE c;
8408 HOST_WIDE_INT n;
8409
8410 c = TREE_REAL_CST (arg1);
8411
8412 /* Optimize pow(x,0.0) = 1.0. */
8413 if (REAL_VALUES_EQUAL (c, dconst0))
8414 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8415 arg0);
8416
8417 /* Optimize pow(x,1.0) = x. */
8418 if (REAL_VALUES_EQUAL (c, dconst1))
8419 return arg0;
8420
8421 /* Optimize pow(x,-1.0) = 1.0/x. */
8422 if (REAL_VALUES_EQUAL (c, dconstm1))
8423 return fold_build2_loc (loc, RDIV_EXPR, type,
8424 build_real (type, dconst1), arg0);
8425
8426 /* Optimize pow(x,0.5) = sqrt(x). */
8427 if (flag_unsafe_math_optimizations
8428 && REAL_VALUES_EQUAL (c, dconsthalf))
8429 {
8430 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8431
8432 if (sqrtfn != NULL_TREE)
8433 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8434 }
8435
8436 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8437 if (flag_unsafe_math_optimizations)
8438 {
8439 const REAL_VALUE_TYPE dconstroot
8440 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8441
8442 if (REAL_VALUES_EQUAL (c, dconstroot))
8443 {
8444 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8445 if (cbrtfn != NULL_TREE)
8446 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8447 }
8448 }
8449
8450 /* Check for an integer exponent. */
8451 n = real_to_integer (&c);
8452 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8453 if (real_identical (&c, &cint))
8454 {
8455 /* Attempt to evaluate pow at compile-time, unless this should
8456 raise an exception. */
8457 if (TREE_CODE (arg0) == REAL_CST
8458 && !TREE_OVERFLOW (arg0)
8459 && (n > 0
8460 || (!flag_trapping_math && !flag_errno_math)
8461 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8462 {
8463 REAL_VALUE_TYPE x;
8464 bool inexact;
8465
8466 x = TREE_REAL_CST (arg0);
8467 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8468 if (flag_unsafe_math_optimizations || !inexact)
8469 return build_real (type, x);
8470 }
8471
8472 /* Strip sign ops from even integer powers. */
8473 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8474 {
8475 tree narg0 = fold_strip_sign_ops (arg0);
8476 if (narg0)
8477 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8478 }
8479 }
8480 }
8481
8482 if (flag_unsafe_math_optimizations)
8483 {
8484 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8485
8486 /* Optimize pow(expN(x),y) = expN(x*y). */
8487 if (BUILTIN_EXPONENT_P (fcode))
8488 {
8489 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8490 tree arg = CALL_EXPR_ARG (arg0, 0);
8491 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8492 return build_call_expr_loc (loc, expfn, 1, arg);
8493 }
8494
8495 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8496 if (BUILTIN_SQRT_P (fcode))
8497 {
8498 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8499 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8500 build_real (type, dconsthalf));
8501 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8502 }
8503
8504 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8505 if (BUILTIN_CBRT_P (fcode))
8506 {
8507 tree arg = CALL_EXPR_ARG (arg0, 0);
8508 if (tree_expr_nonnegative_p (arg))
8509 {
8510 const REAL_VALUE_TYPE dconstroot
8511 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8512 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8513 build_real (type, dconstroot));
8514 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8515 }
8516 }
8517
8518 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8519 if (fcode == BUILT_IN_POW
8520 || fcode == BUILT_IN_POWF
8521 || fcode == BUILT_IN_POWL)
8522 {
8523 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8524 if (tree_expr_nonnegative_p (arg00))
8525 {
8526 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8527 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8528 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8529 }
8530 }
8531 }
8532
8533 return NULL_TREE;
8534 }
8535
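/* Editor's note: representative pow rewrites from above in source form,
   not part of builtins.c.  The constant-exponent cases are unconditional;
   the rest assume -funsafe-math-optimizations (and nonnegativity where
   noted).  */
#if 0
#include <math.h>

static double
demo_pow (double x, double y, double z)
{
  double a = pow (x, 1.0);        /* -> x */
  double b = pow (x, -1.0);       /* -> 1.0 / x */
  double c = pow (x, 0.5);        /* -> sqrt (x) */
  double d = pow (exp (x), y);    /* -> exp (x * y) */
  double e = pow (pow (x, y), z); /* -> pow (x, y * z), x >= 0 */
  return a + b + c + d + e;
}
#endif
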
8536 /* Fold a builtin function call to powi, powif, or powil with arguments
8537 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8538 static tree
8539 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8540 tree arg0, tree arg1, tree type)
8541 {
8542 if (!validate_arg (arg0, REAL_TYPE)
8543 || !validate_arg (arg1, INTEGER_TYPE))
8544 return NULL_TREE;
8545
8546 /* Optimize powi(1.0,y) = 1.0. */
8547 if (real_onep (arg0))
8548 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8549
8550 if (tree_fits_shwi_p (arg1))
8551 {
8552 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8553
8554 /* Evaluate powi at compile-time. */
8555 if (TREE_CODE (arg0) == REAL_CST
8556 && !TREE_OVERFLOW (arg0))
8557 {
8558 REAL_VALUE_TYPE x;
8559 x = TREE_REAL_CST (arg0);
8560 real_powi (&x, TYPE_MODE (type), &x, c);
8561 return build_real (type, x);
8562 }
8563
8564 /* Optimize powi(x,0) = 1.0. */
8565 if (c == 0)
8566 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8567 arg0);
8568
8569 /* Optimize powi(x,1) = x. */
8570 if (c == 1)
8571 return arg0;
8572
8573 /* Optimize powi(x,-1) = 1.0/x. */
8574 if (c == -1)
8575 return fold_build2_loc (loc, RDIV_EXPR, type,
8576 build_real (type, dconst1), arg0);
8577 }
8578
8579 return NULL_TREE;
8580 }
8581
8582 /* A subroutine of fold_builtin to fold the various exponent
8583 functions. Return NULL_TREE if no simplification can be made.
8584 FUNC is the corresponding MPFR exponent function. */
8585
8586 static tree
8587 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8588 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8589 {
8590 if (validate_arg (arg, REAL_TYPE))
8591 {
8592 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8593 tree res;
8594
8595 /* Calculate the result when the argument is a constant. */
8596 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8597 return res;
8598
8599 /* Optimize expN(logN(x)) = x. */
8600 if (flag_unsafe_math_optimizations)
8601 {
8602 const enum built_in_function fcode = builtin_mathfn_code (arg);
8603
8604 if ((func == mpfr_exp
8605 && (fcode == BUILT_IN_LOG
8606 || fcode == BUILT_IN_LOGF
8607 || fcode == BUILT_IN_LOGL))
8608 || (func == mpfr_exp2
8609 && (fcode == BUILT_IN_LOG2
8610 || fcode == BUILT_IN_LOG2F
8611 || fcode == BUILT_IN_LOG2L))
8612 || (func == mpfr_exp10
8613 && (fcode == BUILT_IN_LOG10
8614 || fcode == BUILT_IN_LOG10F
8615 || fcode == BUILT_IN_LOG10L)))
8616 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8617 }
8618 }
8619
8620 return NULL_TREE;
8621 }
8622
8623 /* Return true if VAR is a VAR_DECL or a component thereof. */
8624
8625 static bool
8626 var_decl_component_p (tree var)
8627 {
8628 tree inner = var;
8629 while (handled_component_p (inner))
8630 inner = TREE_OPERAND (inner, 0);
8631 return SSA_VAR_P (inner);
8632 }
8633
8634 /* Fold function call to builtin memset. Return
8635 NULL_TREE if no simplification can be made. */
8636
8637 static tree
8638 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8639 tree type, bool ignore)
8640 {
8641 tree var, ret, etype;
8642 unsigned HOST_WIDE_INT length, cval;
8643
8644 if (! validate_arg (dest, POINTER_TYPE)
8645 || ! validate_arg (c, INTEGER_TYPE)
8646 || ! validate_arg (len, INTEGER_TYPE))
8647 return NULL_TREE;
8648
8649 if (! host_integerp (len, 1))
8650 return NULL_TREE;
8651
8652 /* If the LEN parameter is zero, return DEST. */
8653 if (integer_zerop (len))
8654 return omit_one_operand_loc (loc, type, dest, c);
8655
8656 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8657 return NULL_TREE;
8658
8659 var = dest;
8660 STRIP_NOPS (var);
8661 if (TREE_CODE (var) != ADDR_EXPR)
8662 return NULL_TREE;
8663
8664 var = TREE_OPERAND (var, 0);
8665 if (TREE_THIS_VOLATILE (var))
8666 return NULL_TREE;
8667
8668 etype = TREE_TYPE (var);
8669 if (TREE_CODE (etype) == ARRAY_TYPE)
8670 etype = TREE_TYPE (etype);
8671
8672 if (!INTEGRAL_TYPE_P (etype)
8673 && !POINTER_TYPE_P (etype))
8674 return NULL_TREE;
8675
8676 if (! var_decl_component_p (var))
8677 return NULL_TREE;
8678
8679 length = tree_low_cst (len, 1);
8680 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8681 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8682 return NULL_TREE;
8683
8684 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8685 return NULL_TREE;
8686
8687 if (integer_zerop (c))
8688 cval = 0;
8689 else
8690 {
8691 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8692 return NULL_TREE;
8693
8694 cval = TREE_INT_CST_LOW (c);
8695 cval &= 0xff;
8696 cval |= cval << 8;
8697 cval |= cval << 16;
8698 cval |= (cval << 31) << 1;
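/* CVAL now holds C replicated into every byte, e.g. 0xAB becomes
   0xABABABABABABABAB with a 64-bit HOST_WIDE_INT.  The final shift
   is split as (cval << 31) << 1 so it stays well defined when
   HOST_WIDE_INT is only 32 bits wide, where a plain << 32 would be
   undefined.  */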
8699 }
8700
8701 ret = build_int_cst_type (etype, cval);
8702 var = build_fold_indirect_ref_loc (loc,
8703 fold_convert_loc (loc,
8704 build_pointer_type (etype),
8705 dest));
8706 ret = build2 (MODIFY_EXPR, etype, var, ret);
8707 if (ignore)
8708 return ret;
8709
8710 return omit_one_operand_loc (loc, type, dest, ret);
8711 }
8712
8713 /* Fold function call to builtin bzero.  Return
8714 NULL_TREE if no simplification can be made.  */
8715
8716 static tree
8717 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8718 {
8719 if (! validate_arg (dest, POINTER_TYPE)
8720 || ! validate_arg (size, INTEGER_TYPE))
8721 return NULL_TREE;
8722
8723 if (!ignore)
8724 return NULL_TREE;
8725
8726 /* New argument list transforming bzero(ptr x, int y) to
8727 memset(ptr x, int 0, size_t y).  This is done this way so that
8728 if the call isn't expanded inline, we fall back to calling bzero
8729 instead of memset.  */
8730
8731 return fold_builtin_memset (loc, dest, integer_zero_node,
8732 fold_convert_loc (loc, size_type_node, size),
8733 void_type_node, ignore);
8734 }
8735
8736 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8737 NULL_TREE if no simplification can be made.
8738 If ENDP is 0, return DEST (like memcpy).
8739 If ENDP is 1, return DEST+LEN (like mempcpy).
8740 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8741 If ENDP is 3, return DEST; additionally, *SRC and *DEST may overlap
8742 (memmove). */
8743
8744 static tree
8745 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8746 tree len, tree type, bool ignore, int endp)
8747 {
8748 tree destvar, srcvar, expr;
8749
8750 if (! validate_arg (dest, POINTER_TYPE)
8751 || ! validate_arg (src, POINTER_TYPE)
8752 || ! validate_arg (len, INTEGER_TYPE))
8753 return NULL_TREE;
8754
8755 /* If the LEN parameter is zero, return DEST. */
8756 if (integer_zerop (len))
8757 return omit_one_operand_loc (loc, type, dest, src);
8758
8759 /* If SRC and DEST are the same (and not volatile), return
8760 DEST{,+LEN,+LEN-1}. */
8761 if (operand_equal_p (src, dest, 0))
8762 expr = len;
8763 else
8764 {
8765 tree srctype, desttype;
8766 unsigned int src_align, dest_align;
8767 tree off0;
8768
8769 if (endp == 3)
8770 {
8771 src_align = get_pointer_alignment (src);
8772 dest_align = get_pointer_alignment (dest);
8773
8774 /* Both DEST and SRC must be pointer types.
8775 ??? This is what the old code did.  Is testing for pointer types
8776 really mandatory?
8777 If SRC is readonly, or both pointers are aligned to at least LEN
8778 bytes (the regions then coincide exactly or are disjoint), we can use memcpy.  */
8779 if (!dest_align || !src_align)
8780 return NULL_TREE;
8781 if (readonly_data_expr (src)
8782 || (host_integerp (len, 1)
8783 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8784 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8785 {
8786 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8787 if (!fn)
8788 return NULL_TREE;
8789 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8790 }
8791
8792 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8793 if (TREE_CODE (src) == ADDR_EXPR
8794 && TREE_CODE (dest) == ADDR_EXPR)
8795 {
8796 tree src_base, dest_base, fn;
8797 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8798 HOST_WIDE_INT size = -1;
8799 HOST_WIDE_INT maxsize = -1;
8800
8801 srcvar = TREE_OPERAND (src, 0);
8802 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8803 &size, &maxsize);
8804 destvar = TREE_OPERAND (dest, 0);
8805 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8806 &size, &maxsize);
8807 if (host_integerp (len, 1))
8808 maxsize = tree_low_cst (len, 1);
8809 else
8810 maxsize = -1;
8811 src_offset /= BITS_PER_UNIT;
8812 dest_offset /= BITS_PER_UNIT;
8813 if (SSA_VAR_P (src_base)
8814 && SSA_VAR_P (dest_base))
8815 {
8816 if (operand_equal_p (src_base, dest_base, 0)
8817 && ranges_overlap_p (src_offset, maxsize,
8818 dest_offset, maxsize))
8819 return NULL_TREE;
8820 }
8821 else if (TREE_CODE (src_base) == MEM_REF
8822 && TREE_CODE (dest_base) == MEM_REF)
8823 {
8824 double_int off;
8825 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8826 TREE_OPERAND (dest_base, 0), 0))
8827 return NULL_TREE;
8828 off = mem_ref_offset (src_base) +
8829 double_int::from_shwi (src_offset);
8830 if (!off.fits_shwi ())
8831 return NULL_TREE;
8832 src_offset = off.low;
8833 off = mem_ref_offset (dest_base) +
8834 double_int::from_shwi (dest_offset);
8835 if (!off.fits_shwi ())
8836 return NULL_TREE;
8837 dest_offset = off.low;
8838 if (ranges_overlap_p (src_offset, maxsize,
8839 dest_offset, maxsize))
8840 return NULL_TREE;
8841 }
8842 else
8843 return NULL_TREE;
8844
8845 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8846 if (!fn)
8847 return NULL_TREE;
8848 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8849 }
8850
8851 /* If the destination and source do not alias, optimize into
8852 memcpy as well.  */
8853 if ((is_gimple_min_invariant (dest)
8854 || TREE_CODE (dest) == SSA_NAME)
8855 && (is_gimple_min_invariant (src)
8856 || TREE_CODE (src) == SSA_NAME))
8857 {
8858 ao_ref destr, srcr;
8859 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8860 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8861 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8862 {
8863 tree fn;
8864 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8865 if (!fn)
8866 return NULL_TREE;
8867 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8868 }
8869 }
8870
8871 return NULL_TREE;
8872 }
8873
8874 if (!tree_fits_shwi_p (len))
8875 return NULL_TREE;
8876 /* FIXME:
8877 This logic loses for arguments like (type *)malloc (sizeof (type)),
8878 since we strip the casts back down to the VOID return type of malloc.
8879 Perhaps we ought to inherit the type from a non-VOID argument here?  */
8880 STRIP_NOPS (src);
8881 STRIP_NOPS (dest);
8882 if (!POINTER_TYPE_P (TREE_TYPE (src))
8883 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8884 return NULL_TREE;
8885 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here.  */
8886 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8887 {
8888 tree tem = TREE_OPERAND (src, 0);
8889 STRIP_NOPS (tem);
8890 if (tem != TREE_OPERAND (src, 0))
8891 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8892 }
8893 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8894 {
8895 tree tem = TREE_OPERAND (dest, 0);
8896 STRIP_NOPS (tem);
8897 if (tem != TREE_OPERAND (dest, 0))
8898 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8899 }
8900 srctype = TREE_TYPE (TREE_TYPE (src));
8901 if (TREE_CODE (srctype) == ARRAY_TYPE
8902 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8903 {
8904 srctype = TREE_TYPE (srctype);
8905 STRIP_NOPS (src);
8906 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8907 }
8908 desttype = TREE_TYPE (TREE_TYPE (dest));
8909 if (TREE_CODE (desttype) == ARRAY_TYPE
8910 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8911 {
8912 desttype = TREE_TYPE (desttype);
8913 STRIP_NOPS (dest);
8914 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8915 }
8916 if (TREE_ADDRESSABLE (srctype)
8917 || TREE_ADDRESSABLE (desttype))
8918 return NULL_TREE;
8919
8920 src_align = get_pointer_alignment (src);
8921 dest_align = get_pointer_alignment (dest);
8922 if (dest_align < TYPE_ALIGN (desttype)
8923 || src_align < TYPE_ALIGN (srctype))
8924 return NULL_TREE;
8925
8926 if (!ignore)
8927 dest = builtin_save_expr (dest);
8928
8929 /* Build accesses at offset zero with a ref-all character type. */
8930 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8931 ptr_mode, true), 0);
8932
8933 destvar = dest;
8934 STRIP_NOPS (destvar);
8935 if (TREE_CODE (destvar) == ADDR_EXPR
8936 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8937 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8938 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8939 else
8940 destvar = NULL_TREE;
8941
8942 srcvar = src;
8943 STRIP_NOPS (srcvar);
8944 if (TREE_CODE (srcvar) == ADDR_EXPR
8945 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8946 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8947 {
8948 if (!destvar
8949 || src_align >= TYPE_ALIGN (desttype))
8950 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8951 srcvar, off0);
8952 else if (!STRICT_ALIGNMENT)
8953 {
8954 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8955 src_align);
8956 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8957 }
8958 else
8959 srcvar = NULL_TREE;
8960 }
8961 else
8962 srcvar = NULL_TREE;
8963
8964 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8965 return NULL_TREE;
8966
8967 if (srcvar == NULL_TREE)
8968 {
8969 STRIP_NOPS (src);
8970 if (src_align >= TYPE_ALIGN (desttype))
8971 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8972 else
8973 {
8974 if (STRICT_ALIGNMENT)
8975 return NULL_TREE;
8976 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8977 src_align);
8978 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8979 }
8980 }
8981 else if (destvar == NULL_TREE)
8982 {
8983 STRIP_NOPS (dest);
8984 if (dest_align >= TYPE_ALIGN (srctype))
8985 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8986 else
8987 {
8988 if (STRICT_ALIGNMENT)
8989 return NULL_TREE;
8990 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8991 dest_align);
8992 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8993 }
8994 }
8995
8996 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8997 }
8998
8999 if (ignore)
9000 return expr;
9001
9002 if (endp == 0 || endp == 3)
9003 return omit_one_operand_loc (loc, type, dest, expr);
9004
9005 if (expr == len)
9006 expr = NULL_TREE;
9007
9008 if (endp == 2)
9009 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9010 ssize_int (1));
9011
9012 dest = fold_build_pointer_plus_loc (loc, dest, len);
9013 dest = fold_convert_loc (loc, type, dest);
9014 if (expr)
9015 dest = omit_one_operand_loc (loc, type, dest, expr);
9016 return dest;
9017 }
9018
9019 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9020 If LEN is not NULL, it represents the length of the string to be
9021 copied. Return NULL_TREE if no simplification can be made. */
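/* For example, with a constant source, strcpy (d, "abc") is folded
   to memcpy (d, "abc", 4): c_strlen gives 3 and one extra byte
   copies the terminating NUL.  */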
9022
9023 tree
9024 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9025 {
9026 tree fn;
9027
9028 if (!validate_arg (dest, POINTER_TYPE)
9029 || !validate_arg (src, POINTER_TYPE))
9030 return NULL_TREE;
9031
9032 /* If SRC and DEST are the same (and not volatile), return DEST. */
9033 if (operand_equal_p (src, dest, 0))
9034 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9035
9036 if (optimize_function_for_size_p (cfun))
9037 return NULL_TREE;
9038
9039 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9040 if (!fn)
9041 return NULL_TREE;
9042
9043 if (!len)
9044 {
9045 len = c_strlen (src, 1);
9046 if (! len || TREE_SIDE_EFFECTS (len))
9047 return NULL_TREE;
9048 }
9049
9050 len = fold_convert_loc (loc, size_type_node, len);
9051 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9052 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9053 build_call_expr_loc (loc, fn, 3, dest, src, len));
9054 }
9055
9056 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9057 Return NULL_TREE if no simplification can be made. */
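/* For example, stpcpy (d, "abc") is folded to the equivalent of
   (memcpy (d, "abc", 4), d + 3), yielding a pointer to the
   terminating NUL.  */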
9058
9059 static tree
9060 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9061 {
9062 tree fn, len, lenp1, call, type;
9063
9064 if (!validate_arg (dest, POINTER_TYPE)
9065 || !validate_arg (src, POINTER_TYPE))
9066 return NULL_TREE;
9067
9068 len = c_strlen (src, 1);
9069 if (!len
9070 || TREE_CODE (len) != INTEGER_CST)
9071 return NULL_TREE;
9072
9073 if (optimize_function_for_size_p (cfun)
9074 /* If the length is zero, it's small enough.  */
9075 && !integer_zerop (len))
9076 return NULL_TREE;
9077
9078 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9079 if (!fn)
9080 return NULL_TREE;
9081
9082 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9083 fold_convert_loc (loc, size_type_node, len),
9084 build_int_cst (size_type_node, 1));
9085 /* We use dest twice in building our expression. Save it from
9086 multiple expansions. */
9087 dest = builtin_save_expr (dest);
9088 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9089
9090 type = TREE_TYPE (TREE_TYPE (fndecl));
9091 dest = fold_build_pointer_plus_loc (loc, dest, len);
9092 dest = fold_convert_loc (loc, type, dest);
9093 dest = omit_one_operand_loc (loc, type, dest, call);
9094 return dest;
9095 }
9096
9097 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9098 If SLEN is not NULL, it represents the length of the source string.
9099 Return NULL_TREE if no simplification can be made. */
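/* For example, strncpy (d, "ab", 3) is folded to memcpy (d, "ab", 3).
   The fold is only done when LEN <= strlen (SRC) + 1, since a larger
   LEN would require zero padding that memcpy does not provide.  */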
9100
9101 tree
9102 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9103 tree src, tree len, tree slen)
9104 {
9105 tree fn;
9106
9107 if (!validate_arg (dest, POINTER_TYPE)
9108 || !validate_arg (src, POINTER_TYPE)
9109 || !validate_arg (len, INTEGER_TYPE))
9110 return NULL_TREE;
9111
9112 /* If the LEN parameter is zero, return DEST. */
9113 if (integer_zerop (len))
9114 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9115
9116 /* We can't compare slen with len as constants below if len is not a
9117 constant. */
9118 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9119 return NULL_TREE;
9120
9121 if (!slen)
9122 slen = c_strlen (src, 1);
9123
9124 /* Now, we must be passed a constant src ptr parameter. */
9125 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9126 return NULL_TREE;
9127
9128 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9129
9130 /* We do not support simplification of this case, though we do
9131 support it when expanding trees into RTL. */
9132 /* FIXME: generate a call to __builtin_memset. */
9133 if (tree_int_cst_lt (slen, len))
9134 return NULL_TREE;
9135
9136 /* OK, transform into builtin memcpy.  */
9137 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9138 if (!fn)
9139 return NULL_TREE;
9140
9141 len = fold_convert_loc (loc, size_type_node, len);
9142 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9143 build_call_expr_loc (loc, fn, 3, dest, src, len));
9144 }
9145
9146 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9147 arguments to the call, and TYPE is its return type.
9148 Return NULL_TREE if no simplification can be made. */
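/* For example, memchr ("hello", 'l', 6) is folded at compile time
   to the pointer expression ARG1 + 2, while a miss such as
   memchr ("hello", 'z', 6) folds to a null pointer.  */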
9149
9150 static tree
9151 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9152 {
9153 if (!validate_arg (arg1, POINTER_TYPE)
9154 || !validate_arg (arg2, INTEGER_TYPE)
9155 || !validate_arg (len, INTEGER_TYPE))
9156 return NULL_TREE;
9157 else
9158 {
9159 const char *p1;
9160
9161 if (TREE_CODE (arg2) != INTEGER_CST
9162 || !host_integerp (len, 1))
9163 return NULL_TREE;
9164
9165 p1 = c_getstr (arg1);
9166 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9167 {
9168 char c;
9169 const char *r;
9170 tree tem;
9171
9172 if (target_char_cast (arg2, &c))
9173 return NULL_TREE;
9174
9175 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9176
9177 if (r == NULL)
9178 return build_int_cst (TREE_TYPE (arg1), 0);
9179
9180 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9181 return fold_convert_loc (loc, type, tem);
9182 }
9183 return NULL_TREE;
9184 }
9185 }
9186
9187 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9188 Return NULL_TREE if no simplification can be made. */
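/* For example, memcmp ("abc", "abd", 3) is folded at compile time
   to -1; the result is normalized to -1, 0 or 1 rather than using
   the raw host memcmp value.  */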
9189
9190 static tree
9191 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9192 {
9193 const char *p1, *p2;
9194
9195 if (!validate_arg (arg1, POINTER_TYPE)
9196 || !validate_arg (arg2, POINTER_TYPE)
9197 || !validate_arg (len, INTEGER_TYPE))
9198 return NULL_TREE;
9199
9200 /* If the LEN parameter is zero, return zero. */
9201 if (integer_zerop (len))
9202 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9203 arg1, arg2);
9204
9205 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9206 if (operand_equal_p (arg1, arg2, 0))
9207 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9208
9209 p1 = c_getstr (arg1);
9210 p2 = c_getstr (arg2);
9211
9212 /* If all arguments are constant, and the value of len is not greater
9213 than the lengths of arg1 and arg2, evaluate at compile-time. */
9214 if (host_integerp (len, 1) && p1 && p2
9215 && compare_tree_int (len, strlen (p1) + 1) <= 0
9216 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9217 {
9218 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9219
9220 if (r > 0)
9221 return integer_one_node;
9222 else if (r < 0)
9223 return integer_minus_one_node;
9224 else
9225 return integer_zero_node;
9226 }
9227
9228 /* If the len parameter is one, return an expression corresponding to
9229 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
9230 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9231 {
9232 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9233 tree cst_uchar_ptr_node
9234 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9235
9236 tree ind1
9237 = fold_convert_loc (loc, integer_type_node,
9238 build1 (INDIRECT_REF, cst_uchar_node,
9239 fold_convert_loc (loc,
9240 cst_uchar_ptr_node,
9241 arg1)));
9242 tree ind2
9243 = fold_convert_loc (loc, integer_type_node,
9244 build1 (INDIRECT_REF, cst_uchar_node,
9245 fold_convert_loc (loc,
9246 cst_uchar_ptr_node,
9247 arg2)));
9248 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9249 }
9250
9251 return NULL_TREE;
9252 }
9253
9254 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9255 Return NULL_TREE if no simplification can be made. */
9256
9257 static tree
9258 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9259 {
9260 const char *p1, *p2;
9261
9262 if (!validate_arg (arg1, POINTER_TYPE)
9263 || !validate_arg (arg2, POINTER_TYPE))
9264 return NULL_TREE;
9265
9266 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9267 if (operand_equal_p (arg1, arg2, 0))
9268 return integer_zero_node;
9269
9270 p1 = c_getstr (arg1);
9271 p2 = c_getstr (arg2);
9272
9273 if (p1 && p2)
9274 {
9275 const int i = strcmp (p1, p2);
9276 if (i < 0)
9277 return integer_minus_one_node;
9278 else if (i > 0)
9279 return integer_one_node;
9280 else
9281 return integer_zero_node;
9282 }
9283
9284 /* If the second arg is "", return *(const unsigned char*)arg1. */
9285 if (p2 && *p2 == '\0')
9286 {
9287 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9288 tree cst_uchar_ptr_node
9289 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9290
9291 return fold_convert_loc (loc, integer_type_node,
9292 build1 (INDIRECT_REF, cst_uchar_node,
9293 fold_convert_loc (loc,
9294 cst_uchar_ptr_node,
9295 arg1)));
9296 }
9297
9298 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9299 if (p1 && *p1 == '\0')
9300 {
9301 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9302 tree cst_uchar_ptr_node
9303 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9304
9305 tree temp
9306 = fold_convert_loc (loc, integer_type_node,
9307 build1 (INDIRECT_REF, cst_uchar_node,
9308 fold_convert_loc (loc,
9309 cst_uchar_ptr_node,
9310 arg2)));
9311 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9312 }
9313
9314 return NULL_TREE;
9315 }
9316
9317 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9318 Return NULL_TREE if no simplification can be made. */
9319
9320 static tree
9321 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9322 {
9323 const char *p1, *p2;
9324
9325 if (!validate_arg (arg1, POINTER_TYPE)
9326 || !validate_arg (arg2, POINTER_TYPE)
9327 || !validate_arg (len, INTEGER_TYPE))
9328 return NULL_TREE;
9329
9330 /* If the LEN parameter is zero, return zero. */
9331 if (integer_zerop (len))
9332 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9333 arg1, arg2);
9334
9335 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9336 if (operand_equal_p (arg1, arg2, 0))
9337 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9338
9339 p1 = c_getstr (arg1);
9340 p2 = c_getstr (arg2);
9341
9342 if (host_integerp (len, 1) && p1 && p2)
9343 {
9344 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9345 if (i > 0)
9346 return integer_one_node;
9347 else if (i < 0)
9348 return integer_minus_one_node;
9349 else
9350 return integer_zero_node;
9351 }
9352
9353 /* If the second arg is "", and the length is greater than zero,
9354 return *(const unsigned char*)arg1. */
9355 if (p2 && *p2 == '\0'
9356 && TREE_CODE (len) == INTEGER_CST
9357 && tree_int_cst_sgn (len) == 1)
9358 {
9359 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9360 tree cst_uchar_ptr_node
9361 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9362
9363 return fold_convert_loc (loc, integer_type_node,
9364 build1 (INDIRECT_REF, cst_uchar_node,
9365 fold_convert_loc (loc,
9366 cst_uchar_ptr_node,
9367 arg1)));
9368 }
9369
9370 /* If the first arg is "", and the length is greater than zero,
9371 return -*(const unsigned char*)arg2. */
9372 if (p1 && *p1 == '\0'
9373 && TREE_CODE (len) == INTEGER_CST
9374 && tree_int_cst_sgn (len) == 1)
9375 {
9376 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9377 tree cst_uchar_ptr_node
9378 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9379
9380 tree temp = fold_convert_loc (loc, integer_type_node,
9381 build1 (INDIRECT_REF, cst_uchar_node,
9382 fold_convert_loc (loc,
9383 cst_uchar_ptr_node,
9384 arg2)));
9385 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9386 }
9387
9388 /* If the len parameter is one, return an expression corresponding to
9389 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
9390 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9391 {
9392 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9393 tree cst_uchar_ptr_node
9394 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9395
9396 tree ind1 = fold_convert_loc (loc, integer_type_node,
9397 build1 (INDIRECT_REF, cst_uchar_node,
9398 fold_convert_loc (loc,
9399 cst_uchar_ptr_node,
9400 arg1)));
9401 tree ind2 = fold_convert_loc (loc, integer_type_node,
9402 build1 (INDIRECT_REF, cst_uchar_node,
9403 fold_convert_loc (loc,
9404 cst_uchar_ptr_node,
9405 arg2)));
9406 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9407 }
9408
9409 return NULL_TREE;
9410 }
9411
9412 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9413 ARG. Return NULL_TREE if no simplification can be made. */
9414
9415 static tree
9416 fold_builtin_signbit (location_t loc, tree arg, tree type)
9417 {
9418 if (!validate_arg (arg, REAL_TYPE))
9419 return NULL_TREE;
9420
9421 /* If ARG is a compile-time constant, determine the result. */
9422 if (TREE_CODE (arg) == REAL_CST
9423 && !TREE_OVERFLOW (arg))
9424 {
9425 REAL_VALUE_TYPE c;
9426
9427 c = TREE_REAL_CST (arg);
9428 return (REAL_VALUE_NEGATIVE (c)
9429 ? build_one_cst (type)
9430 : build_zero_cst (type));
9431 }
9432
9433 /* If ARG is non-negative, the result is always zero. */
9434 if (tree_expr_nonnegative_p (arg))
9435 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9436
9437 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9438 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9439 return fold_convert (type,
9440 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9441 build_real (TREE_TYPE (arg), dconst0)));
9442
9443 return NULL_TREE;
9444 }
9445
9446 /* Fold function call to builtin copysign, copysignf or copysignl with
9447 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9448 be made. */
9449
9450 static tree
9451 fold_builtin_copysign (location_t loc, tree fndecl,
9452 tree arg1, tree arg2, tree type)
9453 {
9454 tree tem;
9455
9456 if (!validate_arg (arg1, REAL_TYPE)
9457 || !validate_arg (arg2, REAL_TYPE))
9458 return NULL_TREE;
9459
9460 /* copysign(X,X) is X. */
9461 if (operand_equal_p (arg1, arg2, 0))
9462 return fold_convert_loc (loc, type, arg1);
9463
9464 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9465 if (TREE_CODE (arg1) == REAL_CST
9466 && TREE_CODE (arg2) == REAL_CST
9467 && !TREE_OVERFLOW (arg1)
9468 && !TREE_OVERFLOW (arg2))
9469 {
9470 REAL_VALUE_TYPE c1, c2;
9471
9472 c1 = TREE_REAL_CST (arg1);
9473 c2 = TREE_REAL_CST (arg2);
9474 /* c1.sign := c2.sign. */
9475 real_copysign (&c1, &c2);
9476 return build_real (type, c1);
9477 }
9478
9479 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9480 Remember to evaluate Y for side-effects. */
9481 if (tree_expr_nonnegative_p (arg2))
9482 return omit_one_operand_loc (loc, type,
9483 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9484 arg2);
9485
9486 /* Strip sign changing operations for the first argument. */
9487 tem = fold_strip_sign_ops (arg1);
9488 if (tem)
9489 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9490
9491 return NULL_TREE;
9492 }
9493
9494 /* Fold a call to builtin isascii with argument ARG. */
9495
9496 static tree
9497 fold_builtin_isascii (location_t loc, tree arg)
9498 {
9499 if (!validate_arg (arg, INTEGER_TYPE))
9500 return NULL_TREE;
9501 else
9502 {
9503 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9504 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9505 build_int_cst (integer_type_node,
9506 ~ (unsigned HOST_WIDE_INT) 0x7f));
9507 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9508 arg, integer_zero_node);
9509 }
9510 }
9511
9512 /* Fold a call to builtin toascii with argument ARG. */
9513
9514 static tree
9515 fold_builtin_toascii (location_t loc, tree arg)
9516 {
9517 if (!validate_arg (arg, INTEGER_TYPE))
9518 return NULL_TREE;
9519
9520 /* Transform toascii(c) -> (c & 0x7f). */
9521 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9522 build_int_cst (integer_type_node, 0x7f));
9523 }
9524
9525 /* Fold a call to builtin isdigit with argument ARG. */
9526
9527 static tree
9528 fold_builtin_isdigit (location_t loc, tree arg)
9529 {
9530 if (!validate_arg (arg, INTEGER_TYPE))
9531 return NULL_TREE;
9532 else
9533 {
9534 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9535 /* According to the C standard, isdigit is unaffected by locale.
9536 However, it definitely is affected by the target character set. */
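/* On an ASCII target this produces (unsigned) c - 48 <= 9.  */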
9537 unsigned HOST_WIDE_INT target_digit0
9538 = lang_hooks.to_target_charset ('0');
9539
9540 if (target_digit0 == 0)
9541 return NULL_TREE;
9542
9543 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9544 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9545 build_int_cst (unsigned_type_node, target_digit0));
9546 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9547 build_int_cst (unsigned_type_node, 9));
9548 }
9549 }
9550
9551 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9552
9553 static tree
9554 fold_builtin_fabs (location_t loc, tree arg, tree type)
9555 {
9556 if (!validate_arg (arg, REAL_TYPE))
9557 return NULL_TREE;
9558
9559 arg = fold_convert_loc (loc, type, arg);
9560 if (TREE_CODE (arg) == REAL_CST)
9561 return fold_abs_const (arg, type);
9562 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9563 }
9564
9565 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9566
9567 static tree
9568 fold_builtin_abs (location_t loc, tree arg, tree type)
9569 {
9570 if (!validate_arg (arg, INTEGER_TYPE))
9571 return NULL_TREE;
9572
9573 arg = fold_convert_loc (loc, type, arg);
9574 if (TREE_CODE (arg) == INTEGER_CST)
9575 return fold_abs_const (arg, type);
9576 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9577 }
9578
9579 /* Fold a fma operation with arguments ARG[012]. */
9580
9581 tree
9582 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9583 tree type, tree arg0, tree arg1, tree arg2)
9584 {
9585 if (TREE_CODE (arg0) == REAL_CST
9586 && TREE_CODE (arg1) == REAL_CST
9587 && TREE_CODE (arg2) == REAL_CST)
9588 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9589
9590 return NULL_TREE;
9591 }
9592
9593 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9594
9595 static tree
9596 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9597 {
9598 if (validate_arg (arg0, REAL_TYPE)
9599 && validate_arg (arg1, REAL_TYPE)
9600 && validate_arg (arg2, REAL_TYPE))
9601 {
9602 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9603 if (tem)
9604 return tem;
9605
9606 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9607 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9608 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9609 }
9610 return NULL_TREE;
9611 }
9612
9613 /* Fold a call to builtin fmin or fmax. */
9614
9615 static tree
9616 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9617 tree type, bool max)
9618 {
9619 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9620 {
9621 /* Calculate the result when the argument is a constant. */
9622 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9623
9624 if (res)
9625 return res;
9626
9627 /* If either argument is NaN, return the other one. Avoid the
9628 transformation if we get (and honor) a signalling NaN. Using
9629 omit_one_operand() ensures we create a non-lvalue. */
9630 if (TREE_CODE (arg0) == REAL_CST
9631 && real_isnan (&TREE_REAL_CST (arg0))
9632 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9633 || ! TREE_REAL_CST (arg0).signalling))
9634 return omit_one_operand_loc (loc, type, arg1, arg0);
9635 if (TREE_CODE (arg1) == REAL_CST
9636 && real_isnan (&TREE_REAL_CST (arg1))
9637 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9638 || ! TREE_REAL_CST (arg1).signalling))
9639 return omit_one_operand_loc (loc, type, arg0, arg1);
9640
9641 /* Transform fmin/fmax(x,x) -> x. */
9642 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9643 return omit_one_operand_loc (loc, type, arg0, arg1);
9644
9645 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9646 functions to return the numeric arg if the other one is NaN.
9647 These tree codes don't honor that, so only transform if
9648 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9649 handled, so we don't have to worry about it either. */
9650 if (flag_finite_math_only)
9651 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9652 fold_convert_loc (loc, type, arg0),
9653 fold_convert_loc (loc, type, arg1));
9654 }
9655 return NULL_TREE;
9656 }
9657
9658 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9659
9660 static tree
9661 fold_builtin_carg (location_t loc, tree arg, tree type)
9662 {
9663 if (validate_arg (arg, COMPLEX_TYPE)
9664 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9665 {
9666 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9667
9668 if (atan2_fn)
9669 {
9670 tree new_arg = builtin_save_expr (arg);
9671 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9672 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9673 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9674 }
9675 }
9676
9677 return NULL_TREE;
9678 }
9679
9680 /* Fold a call to builtin logb/ilogb. */
9681
9682 static tree
9683 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9684 {
9685 if (! validate_arg (arg, REAL_TYPE))
9686 return NULL_TREE;
9687
9688 STRIP_NOPS (arg);
9689
9690 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9691 {
9692 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9693
9694 switch (value->cl)
9695 {
9696 case rvc_nan:
9697 case rvc_inf:
9698 /* If arg is Inf or NaN and we're logb, return it. */
9699 if (TREE_CODE (rettype) == REAL_TYPE)
9700 {
9701 /* For logb(-Inf) we have to return +Inf. */
9702 if (real_isinf (value) && real_isneg (value))
9703 {
9704 REAL_VALUE_TYPE tem;
9705 real_inf (&tem);
9706 return build_real (rettype, tem);
9707 }
9708 return fold_convert_loc (loc, rettype, arg);
9709 }
9710 /* Fall through... */
9711 case rvc_zero:
9712 /* Zero may set errno and/or raise an exception for logb; also,
9713 for ilogb we don't know the target's FP_ILOGB0 value.  */
9714 return NULL_TREE;
9715 case rvc_normal:
9716 /* For normal numbers, proceed iff radix == 2. In GCC,
9717 normalized significands are in the range [0.5, 1.0). We
9718 want the exponent as if they were [1.0, 2.0) so get the
9719 exponent and subtract 1. */
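/* E.g. 8.0 is represented as 0.5 * 2**4, so REAL_EXP is 4 and
   logb (8.0) folds to 3.  */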
9720 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9721 return fold_convert_loc (loc, rettype,
9722 build_int_cst (integer_type_node,
9723 REAL_EXP (value)-1));
9724 break;
9725 }
9726 }
9727
9728 return NULL_TREE;
9729 }
9730
9731 /* Fold a call to builtin significand, if radix == 2. */
9732
9733 static tree
9734 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9735 {
9736 if (! validate_arg (arg, REAL_TYPE))
9737 return NULL_TREE;
9738
9739 STRIP_NOPS (arg);
9740
9741 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9742 {
9743 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9744
9745 switch (value->cl)
9746 {
9747 case rvc_zero:
9748 case rvc_nan:
9749 case rvc_inf:
9750 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9751 return fold_convert_loc (loc, rettype, arg);
9752 case rvc_normal:
9753 /* For normal numbers, proceed iff radix == 2. */
9754 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9755 {
9756 REAL_VALUE_TYPE result = *value;
9757 /* In GCC, normalized significands are in the range [0.5,
9758 1.0). We want them to be [1.0, 2.0) so set the
9759 exponent to 1. */
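/* E.g. 12.0 is represented as 0.75 * 2**4, so forcing the
   exponent to 1 folds significand (12.0) to 1.5.  */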
9760 SET_REAL_EXP (&result, 1);
9761 return build_real (rettype, result);
9762 }
9763 break;
9764 }
9765 }
9766
9767 return NULL_TREE;
9768 }
9769
9770 /* Fold a call to builtin frexp, we can assume the base is 2. */
9771
9772 static tree
9773 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9774 {
9775 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9776 return NULL_TREE;
9777
9778 STRIP_NOPS (arg0);
9779
9780 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9781 return NULL_TREE;
9782
9783 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9784
9785 /* Proceed if a valid pointer type was passed in. */
9786 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9787 {
9788 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9789 tree frac, exp;
9790
9791 switch (value->cl)
9792 {
9793 case rvc_zero:
9794 /* For +-0, return (*exp = 0, +-0). */
9795 exp = integer_zero_node;
9796 frac = arg0;
9797 break;
9798 case rvc_nan:
9799 case rvc_inf:
9800 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9801 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9802 case rvc_normal:
9803 {
9804 /* Since the frexp function always expects base 2, and in
9805 GCC normalized significands are already in the range
9806 [0.5, 1.0), we have exactly what frexp wants. */
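/* E.g. frexp (8.0, &e) folds to 0.5 with *e set to 4.  */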
9807 REAL_VALUE_TYPE frac_rvt = *value;
9808 SET_REAL_EXP (&frac_rvt, 0);
9809 frac = build_real (rettype, frac_rvt);
9810 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9811 }
9812 break;
9813 default:
9814 gcc_unreachable ();
9815 }
9816
9817 /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
9818 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9819 TREE_SIDE_EFFECTS (arg1) = 1;
9820 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9821 }
9822
9823 return NULL_TREE;
9824 }
9825
9826 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9827 then we can assume the base is two. If it's false, then we have to
9828 check the mode of the TYPE parameter in certain cases. */
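/* For example, ldexp (1.5, 3) is folded to 12.0 when both arguments
   are constant and the result is representable in TYPE.  */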
9829
9830 static tree
9831 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9832 tree type, bool ldexp)
9833 {
9834 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9835 {
9836 STRIP_NOPS (arg0);
9837 STRIP_NOPS (arg1);
9838
9839 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9840 if (real_zerop (arg0) || integer_zerop (arg1)
9841 || (TREE_CODE (arg0) == REAL_CST
9842 && !real_isfinite (&TREE_REAL_CST (arg0))))
9843 return omit_one_operand_loc (loc, type, arg0, arg1);
9844
9845 /* If both arguments are constant, then try to evaluate it. */
9846 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9847 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9848 && tree_fits_shwi_p (arg1))
9849 {
9850 /* Bound the maximum adjustment to twice the range of the
9851 mode's valid exponents. Use abs to ensure the range is
9852 positive as a sanity check. */
9853 const long max_exp_adj = 2 *
9854 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9855 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9856
9857 /* Get the user-requested adjustment. */
9858 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9859
9860 /* The requested adjustment must be inside this range. This
9861 is a preliminary cap to avoid things like overflow; we
9862 may still fail to compute the result for other reasons. */
9863 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9864 {
9865 REAL_VALUE_TYPE initial_result;
9866
9867 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9868
9869 /* Ensure we didn't overflow. */
9870 if (! real_isinf (&initial_result))
9871 {
9872 const REAL_VALUE_TYPE trunc_result
9873 = real_value_truncate (TYPE_MODE (type), initial_result);
9874
9875 /* Only proceed if the target mode can hold the
9876 resulting value. */
9877 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9878 return build_real (type, trunc_result);
9879 }
9880 }
9881 }
9882 }
9883
9884 return NULL_TREE;
9885 }
9886
9887 /* Fold a call to builtin modf. */
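/* E.g. modf (-3.5, &iptr) folds to the equivalent of
   (*iptr = -3.0, -0.5).  */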
9888
9889 static tree
9890 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9891 {
9892 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9893 return NULL_TREE;
9894
9895 STRIP_NOPS (arg0);
9896
9897 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9898 return NULL_TREE;
9899
9900 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9901
9902 /* Proceed if a valid pointer type was passed in. */
9903 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9904 {
9905 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9906 REAL_VALUE_TYPE trunc, frac;
9907
9908 switch (value->cl)
9909 {
9910 case rvc_nan:
9911 case rvc_zero:
9912 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9913 trunc = frac = *value;
9914 break;
9915 case rvc_inf:
9916 /* For +-Inf, return (*arg1 = arg0, +-0). */
9917 frac = dconst0;
9918 frac.sign = value->sign;
9919 trunc = *value;
9920 break;
9921 case rvc_normal:
9922 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9923 real_trunc (&trunc, VOIDmode, value);
9924 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9925 /* If the original number was negative and already
9926 integral, then the fractional part is -0.0. */
9927 if (value->sign && frac.cl == rvc_zero)
9928 frac.sign = value->sign;
9929 break;
9930 }
9931
9932 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9933 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9934 build_real (rettype, trunc));
9935 TREE_SIDE_EFFECTS (arg1) = 1;
9936 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9937 build_real (rettype, frac));
9938 }
9939
9940 return NULL_TREE;
9941 }
9942
9943 /* Given a location LOC, an interclass builtin function decl FNDECL
9944 and its single argument ARG, return a folded expression computing
9945 the same, or NULL_TREE if we either couldn't or didn't want to fold
9946 (the latter happens if there's an RTL instruction available).  */
9947
9948 static tree
9949 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9950 {
9951 enum machine_mode mode;
9952
9953 if (!validate_arg (arg, REAL_TYPE))
9954 return NULL_TREE;
9955
9956 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9957 return NULL_TREE;
9958
9959 mode = TYPE_MODE (TREE_TYPE (arg));
9960
9961 /* If there is no optab, try generic code. */
9962 switch (DECL_FUNCTION_CODE (fndecl))
9963 {
9964 tree result;
9965
9966 CASE_FLT_FN (BUILT_IN_ISINF):
9967 {
9968 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9969 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9970 tree const type = TREE_TYPE (arg);
9971 REAL_VALUE_TYPE r;
9972 char buf[128];
9973
9974 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9975 real_from_string (&r, buf);
9976 result = build_call_expr (isgr_fn, 2,
9977 fold_build1_loc (loc, ABS_EXPR, type, arg),
9978 build_real (type, r));
9979 return result;
9980 }
9981 CASE_FLT_FN (BUILT_IN_FINITE):
9982 case BUILT_IN_ISFINITE:
9983 {
9984 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9985 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9986 tree const type = TREE_TYPE (arg);
9987 REAL_VALUE_TYPE r;
9988 char buf[128];
9989
9990 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9991 real_from_string (&r, buf);
9992 result = build_call_expr (isle_fn, 2,
9993 fold_build1_loc (loc, ABS_EXPR, type, arg),
9994 build_real (type, r));
9995 /*result = fold_build2_loc (loc, UNGT_EXPR,
9996 TREE_TYPE (TREE_TYPE (fndecl)),
9997 fold_build1_loc (loc, ABS_EXPR, type, arg),
9998 build_real (type, r));
9999 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10000 TREE_TYPE (TREE_TYPE (fndecl)),
10001 result);*/
10002 return result;
10003 }
10004 case BUILT_IN_ISNORMAL:
10005 {
10006 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10007 islessequal(fabs(x),DBL_MAX). */
10008 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10009 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10010 tree const type = TREE_TYPE (arg);
10011 REAL_VALUE_TYPE rmax, rmin;
10012 char buf[128];
10013
10014 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10015 real_from_string (&rmax, buf);
10016 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10017 real_from_string (&rmin, buf);
10018 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10019 result = build_call_expr (isle_fn, 2, arg,
10020 build_real (type, rmax));
10021 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10022 build_call_expr (isge_fn, 2, arg,
10023 build_real (type, rmin)));
10024 return result;
10025 }
10026 default:
10027 break;
10028 }
10029
10030 return NULL_TREE;
10031 }
10032
10033 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
10034 ARG is the argument for the call; BUILTIN_INDEX selects the builtin.  */
10035
10036 static tree
10037 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10038 {
10039 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10040 REAL_VALUE_TYPE r;
10041
10042 if (!validate_arg (arg, REAL_TYPE))
10043 return NULL_TREE;
10044
10045 switch (builtin_index)
10046 {
10047 case BUILT_IN_ISINF:
10048 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10049 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10050
10051 if (TREE_CODE (arg) == REAL_CST)
10052 {
10053 r = TREE_REAL_CST (arg);
10054 if (real_isinf (&r))
10055 return real_compare (GT_EXPR, &r, &dconst0)
10056 ? integer_one_node : integer_minus_one_node;
10057 else
10058 return integer_zero_node;
10059 }
10060
10061 return NULL_TREE;
10062
10063 case BUILT_IN_ISINF_SIGN:
10064 {
10065 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10066 /* In a boolean context, GCC will fold the inner COND_EXPR to
10067 1. So e.g. "if (isinf_sign(x))" would be folded to just
10068 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10069 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10070 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10071 tree tmp = NULL_TREE;
10072
10073 arg = builtin_save_expr (arg);
10074
10075 if (signbit_fn && isinf_fn)
10076 {
10077 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10078 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10079
10080 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10081 signbit_call, integer_zero_node);
10082 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10083 isinf_call, integer_zero_node);
10084
10085 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10086 integer_minus_one_node, integer_one_node);
10087 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10088 isinf_call, tmp,
10089 integer_zero_node);
10090 }
10091
10092 return tmp;
10093 }
10094
10095 case BUILT_IN_ISFINITE:
10096 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10097 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10098 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10099
10100 if (TREE_CODE (arg) == REAL_CST)
10101 {
10102 r = TREE_REAL_CST (arg);
10103 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10104 }
10105
10106 return NULL_TREE;
10107
10108 case BUILT_IN_ISNAN:
10109 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10110 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10111
10112 if (TREE_CODE (arg) == REAL_CST)
10113 {
10114 r = TREE_REAL_CST (arg);
10115 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10116 }
10117
10118 arg = builtin_save_expr (arg);
10119 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10120
10121 default:
10122 gcc_unreachable ();
10123 }
10124 }
10125
10126 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10127 This builtin will generate code to return the appropriate floating
10128 point classification depending on the value of the floating point
10129 number passed in. The possible return values must be supplied as
10130 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10131 FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
10132 one floating-point argument, which is "type generic".  */
10133
10134 static tree
10135 fold_builtin_fpclassify (location_t loc, tree exp)
10136 {
10137 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10138 arg, type, res, tmp;
10139 enum machine_mode mode;
10140 REAL_VALUE_TYPE r;
10141 char buf[128];
10142
10143 /* Verify the required arguments in the original call. */
10144 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10145 INTEGER_TYPE, INTEGER_TYPE,
10146 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10147 return NULL_TREE;
10148
10149 fp_nan = CALL_EXPR_ARG (exp, 0);
10150 fp_infinite = CALL_EXPR_ARG (exp, 1);
10151 fp_normal = CALL_EXPR_ARG (exp, 2);
10152 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10153 fp_zero = CALL_EXPR_ARG (exp, 4);
10154 arg = CALL_EXPR_ARG (exp, 5);
10155 type = TREE_TYPE (arg);
10156 mode = TYPE_MODE (type);
10157 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10158
10159 /* fpclassify(x) ->
10160 isnan(x) ? FP_NAN :
10161 (fabs(x) == Inf ? FP_INFINITE :
10162 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10163 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10164
10165 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10166 build_real (type, dconst0));
10167 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10168 tmp, fp_zero, fp_subnormal);
10169
10170 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10171 real_from_string (&r, buf);
10172 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10173 arg, build_real (type, r));
10174 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10175
10176 if (HONOR_INFINITIES (mode))
10177 {
10178 real_inf (&r);
10179 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10180 build_real (type, r));
10181 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10182 fp_infinite, res);
10183 }
10184
10185 if (HONOR_NANS (mode))
10186 {
10187 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10188 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10189 }
10190
10191 return res;
10192 }
10193
10194 /* Fold a call to an unordered comparison function such as
10195 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10196 being called and ARG0 and ARG1 are the arguments for the call.
10197 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10198 the opposite of the desired result. UNORDERED_CODE is used
10199 for modes that can hold NaNs and ORDERED_CODE is used for
10200 the rest. */
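/* E.g. for isgreater the caller passes UNLE_EXPR and LE_EXPR, so the
   result is built as !(x unle y); negating the opposite comparison
   yields the required false result when either operand is a NaN.  */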
10201
10202 static tree
10203 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10204 enum tree_code unordered_code,
10205 enum tree_code ordered_code)
10206 {
10207 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10208 enum tree_code code;
10209 tree type0, type1;
10210 enum tree_code code0, code1;
10211 tree cmp_type = NULL_TREE;
10212
10213 type0 = TREE_TYPE (arg0);
10214 type1 = TREE_TYPE (arg1);
10215
10216 code0 = TREE_CODE (type0);
10217 code1 = TREE_CODE (type1);
10218
10219 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10220 /* Choose the wider of two real types. */
10221 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10222 ? type0 : type1;
10223 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10224 cmp_type = type0;
10225 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10226 cmp_type = type1;
10227
10228 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10229 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10230
10231 if (unordered_code == UNORDERED_EXPR)
10232 {
10233 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10234 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10235 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10236 }
10237
10238 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10239 : ordered_code;
10240 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10241 fold_build2_loc (loc, code, type, arg0, arg1));
10242 }
10243
10244 /* Fold a call to built-in function FNDECL with 0 arguments.
10245 IGNORE is true if the result of the function call is ignored. This
10246 function returns NULL_TREE if no simplification was possible. */
10247
10248 static tree
10249 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10250 {
10251 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10252 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10253 switch (fcode)
10254 {
10255 CASE_FLT_FN (BUILT_IN_INF):
10256 case BUILT_IN_INFD32:
10257 case BUILT_IN_INFD64:
10258 case BUILT_IN_INFD128:
10259 return fold_builtin_inf (loc, type, true);
10260
10261 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10262 return fold_builtin_inf (loc, type, false);
10263
10264 case BUILT_IN_CLASSIFY_TYPE:
10265 return fold_builtin_classify_type (NULL_TREE);
10266
10267 case BUILT_IN_UNREACHABLE:
10268 if (flag_sanitize & SANITIZE_UNREACHABLE
10269 && (current_function_decl == NULL
10270 || !lookup_attribute ("no_sanitize_undefined",
10271 DECL_ATTRIBUTES (current_function_decl))))
10272 return ubsan_instrument_unreachable (loc);
10273 break;
10274
10275 default:
10276 break;
10277 }
10278 return NULL_TREE;
10279 }
10280
10281 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10282 IGNORE is true if the result of the function call is ignored. This
10283 function returns NULL_TREE if no simplification was possible. */
10284
10285 static tree
10286 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10287 {
10288 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10289 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10290 switch (fcode)
10291 {
10292 case BUILT_IN_CONSTANT_P:
10293 {
10294 tree val = fold_builtin_constant_p (arg0);
10295
10296 /* Gimplification will pull the CALL_EXPR for the builtin out of
10297 an if condition.  When not optimizing, we'll not CSE it back.
10298 To avoid regressions in the form of link errors, return false now.  */
10299 if (!val && !optimize)
10300 val = integer_zero_node;
10301
10302 return val;
10303 }
10304
10305 case BUILT_IN_CLASSIFY_TYPE:
10306 return fold_builtin_classify_type (arg0);
10307
10308 case BUILT_IN_STRLEN:
10309 return fold_builtin_strlen (loc, type, arg0);
10310
10311 CASE_FLT_FN (BUILT_IN_FABS):
10312 case BUILT_IN_FABSD32:
10313 case BUILT_IN_FABSD64:
10314 case BUILT_IN_FABSD128:
10315 return fold_builtin_fabs (loc, arg0, type);
10316
10317 case BUILT_IN_ABS:
10318 case BUILT_IN_LABS:
10319 case BUILT_IN_LLABS:
10320 case BUILT_IN_IMAXABS:
10321 return fold_builtin_abs (loc, arg0, type);
10322
10323 CASE_FLT_FN (BUILT_IN_CONJ):
10324 if (validate_arg (arg0, COMPLEX_TYPE)
10325 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10326 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10327 break;
10328
10329 CASE_FLT_FN (BUILT_IN_CREAL):
10330 if (validate_arg (arg0, COMPLEX_TYPE)
10331 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10332 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10333 break;
10334
10335 CASE_FLT_FN (BUILT_IN_CIMAG):
10336 if (validate_arg (arg0, COMPLEX_TYPE)
10337 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10338 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10339 break;
10340
10341 CASE_FLT_FN (BUILT_IN_CCOS):
10342 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10343
10344 CASE_FLT_FN (BUILT_IN_CCOSH):
10345 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10346
10347 CASE_FLT_FN (BUILT_IN_CPROJ):
10348 return fold_builtin_cproj (loc, arg0, type);
10349
10350 CASE_FLT_FN (BUILT_IN_CSIN):
10351 if (validate_arg (arg0, COMPLEX_TYPE)
10352 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10353 return do_mpc_arg1 (arg0, type, mpc_sin);
10354 break;
10355
10356 CASE_FLT_FN (BUILT_IN_CSINH):
10357 if (validate_arg (arg0, COMPLEX_TYPE)
10358 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10359 return do_mpc_arg1 (arg0, type, mpc_sinh);
10360 break;
10361
10362 CASE_FLT_FN (BUILT_IN_CTAN):
10363 if (validate_arg (arg0, COMPLEX_TYPE)
10364 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10365 return do_mpc_arg1 (arg0, type, mpc_tan);
10366 break;
10367
10368 CASE_FLT_FN (BUILT_IN_CTANH):
10369 if (validate_arg (arg0, COMPLEX_TYPE)
10370 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10371 return do_mpc_arg1 (arg0, type, mpc_tanh);
10372 break;
10373
10374 CASE_FLT_FN (BUILT_IN_CLOG):
10375 if (validate_arg (arg0, COMPLEX_TYPE)
10376 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10377 return do_mpc_arg1 (arg0, type, mpc_log);
10378 break;
10379
10380 CASE_FLT_FN (BUILT_IN_CSQRT):
10381 if (validate_arg (arg0, COMPLEX_TYPE)
10382 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10383 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10384 break;
10385
10386 CASE_FLT_FN (BUILT_IN_CASIN):
10387 if (validate_arg (arg0, COMPLEX_TYPE)
10388 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10389 return do_mpc_arg1 (arg0, type, mpc_asin);
10390 break;
10391
10392 CASE_FLT_FN (BUILT_IN_CACOS):
10393 if (validate_arg (arg0, COMPLEX_TYPE)
10394 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10395 return do_mpc_arg1 (arg0, type, mpc_acos);
10396 break;
10397
10398 CASE_FLT_FN (BUILT_IN_CATAN):
10399 if (validate_arg (arg0, COMPLEX_TYPE)
10400 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10401 return do_mpc_arg1 (arg0, type, mpc_atan);
10402 break;
10403
10404 CASE_FLT_FN (BUILT_IN_CASINH):
10405 if (validate_arg (arg0, COMPLEX_TYPE)
10406 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10407 return do_mpc_arg1 (arg0, type, mpc_asinh);
10408 break;
10409
10410 CASE_FLT_FN (BUILT_IN_CACOSH):
10411 if (validate_arg (arg0, COMPLEX_TYPE)
10412 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10413 return do_mpc_arg1 (arg0, type, mpc_acosh);
10414 break;
10415
10416 CASE_FLT_FN (BUILT_IN_CATANH):
10417 if (validate_arg (arg0, COMPLEX_TYPE)
10418 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10419 return do_mpc_arg1 (arg0, type, mpc_atanh);
10420 break;
10421
10422 CASE_FLT_FN (BUILT_IN_CABS):
10423 return fold_builtin_cabs (loc, arg0, type, fndecl);
10424
10425 CASE_FLT_FN (BUILT_IN_CARG):
10426 return fold_builtin_carg (loc, arg0, type);
10427
10428 CASE_FLT_FN (BUILT_IN_SQRT):
10429 return fold_builtin_sqrt (loc, arg0, type);
10430
10431 CASE_FLT_FN (BUILT_IN_CBRT):
10432 return fold_builtin_cbrt (loc, arg0, type);
10433
10434 CASE_FLT_FN (BUILT_IN_ASIN):
10435 if (validate_arg (arg0, REAL_TYPE))
10436 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10437 &dconstm1, &dconst1, true);
10438 break;
10439
10440 CASE_FLT_FN (BUILT_IN_ACOS):
10441 if (validate_arg (arg0, REAL_TYPE))
10442 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10443 &dconstm1, &dconst1, true);
10444 break;
10445
10446 CASE_FLT_FN (BUILT_IN_ATAN):
10447 if (validate_arg (arg0, REAL_TYPE))
10448 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10449 break;
10450
10451 CASE_FLT_FN (BUILT_IN_ASINH):
10452 if (validate_arg (arg0, REAL_TYPE))
10453 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10454 break;
10455
10456 CASE_FLT_FN (BUILT_IN_ACOSH):
10457 if (validate_arg (arg0, REAL_TYPE))
10458 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10459 &dconst1, NULL, true);
10460 break;
10461
10462 CASE_FLT_FN (BUILT_IN_ATANH):
10463 if (validate_arg (arg0, REAL_TYPE))
10464 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10465 &dconstm1, &dconst1, false);
10466 break;
10467
10468 CASE_FLT_FN (BUILT_IN_SIN):
10469 if (validate_arg (arg0, REAL_TYPE))
10470 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10471 break;
10472
10473 CASE_FLT_FN (BUILT_IN_COS):
10474 return fold_builtin_cos (loc, arg0, type, fndecl);
10475
10476 CASE_FLT_FN (BUILT_IN_TAN):
10477 return fold_builtin_tan (arg0, type);
10478
10479 CASE_FLT_FN (BUILT_IN_CEXP):
10480 return fold_builtin_cexp (loc, arg0, type);
10481
10482 CASE_FLT_FN (BUILT_IN_CEXPI):
10483 if (validate_arg (arg0, REAL_TYPE))
10484 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10485 break;
10486
10487 CASE_FLT_FN (BUILT_IN_SINH):
10488 if (validate_arg (arg0, REAL_TYPE))
10489 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10490 break;
10491
10492 CASE_FLT_FN (BUILT_IN_COSH):
10493 return fold_builtin_cosh (loc, arg0, type, fndecl);
10494
10495 CASE_FLT_FN (BUILT_IN_TANH):
10496 if (validate_arg (arg0, REAL_TYPE))
10497 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10498 break;
10499
10500 CASE_FLT_FN (BUILT_IN_ERF):
10501 if (validate_arg (arg0, REAL_TYPE))
10502 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10503 break;
10504
10505 CASE_FLT_FN (BUILT_IN_ERFC):
10506 if (validate_arg (arg0, REAL_TYPE))
10507 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10508 break;
10509
10510 CASE_FLT_FN (BUILT_IN_TGAMMA):
10511 if (validate_arg (arg0, REAL_TYPE))
10512 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10513 break;
10514
10515 CASE_FLT_FN (BUILT_IN_EXP):
10516 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10517
10518 CASE_FLT_FN (BUILT_IN_EXP2):
10519 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10520
10521 CASE_FLT_FN (BUILT_IN_EXP10):
10522 CASE_FLT_FN (BUILT_IN_POW10):
10523 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10524
10525 CASE_FLT_FN (BUILT_IN_EXPM1):
10526 if (validate_arg (arg0, REAL_TYPE))
10527 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10528 break;
10529
10530 CASE_FLT_FN (BUILT_IN_LOG):
10531 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10532
10533 CASE_FLT_FN (BUILT_IN_LOG2):
10534 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10535
10536 CASE_FLT_FN (BUILT_IN_LOG10):
10537 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10538
10539 CASE_FLT_FN (BUILT_IN_LOG1P):
10540 if (validate_arg (arg0, REAL_TYPE))
10541 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10542 &dconstm1, NULL, false);
10543 break;
10544
10545 CASE_FLT_FN (BUILT_IN_J0):
10546 if (validate_arg (arg0, REAL_TYPE))
10547 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10548 NULL, NULL, 0);
10549 break;
10550
10551 CASE_FLT_FN (BUILT_IN_J1):
10552 if (validate_arg (arg0, REAL_TYPE))
10553 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10554 NULL, NULL, 0);
10555 break;
10556
10557 CASE_FLT_FN (BUILT_IN_Y0):
10558 if (validate_arg (arg0, REAL_TYPE))
10559 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10560 &dconst0, NULL, false);
10561 break;
10562
10563 CASE_FLT_FN (BUILT_IN_Y1):
10564 if (validate_arg (arg0, REAL_TYPE))
10565 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10566 &dconst0, NULL, false);
10567 break;
10568
10569 CASE_FLT_FN (BUILT_IN_NAN):
10570 case BUILT_IN_NAND32:
10571 case BUILT_IN_NAND64:
10572 case BUILT_IN_NAND128:
10573 return fold_builtin_nan (arg0, type, true);
10574
10575 CASE_FLT_FN (BUILT_IN_NANS):
10576 return fold_builtin_nan (arg0, type, false);
10577
10578 CASE_FLT_FN (BUILT_IN_FLOOR):
10579 return fold_builtin_floor (loc, fndecl, arg0);
10580
10581 CASE_FLT_FN (BUILT_IN_CEIL):
10582 return fold_builtin_ceil (loc, fndecl, arg0);
10583
10584 CASE_FLT_FN (BUILT_IN_TRUNC):
10585 return fold_builtin_trunc (loc, fndecl, arg0);
10586
10587 CASE_FLT_FN (BUILT_IN_ROUND):
10588 return fold_builtin_round (loc, fndecl, arg0);
10589
10590 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10591 CASE_FLT_FN (BUILT_IN_RINT):
10592 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10593
10594 CASE_FLT_FN (BUILT_IN_ICEIL):
10595 CASE_FLT_FN (BUILT_IN_LCEIL):
10596 CASE_FLT_FN (BUILT_IN_LLCEIL):
10597 CASE_FLT_FN (BUILT_IN_LFLOOR):
10598 CASE_FLT_FN (BUILT_IN_IFLOOR):
10599 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10600 CASE_FLT_FN (BUILT_IN_IROUND):
10601 CASE_FLT_FN (BUILT_IN_LROUND):
10602 CASE_FLT_FN (BUILT_IN_LLROUND):
10603 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10604
10605 CASE_FLT_FN (BUILT_IN_IRINT):
10606 CASE_FLT_FN (BUILT_IN_LRINT):
10607 CASE_FLT_FN (BUILT_IN_LLRINT):
10608 return fold_fixed_mathfn (loc, fndecl, arg0);
10609
10610 case BUILT_IN_BSWAP16:
10611 case BUILT_IN_BSWAP32:
10612 case BUILT_IN_BSWAP64:
10613 return fold_builtin_bswap (fndecl, arg0);
10614
10615 CASE_INT_FN (BUILT_IN_FFS):
10616 CASE_INT_FN (BUILT_IN_CLZ):
10617 CASE_INT_FN (BUILT_IN_CTZ):
10618 CASE_INT_FN (BUILT_IN_CLRSB):
10619 CASE_INT_FN (BUILT_IN_POPCOUNT):
10620 CASE_INT_FN (BUILT_IN_PARITY):
10621 return fold_builtin_bitop (fndecl, arg0);
10622
10623 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10624 return fold_builtin_signbit (loc, arg0, type);
10625
10626 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10627 return fold_builtin_significand (loc, arg0, type);
10628
10629 CASE_FLT_FN (BUILT_IN_ILOGB):
10630 CASE_FLT_FN (BUILT_IN_LOGB):
10631 return fold_builtin_logb (loc, arg0, type);
10632
10633 case BUILT_IN_ISASCII:
10634 return fold_builtin_isascii (loc, arg0);
10635
10636 case BUILT_IN_TOASCII:
10637 return fold_builtin_toascii (loc, arg0);
10638
10639 case BUILT_IN_ISDIGIT:
10640 return fold_builtin_isdigit (loc, arg0);
10641
10642 CASE_FLT_FN (BUILT_IN_FINITE):
10643 case BUILT_IN_FINITED32:
10644 case BUILT_IN_FINITED64:
10645 case BUILT_IN_FINITED128:
10646 case BUILT_IN_ISFINITE:
10647 {
10648 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10649 if (ret)
10650 return ret;
10651 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10652 }
10653
10654 CASE_FLT_FN (BUILT_IN_ISINF):
10655 case BUILT_IN_ISINFD32:
10656 case BUILT_IN_ISINFD64:
10657 case BUILT_IN_ISINFD128:
10658 {
10659 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10660 if (ret)
10661 return ret;
10662 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10663 }
10664
10665 case BUILT_IN_ISNORMAL:
10666 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10667
10668 case BUILT_IN_ISINF_SIGN:
10669 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10670
10671 CASE_FLT_FN (BUILT_IN_ISNAN):
10672 case BUILT_IN_ISNAND32:
10673 case BUILT_IN_ISNAND64:
10674 case BUILT_IN_ISNAND128:
10675 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10676
10677 case BUILT_IN_PRINTF:
10678 case BUILT_IN_PRINTF_UNLOCKED:
10679 case BUILT_IN_VPRINTF:
10680 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10681
10682 case BUILT_IN_FREE:
10683 if (integer_zerop (arg0))
10684 return build_empty_stmt (loc);
10685 break;
10686
10687 default:
10688 break;
10689 }
10690
10691 return NULL_TREE;
10693 }
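
/* Two of the one-argument folds above, on hypothetical user code
   (a sketch; `x' and `y' are placeholders):

     free (NULL);                  becomes an empty statement
     y = __builtin_fabs (-3.0);    becomes   y = 3.0;

   the fabs case goes through fold_builtin_fabs, which folds constant
   arguments at compile time.  */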
10694
10695 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10696 IGNORE is true if the result of the function call is ignored. This
10697 function returns NULL_TREE if no simplification was possible. */
10698
10699 static tree
10700 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10701 {
10702 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10703 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10704
10705 switch (fcode)
10706 {
10707 CASE_FLT_FN (BUILT_IN_JN):
10708 if (validate_arg (arg0, INTEGER_TYPE)
10709 && validate_arg (arg1, REAL_TYPE))
10710 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10711 break;
10712
10713 CASE_FLT_FN (BUILT_IN_YN):
10714 if (validate_arg (arg0, INTEGER_TYPE)
10715 && validate_arg (arg1, REAL_TYPE))
10716 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10717 &dconst0, false);
10718 break;
10719
10720 CASE_FLT_FN (BUILT_IN_DREM):
10721 CASE_FLT_FN (BUILT_IN_REMAINDER):
10722 if (validate_arg (arg0, REAL_TYPE)
10723 && validate_arg (arg1, REAL_TYPE))
10724 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10725 break;
10726
10727 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10728 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10729 if (validate_arg (arg0, REAL_TYPE)
10730 && validate_arg (arg1, POINTER_TYPE))
10731 return do_mpfr_lgamma_r (arg0, arg1, type);
10732 break;
10733
10734 CASE_FLT_FN (BUILT_IN_ATAN2):
10735 if (validate_arg (arg0, REAL_TYPE)
10736 && validate_arg (arg1, REAL_TYPE))
10737 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10738 break;
10739
10740 CASE_FLT_FN (BUILT_IN_FDIM):
10741 if (validate_arg (arg0, REAL_TYPE)
10742 && validate_arg (arg1, REAL_TYPE))
10743 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10744 break;
10745
10746 CASE_FLT_FN (BUILT_IN_HYPOT):
10747 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10748
10749 CASE_FLT_FN (BUILT_IN_CPOW):
10750 if (validate_arg (arg0, COMPLEX_TYPE)
10751 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10752 && validate_arg (arg1, COMPLEX_TYPE)
10753 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10754 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10755 break;
10756
10757 CASE_FLT_FN (BUILT_IN_LDEXP):
10758 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10759 CASE_FLT_FN (BUILT_IN_SCALBN):
10760 CASE_FLT_FN (BUILT_IN_SCALBLN):
10761 return fold_builtin_load_exponent (loc, arg0, arg1,
10762 type, /*ldexp=*/false);
10763
10764 CASE_FLT_FN (BUILT_IN_FREXP):
10765 return fold_builtin_frexp (loc, arg0, arg1, type);
10766
10767 CASE_FLT_FN (BUILT_IN_MODF):
10768 return fold_builtin_modf (loc, arg0, arg1, type);
10769
10770 case BUILT_IN_BZERO:
10771 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10772
10773 case BUILT_IN_FPUTS:
10774 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10775
10776 case BUILT_IN_FPUTS_UNLOCKED:
10777 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10778
10779 case BUILT_IN_STRSTR:
10780 return fold_builtin_strstr (loc, arg0, arg1, type);
10781
10782 case BUILT_IN_STRCAT:
10783 return fold_builtin_strcat (loc, arg0, arg1);
10784
10785 case BUILT_IN_STRSPN:
10786 return fold_builtin_strspn (loc, arg0, arg1);
10787
10788 case BUILT_IN_STRCSPN:
10789 return fold_builtin_strcspn (loc, arg0, arg1);
10790
10791 case BUILT_IN_STRCHR:
10792 case BUILT_IN_INDEX:
10793 return fold_builtin_strchr (loc, arg0, arg1, type);
10794
10795 case BUILT_IN_STRRCHR:
10796 case BUILT_IN_RINDEX:
10797 return fold_builtin_strrchr (loc, arg0, arg1, type);
10798
10799 case BUILT_IN_STRCPY:
10800 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10801
10802 case BUILT_IN_STPCPY:
10803 if (ignore)
10804 {
10805 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10806 if (!fn)
10807 break;
10808
10809 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10810 }
10811 else
10812 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10813 break;
10814
10815 case BUILT_IN_STRCMP:
10816 return fold_builtin_strcmp (loc, arg0, arg1);
10817
10818 case BUILT_IN_STRPBRK:
10819 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10820
10821 case BUILT_IN_EXPECT:
10822 return fold_builtin_expect (loc, arg0, arg1);
10823
10824 CASE_FLT_FN (BUILT_IN_POW):
10825 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10826
10827 CASE_FLT_FN (BUILT_IN_POWI):
10828 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10829
10830 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10831 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10832
10833 CASE_FLT_FN (BUILT_IN_FMIN):
10834 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10835
10836 CASE_FLT_FN (BUILT_IN_FMAX):
10837 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10838
10839 case BUILT_IN_ISGREATER:
10840 return fold_builtin_unordered_cmp (loc, fndecl,
10841 arg0, arg1, UNLE_EXPR, LE_EXPR);
10842 case BUILT_IN_ISGREATEREQUAL:
10843 return fold_builtin_unordered_cmp (loc, fndecl,
10844 arg0, arg1, UNLT_EXPR, LT_EXPR);
10845 case BUILT_IN_ISLESS:
10846 return fold_builtin_unordered_cmp (loc, fndecl,
10847 arg0, arg1, UNGE_EXPR, GE_EXPR);
10848 case BUILT_IN_ISLESSEQUAL:
10849 return fold_builtin_unordered_cmp (loc, fndecl,
10850 arg0, arg1, UNGT_EXPR, GT_EXPR);
10851 case BUILT_IN_ISLESSGREATER:
10852 return fold_builtin_unordered_cmp (loc, fndecl,
10853 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10854 case BUILT_IN_ISUNORDERED:
10855 return fold_builtin_unordered_cmp (loc, fndecl,
10856 arg0, arg1, UNORDERED_EXPR,
10857 NOP_EXPR);
10858
10859 /* We do the folding for va_start in the expander. */
10860 case BUILT_IN_VA_START:
10861 break;
10862
10863 case BUILT_IN_SPRINTF:
10864 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10865
10866 case BUILT_IN_OBJECT_SIZE:
10867 return fold_builtin_object_size (arg0, arg1);
10868
10869 case BUILT_IN_PRINTF:
10870 case BUILT_IN_PRINTF_UNLOCKED:
10871 case BUILT_IN_VPRINTF:
10872 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10873
10874 case BUILT_IN_PRINTF_CHK:
10875 case BUILT_IN_VPRINTF_CHK:
10876 if (!validate_arg (arg0, INTEGER_TYPE)
10877 || TREE_SIDE_EFFECTS (arg0))
10878 return NULL_TREE;
10879 else
10880 return fold_builtin_printf (loc, fndecl,
10881 arg1, NULL_TREE, ignore, fcode);
10882 break;
10883
10884 case BUILT_IN_FPRINTF:
10885 case BUILT_IN_FPRINTF_UNLOCKED:
10886 case BUILT_IN_VFPRINTF:
10887 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10888 ignore, fcode);
10889
10890 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10891 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10892
10893 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10894 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10895
10896 default:
10897 break;
10898 }
10899 return NULL_TREE;
10900 }
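
/* As an illustration of the unordered-comparison folds above, with
   hypothetical operands a and b:

     isgreater (a, b)    becomes   !(a unle b) when NaNs are honored,
                                   and !(a <= b) under -ffinite-math-only
     isunordered (a, b)  becomes   a unord b, or literal 0 when NaNs
                                   cannot occur (operands still evaluated)

   so no library call is ever emitted for these type-generic macros.  */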
10901
10902 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10903 and ARG2. IGNORE is true if the result of the function call is ignored.
10904 This function returns NULL_TREE if no simplification was possible. */
10905
10906 static tree
10907 fold_builtin_3 (location_t loc, tree fndecl,
10908 tree arg0, tree arg1, tree arg2, bool ignore)
10909 {
10910 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10911 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10912 switch (fcode)
10913 {
10915 CASE_FLT_FN (BUILT_IN_SINCOS):
10916 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10917
10918     CASE_FLT_FN (BUILT_IN_FMA):
10919       return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10921
10922 CASE_FLT_FN (BUILT_IN_REMQUO):
10923 if (validate_arg (arg0, REAL_TYPE)
10924 && validate_arg (arg1, REAL_TYPE)
10925 && validate_arg (arg2, POINTER_TYPE))
10926 return do_mpfr_remquo (arg0, arg1, arg2);
10927 break;
10928
10929 case BUILT_IN_MEMSET:
10930 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10931
10932 case BUILT_IN_BCOPY:
10933 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10934 void_type_node, true, /*endp=*/3);
10935
10936 case BUILT_IN_MEMCPY:
10937 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10938 type, ignore, /*endp=*/0);
10939
10940 case BUILT_IN_MEMPCPY:
10941 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10942 type, ignore, /*endp=*/1);
10943
10944 case BUILT_IN_MEMMOVE:
10945 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10946 type, ignore, /*endp=*/3);
10947
10948 case BUILT_IN_STRNCAT:
10949 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10950
10951 case BUILT_IN_STRNCPY:
10952 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10953
10954 case BUILT_IN_STRNCMP:
10955 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10956
10957 case BUILT_IN_MEMCHR:
10958 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10959
10960 case BUILT_IN_BCMP:
10961 case BUILT_IN_MEMCMP:
10962       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10963
10964 case BUILT_IN_SPRINTF:
10965 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10966
10967 case BUILT_IN_SNPRINTF:
10968 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10969
10970 case BUILT_IN_STRCPY_CHK:
10971 case BUILT_IN_STPCPY_CHK:
10972 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10973 ignore, fcode);
10974
10975 case BUILT_IN_STRCAT_CHK:
10976 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10977
10978 case BUILT_IN_PRINTF_CHK:
10979 case BUILT_IN_VPRINTF_CHK:
10980 if (!validate_arg (arg0, INTEGER_TYPE)
10981 || TREE_SIDE_EFFECTS (arg0))
10982 return NULL_TREE;
10983 else
10984 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10985 break;
10986
10987 case BUILT_IN_FPRINTF:
10988 case BUILT_IN_FPRINTF_UNLOCKED:
10989 case BUILT_IN_VFPRINTF:
10990 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10991 ignore, fcode);
10992
10993 case BUILT_IN_FPRINTF_CHK:
10994 case BUILT_IN_VFPRINTF_CHK:
10995 if (!validate_arg (arg1, INTEGER_TYPE)
10996 || TREE_SIDE_EFFECTS (arg1))
10997 return NULL_TREE;
10998 else
10999 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11000 ignore, fcode);
11001
11002 default:
11003 break;
11004 }
11005 return NULL_TREE;
11006 }
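
/* The endp argument to fold_builtin_memory_op above selects the value
   returned by the folded copy: 0 returns the destination (memcpy),
   1 returns dest + len (mempcpy), and 3 marks a memmove-style copy
   whose operands may overlap.  Note the swapped operand order in the
   BCOPY case; on hypothetical arguments,

     bcopy (src, dst, n)   is handled as   memmove (dst, src, n)

   with the result ignored.  */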
11007
11008 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11009 ARG2, and ARG3. IGNORE is true if the result of the function call is
11010 ignored. This function returns NULL_TREE if no simplification was
11011 possible. */
11012
11013 static tree
11014 fold_builtin_4 (location_t loc, tree fndecl,
11015 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11016 {
11017 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11018
11019 switch (fcode)
11020 {
11021 case BUILT_IN_MEMCPY_CHK:
11022 case BUILT_IN_MEMPCPY_CHK:
11023 case BUILT_IN_MEMMOVE_CHK:
11024 case BUILT_IN_MEMSET_CHK:
11025 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11026 NULL_TREE, ignore,
11027 DECL_FUNCTION_CODE (fndecl));
11028
11029 case BUILT_IN_STRNCPY_CHK:
11030 case BUILT_IN_STPNCPY_CHK:
11031 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11032 ignore, fcode);
11033
11034 case BUILT_IN_STRNCAT_CHK:
11035 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11036
11037 case BUILT_IN_SNPRINTF:
11038 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11039
11040 case BUILT_IN_FPRINTF_CHK:
11041 case BUILT_IN_VFPRINTF_CHK:
11042 if (!validate_arg (arg1, INTEGER_TYPE)
11043 || TREE_SIDE_EFFECTS (arg1))
11044 return NULL_TREE;
11045 else
11046 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11047 ignore, fcode);
11048 break;
11049
11050 default:
11051 break;
11052 }
11053 return NULL_TREE;
11054 }
11055
11056 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11057 arguments, where NARGS <= 4. IGNORE is true if the result of the
11058 function call is ignored. This function returns NULL_TREE if no
11059 simplification was possible. Note that this only folds builtins with
11060 fixed argument patterns. Foldings that do varargs-to-varargs
11061 transformations, or that match calls with more than 4 arguments,
11062 need to be handled with fold_builtin_varargs instead. */
11063
11064 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11065
11066 static tree
11067 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11068 {
11069 tree ret = NULL_TREE;
11070
11071 switch (nargs)
11072 {
11073 case 0:
11074 ret = fold_builtin_0 (loc, fndecl, ignore);
11075 break;
11076 case 1:
11077 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11078 break;
11079 case 2:
11080 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11081 break;
11082 case 3:
11083 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11084 break;
11085 case 4:
11086 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11087 ignore);
11088 break;
11089 default:
11090 break;
11091 }
11092 if (ret)
11093 {
11094 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11095 SET_EXPR_LOCATION (ret, loc);
11096 TREE_NO_WARNING (ret) = 1;
11097 return ret;
11098 }
11099 return NULL_TREE;
11100 }
11101
11102 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11103 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11104 of arguments in ARGS to be omitted. OLDNARGS is the number of
11105 elements in ARGS. */
11106
11107 static tree
11108 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11109 int skip, tree fndecl, int n, va_list newargs)
11110 {
11111 int nargs = oldnargs - skip + n;
11112 tree *buffer;
11113
11114 if (n > 0)
11115 {
11116 int i, j;
11117
11118 buffer = XALLOCAVEC (tree, nargs);
11119 for (i = 0; i < n; i++)
11120 buffer[i] = va_arg (newargs, tree);
11121 for (j = skip; j < oldnargs; j++, i++)
11122 buffer[i] = args[j];
11123 }
11124 else
11125 buffer = args + skip;
11126
11127 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11128 }
11129
11130 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11131 list ARGS along with N new arguments specified as the "..."
11132 parameters. SKIP is the number of arguments in ARGS to be omitted.
11133 OLDNARGS is the number of elements in ARGS. */
11134
11135 static tree
11136 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11137 int skip, tree fndecl, int n, ...)
11138 {
11139 va_list ap;
11140 tree t;
11141
11142 va_start (ap, n);
11143 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11144 va_end (ap);
11145
11146 return t;
11147 }
11148
11149 /* Return true if FNDECL shouldn't be folded right now.
11150    If a built-in function has an always_inline inline-attribute
11151    wrapper, defer folding it until after always_inline functions
11152    have been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
11153    might not be performed.  */
11154
11155 bool
11156 avoid_folding_inline_builtin (tree fndecl)
11157 {
11158 return (DECL_DECLARED_INLINE_P (fndecl)
11159 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11160 && cfun
11161 && !cfun->always_inline_functions_inlined
11162 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11163 }
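
/* The situation this guards against is sketched below; it resembles
   the glibc _FORTIFY_SOURCE wrappers (illustrative only, not the
   exact library headers):

     extern __inline __attribute__ ((__always_inline__))
     char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				      __builtin_object_size (__dest, 1));
     }

   Folding a call to this strcpy before the wrapper body is inlined
   would bypass the object-size check.  */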
11164
11165 /* A wrapper function for builtin folding that prevents warnings for
11166 "statement without effect" and the like, caused by removing the
11167 call node earlier than the warning is generated. */
11168
11169 tree
11170 fold_call_expr (location_t loc, tree exp, bool ignore)
11171 {
11172 tree ret = NULL_TREE;
11173 tree fndecl = get_callee_fndecl (exp);
11174 if (fndecl
11175 && TREE_CODE (fndecl) == FUNCTION_DECL
11176 && DECL_BUILT_IN (fndecl)
11177 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11178 yet. Defer folding until we see all the arguments
11179 (after inlining). */
11180 && !CALL_EXPR_VA_ARG_PACK (exp))
11181 {
11182 int nargs = call_expr_nargs (exp);
11183
11184 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11185 instead last argument is __builtin_va_arg_pack (). Defer folding
11186 even in that case, until arguments are finalized. */
11187 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11188 {
11189 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11190 if (fndecl2
11191 && TREE_CODE (fndecl2) == FUNCTION_DECL
11192 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11193 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11194 return NULL_TREE;
11195 }
11196
11197 if (avoid_folding_inline_builtin (fndecl))
11198 return NULL_TREE;
11199
11200 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11201 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11202 CALL_EXPR_ARGP (exp), ignore);
11203 else
11204 {
11205 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11206 {
11207 tree *args = CALL_EXPR_ARGP (exp);
11208 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11209 }
11210 if (!ret)
11211 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11212 if (ret)
11213 return ret;
11214 }
11215 }
11216 return NULL_TREE;
11217 }
11218
11219 /* Conveniently construct a function call expression. FNDECL names the
11220 function to be called and N arguments are passed in the array
11221 ARGARRAY. */
11222
11223 tree
11224 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11225 {
11226 tree fntype = TREE_TYPE (fndecl);
11227 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11228
11229 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11230 }
11231
11232 /* Conveniently construct a function call expression. FNDECL names the
11233 function to be called and the arguments are passed in the vector
11234 VEC. */
11235
11236 tree
11237 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11238 {
11239 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11240 vec_safe_address (vec));
11241 }
11242
11243
11244 /* Conveniently construct a function call expression. FNDECL names the
11245 function to be called, N is the number of arguments, and the "..."
11246 parameters are the argument expressions. */
11247
11248 tree
11249 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11250 {
11251 va_list ap;
11252 tree *argarray = XALLOCAVEC (tree, n);
11253 int i;
11254
11255 va_start (ap, n);
11256 for (i = 0; i < n; i++)
11257 argarray[i] = va_arg (ap, tree);
11258 va_end (ap);
11259 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11260 }
11261
11262 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11263 varargs macros aren't supported by all bootstrap compilers. */
11264
11265 tree
11266 build_call_expr (tree fndecl, int n, ...)
11267 {
11268 va_list ap;
11269 tree *argarray = XALLOCAVEC (tree, n);
11270 int i;
11271
11272 va_start (ap, n);
11273 for (i = 0; i < n; i++)
11274 argarray[i] = va_arg (ap, tree);
11275 va_end (ap);
11276 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11277 }
11278
11279 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11280 N arguments are passed in the array ARGARRAY. */
11281
11282 tree
11283 fold_builtin_call_array (location_t loc, tree type,
11284 tree fn,
11285 int n,
11286 tree *argarray)
11287 {
11288 tree ret = NULL_TREE;
11289 tree exp;
11290
11291 if (TREE_CODE (fn) == ADDR_EXPR)
11292 {
11293 tree fndecl = TREE_OPERAND (fn, 0);
11294 if (TREE_CODE (fndecl) == FUNCTION_DECL
11295 && DECL_BUILT_IN (fndecl))
11296 {
11297 /* If last argument is __builtin_va_arg_pack (), arguments to this
11298 function are not finalized yet. Defer folding until they are. */
11299 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11300 {
11301 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11302 if (fndecl2
11303 && TREE_CODE (fndecl2) == FUNCTION_DECL
11304 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11305 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11306 return build_call_array_loc (loc, type, fn, n, argarray);
11307 }
11308 if (avoid_folding_inline_builtin (fndecl))
11309 return build_call_array_loc (loc, type, fn, n, argarray);
11310 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11311 {
11312 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11313 if (ret)
11314 return ret;
11315
11316 return build_call_array_loc (loc, type, fn, n, argarray);
11317 }
11318 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11319 {
11320 /* First try the transformations that don't require consing up
11321 an exp. */
11322 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11323 if (ret)
11324 return ret;
11325 }
11326
11327 /* If we got this far, we need to build an exp. */
11328 exp = build_call_array_loc (loc, type, fn, n, argarray);
11329 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11330 return ret ? ret : exp;
11331 }
11332 }
11333
11334 return build_call_array_loc (loc, type, fn, n, argarray);
11335 }
11336
11337 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11338 along with N new arguments specified as the "..." parameters. SKIP
11339 is the number of arguments in EXP to be omitted. This function is used
11340 to do varargs-to-varargs transformations. */
11341
11342 static tree
11343 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11344 {
11345 va_list ap;
11346 tree t;
11347
11348 va_start (ap, n);
11349 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11350 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11351 va_end (ap);
11352
11353 return t;
11354 }
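
/* A hypothetical use of rewrite_call_expr: turning
   sprintf (dest, "%s", orig) into strcpy (dest, orig) skips the two
   leading arguments of EXP and re-supplies DEST as one new argument:

     rewrite_call_expr (loc, exp, /*skip=*/2, strcpy_fn, 1, dest);

   the remaining tail of EXP's argument list (here just ORIG) is
   appended after the new arguments.  */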
11355
11356 /* Validate a single argument ARG against a tree code CODE representing
11357 a type. */
11358
11359 static bool
11360 validate_arg (const_tree arg, enum tree_code code)
11361 {
11362 if (!arg)
11363 return false;
11364 else if (code == POINTER_TYPE)
11365 return POINTER_TYPE_P (TREE_TYPE (arg));
11366 else if (code == INTEGER_TYPE)
11367 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11368 return code == TREE_CODE (TREE_TYPE (arg));
11369 }
11370
11371 /* This function validates the types of a function call argument list
11372 against a specified list of tree_codes. If the last specifier is a 0,
11373    that represents an ellipsis; otherwise the last specifier must be a
11374 VOID_TYPE.
11375
11376 This is the GIMPLE version of validate_arglist. Eventually we want to
11377 completely convert builtins.c to work from GIMPLEs and the tree based
11378 validate_arglist will then be removed. */
11379
11380 bool
11381 validate_gimple_arglist (const_gimple call, ...)
11382 {
11383 enum tree_code code;
11384   bool res = false;
11385 va_list ap;
11386 const_tree arg;
11387 size_t i;
11388
11389 va_start (ap, call);
11390 i = 0;
11391
11392 do
11393 {
11394 code = (enum tree_code) va_arg (ap, int);
11395 switch (code)
11396 {
11397 case 0:
11398 	  /* This signifies an ellipsis; any further arguments are all OK.  */
11399 res = true;
11400 goto end;
11401 case VOID_TYPE:
11402 	  /* This signifies an endlink; if no arguments remain, return
11403 	     true, otherwise return false.  */
11404 res = (i == gimple_call_num_args (call));
11405 goto end;
11406 default:
11407 /* If no parameters remain or the parameter's code does not
11408 match the specified code, return false. Otherwise continue
11409 checking any remaining arguments. */
11410 arg = gimple_call_arg (call, i++);
11411 if (!validate_arg (arg, code))
11412 goto end;
11413 break;
11414 }
11415 }
11416 while (1);
11417
11418   /* We need gotos here so that there is a single exit point at
11419      which va_end is called.  */
11420 end: ;
11421 va_end (ap);
11422
11423 return res;
11424 }
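
/* A caller checking, say, a memchr-like signature would write, for a
   GIMPLE call statement CALL (hypothetical usage):

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				   INTEGER_TYPE, VOID_TYPE))
       return NULL_TREE;

   The trailing VOID_TYPE is the endlink; passing a trailing 0 instead
   would accept any further arguments.  */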
11425
11426 /* Default target-specific builtin expander that does nothing. */
11427
11428 rtx
11429 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11430 rtx target ATTRIBUTE_UNUSED,
11431 rtx subtarget ATTRIBUTE_UNUSED,
11432 enum machine_mode mode ATTRIBUTE_UNUSED,
11433 int ignore ATTRIBUTE_UNUSED)
11434 {
11435 return NULL_RTX;
11436 }
11437
11438 /* Returns true if EXP represents data that would potentially reside
11439    in a readonly section.  */
11440
11441 static bool
11442 readonly_data_expr (tree exp)
11443 {
11444 STRIP_NOPS (exp);
11445
11446 if (TREE_CODE (exp) != ADDR_EXPR)
11447 return false;
11448
11449 exp = get_base_address (TREE_OPERAND (exp, 0));
11450 if (!exp)
11451 return false;
11452
11453 /* Make sure we call decl_readonly_section only for trees it
11454 can handle (since it returns true for everything it doesn't
11455 understand). */
11456 if (TREE_CODE (exp) == STRING_CST
11457 || TREE_CODE (exp) == CONSTRUCTOR
11458 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11459 return decl_readonly_section (exp, 0);
11460 else
11461 return false;
11462 }
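
/* E.g. the address of a string literal, or of a TREE_STATIC variable
   placed in a read-only section, qualifies.  One consumer is the
   memmove fold: in a call such as (hypothetical)

     memmove (dst, "constant text", n)

   the source is readonly data, so it cannot overlap the writable
   destination and the copy can be done as memcpy.  */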
11463
11464 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11465 to the call, and TYPE is its return type.
11466
11467 Return NULL_TREE if no simplification was possible, otherwise return the
11468 simplified form of the call as a tree.
11469
11470 The simplified form may be a constant or other expression which
11471 computes the same value, but in a more efficient manner (including
11472 calls to other builtin functions).
11473
11474 The call may contain arguments which need to be evaluated, but
11475 which are not useful to determine the result of the call. In
11476 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11477 COMPOUND_EXPR will be an argument which must be evaluated.
11478 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11479 COMPOUND_EXPR in the chain will contain the tree for the simplified
11480 form of the builtin function call. */
11481
11482 static tree
11483 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11484 {
11485 if (!validate_arg (s1, POINTER_TYPE)
11486 || !validate_arg (s2, POINTER_TYPE))
11487 return NULL_TREE;
11488 else
11489 {
11490 tree fn;
11491 const char *p1, *p2;
11492
11493 p2 = c_getstr (s2);
11494 if (p2 == NULL)
11495 return NULL_TREE;
11496
11497 p1 = c_getstr (s1);
11498 if (p1 != NULL)
11499 {
11500 const char *r = strstr (p1, p2);
11501 tree tem;
11502
11503 if (r == NULL)
11504 return build_int_cst (TREE_TYPE (s1), 0);
11505
11506 /* Return an offset into the constant string argument. */
11507 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11508 return fold_convert_loc (loc, type, tem);
11509 }
11510
11511 /* The argument is const char *, and the result is char *, so we need
11512 a type conversion here to avoid a warning. */
11513 if (p2[0] == '\0')
11514 return fold_convert_loc (loc, type, s1);
11515
11516 if (p2[1] != '\0')
11517 return NULL_TREE;
11518
11519 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11520 if (!fn)
11521 return NULL_TREE;
11522
11523 /* New argument list transforming strstr(s1, s2) to
11524 strchr(s1, s2[0]). */
11525 return build_call_expr_loc (loc, fn, 2, s1,
11526 build_int_cst (integer_type_node, p2[0]));
11527 }
11528 }
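
/* Concretely, with hypothetical arguments:

     strstr ("abcde", "cd")   folds to   &"abcde"[2]
     strstr (s, "")           folds to   (char *) s
     strstr (s, "/")          folds to   strchr (s, '/')

   A needle of two or more characters with a non-constant haystack is
   left for the library.  */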
11529
11530 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11531 the call, and TYPE is its return type.
11532
11533 Return NULL_TREE if no simplification was possible, otherwise return the
11534 simplified form of the call as a tree.
11535
11536 The simplified form may be a constant or other expression which
11537 computes the same value, but in a more efficient manner (including
11538 calls to other builtin functions).
11539
11540 The call may contain arguments which need to be evaluated, but
11541 which are not useful to determine the result of the call. In
11542 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11543 COMPOUND_EXPR will be an argument which must be evaluated.
11544 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11545 COMPOUND_EXPR in the chain will contain the tree for the simplified
11546 form of the builtin function call. */
11547
11548 static tree
11549 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11550 {
11551 if (!validate_arg (s1, POINTER_TYPE)
11552 || !validate_arg (s2, INTEGER_TYPE))
11553 return NULL_TREE;
11554 else
11555 {
11556 const char *p1;
11557
11558 if (TREE_CODE (s2) != INTEGER_CST)
11559 return NULL_TREE;
11560
11561 p1 = c_getstr (s1);
11562 if (p1 != NULL)
11563 {
11564 char c;
11565 const char *r;
11566 tree tem;
11567
11568 if (target_char_cast (s2, &c))
11569 return NULL_TREE;
11570
11571 r = strchr (p1, c);
11572
11573 if (r == NULL)
11574 return build_int_cst (TREE_TYPE (s1), 0);
11575
11576 /* Return an offset into the constant string argument. */
11577 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11578 return fold_convert_loc (loc, type, tem);
11579 }
11580 return NULL_TREE;
11581 }
11582 }
11583
11584 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11585 the call, and TYPE is its return type.
11586
11587 Return NULL_TREE if no simplification was possible, otherwise return the
11588 simplified form of the call as a tree.
11589
11590 The simplified form may be a constant or other expression which
11591 computes the same value, but in a more efficient manner (including
11592 calls to other builtin functions).
11593
11594 The call may contain arguments which need to be evaluated, but
11595 which are not useful to determine the result of the call. In
11596 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11597 COMPOUND_EXPR will be an argument which must be evaluated.
11598 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11599 COMPOUND_EXPR in the chain will contain the tree for the simplified
11600 form of the builtin function call. */
11601
11602 static tree
11603 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11604 {
11605 if (!validate_arg (s1, POINTER_TYPE)
11606 || !validate_arg (s2, INTEGER_TYPE))
11607 return NULL_TREE;
11608 else
11609 {
11610 tree fn;
11611 const char *p1;
11612
11613 if (TREE_CODE (s2) != INTEGER_CST)
11614 return NULL_TREE;
11615
11616 p1 = c_getstr (s1);
11617 if (p1 != NULL)
11618 {
11619 char c;
11620 const char *r;
11621 tree tem;
11622
11623 if (target_char_cast (s2, &c))
11624 return NULL_TREE;
11625
11626 r = strrchr (p1, c);
11627
11628 if (r == NULL)
11629 return build_int_cst (TREE_TYPE (s1), 0);
11630
11631 /* Return an offset into the constant string argument. */
11632 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11633 return fold_convert_loc (loc, type, tem);
11634 }
11635
11636 if (! integer_zerop (s2))
11637 return NULL_TREE;
11638
11639 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11640 if (!fn)
11641 return NULL_TREE;
11642
11643 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11644 return build_call_expr_loc (loc, fn, 2, s1, s2);
11645 }
11646 }
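
/* Concretely, with hypothetical arguments:

     strrchr ("a/b/c", '/')   folds to   &"a/b/c"[3]
     strrchr (s, '\0')        folds to   strchr (s, '\0')

   A non-constant string with a nonzero character is left alone, since
   strrchr would have to scan to the end of the string anyway.  */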
11647
11648 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11649 to the call, and TYPE is its return type.
11650
11651 Return NULL_TREE if no simplification was possible, otherwise return the
11652 simplified form of the call as a tree.
11653
11654 The simplified form may be a constant or other expression which
11655 computes the same value, but in a more efficient manner (including
11656 calls to other builtin functions).
11657
11658 The call may contain arguments which need to be evaluated, but
11659 which are not useful to determine the result of the call. In
11660 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11661 COMPOUND_EXPR will be an argument which must be evaluated.
11662 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11663 COMPOUND_EXPR in the chain will contain the tree for the simplified
11664 form of the builtin function call. */
11665
11666 static tree
11667 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11668 {
11669 if (!validate_arg (s1, POINTER_TYPE)
11670 || !validate_arg (s2, POINTER_TYPE))
11671 return NULL_TREE;
11672 else
11673 {
11674 tree fn;
11675 const char *p1, *p2;
11676
11677 p2 = c_getstr (s2);
11678 if (p2 == NULL)
11679 return NULL_TREE;
11680
11681 p1 = c_getstr (s1);
11682 if (p1 != NULL)
11683 {
11684 const char *r = strpbrk (p1, p2);
11685 tree tem;
11686
11687 if (r == NULL)
11688 return build_int_cst (TREE_TYPE (s1), 0);
11689
11690 /* Return an offset into the constant string argument. */
11691 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11692 return fold_convert_loc (loc, type, tem);
11693 }
11694
11695 if (p2[0] == '\0')
11696 /* strpbrk(x, "") == NULL.
11697 Evaluate and ignore s1 in case it had side-effects. */
11698 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11699
11700 if (p2[1] != '\0')
11701 return NULL_TREE; /* Really call strpbrk. */
11702
11703 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11704 if (!fn)
11705 return NULL_TREE;
11706
11707 /* New argument list transforming strpbrk(s1, s2) to
11708 strchr(s1, s2[0]). */
11709 return build_call_expr_loc (loc, fn, 2, s1,
11710 build_int_cst (integer_type_node, p2[0]));
11711 }
11712 }
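
/* Concretely, with hypothetical arguments:

     strpbrk ("abc", "bd")   folds to   &"abc"[1]
     strpbrk (s, "")         folds to   a null pointer (s is still
					evaluated for side effects)
     strpbrk (s, "/")        folds to   strchr (s, '/')
*/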
11713
11714 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11715 to the call.
11716
11717 Return NULL_TREE if no simplification was possible, otherwise return the
11718 simplified form of the call as a tree.
11719
11720 The simplified form may be a constant or other expression which
11721 computes the same value, but in a more efficient manner (including
11722 calls to other builtin functions).
11723
11724 The call may contain arguments which need to be evaluated, but
11725 which are not useful to determine the result of the call. In
11726 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11727 COMPOUND_EXPR will be an argument which must be evaluated.
11728 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11729 COMPOUND_EXPR in the chain will contain the tree for the simplified
11730 form of the builtin function call. */
11731
11732 static tree
11733 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11734 {
11735 if (!validate_arg (dst, POINTER_TYPE)
11736 || !validate_arg (src, POINTER_TYPE))
11737 return NULL_TREE;
11738 else
11739 {
11740 const char *p = c_getstr (src);
11741
11742 /* If the string length is zero, return the dst parameter. */
11743 if (p && *p == '\0')
11744 return dst;
11745
11746 if (optimize_insn_for_speed_p ())
11747 {
11748 /* See if we can store by pieces into (dst + strlen(dst)). */
11749 tree newdst, call;
11750 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11751 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11752
11753 if (!strlen_fn || !strcpy_fn)
11754 return NULL_TREE;
11755
11756 	  /* If we don't have a movstr pattern, only do the transformation
11757 	     when the length of the source string is computable; the strcpy
11758 	     can then be expanded as a memcpy (probably later expanding to
11759 	     a sequence of mov instructions).  With movstr instructions we
11760 	     can emit strcpy calls unconditionally.  */
11761 if (!HAVE_movstr)
11762 {
11763 tree len = c_strlen (src, 1);
11764 if (! len || TREE_SIDE_EFFECTS (len))
11765 return NULL_TREE;
11766 }
11767
11768 /* Stabilize the argument list. */
11769 dst = builtin_save_expr (dst);
11770
11771 /* Create strlen (dst). */
11772 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11773 /* Create (dst p+ strlen (dst)). */
11774
11775 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11776 newdst = builtin_save_expr (newdst);
11777
11778 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11779 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11780 }
11781 return NULL_TREE;
11782 }
11783 }
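
/* The speed transformation above rewrites, for hypothetical DST and
   SRC,

     strcat (dst, src)

   into the equivalent of

     (strcpy (dst + strlen (dst), src), dst)

   so that the copy into the tail of DST can be expanded inline.  */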
11784
11785 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11786 arguments to the call.
11787
11788 Return NULL_TREE if no simplification was possible, otherwise return the
11789 simplified form of the call as a tree.
11790
11791 The simplified form may be a constant or other expression which
11792 computes the same value, but in a more efficient manner (including
11793 calls to other builtin functions).
11794
11795 The call may contain arguments which need to be evaluated, but
11796 which are not useful to determine the result of the call. In
11797 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11798 COMPOUND_EXPR will be an argument which must be evaluated.
11799 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11800 COMPOUND_EXPR in the chain will contain the tree for the simplified
11801 form of the builtin function call. */
11802
11803 static tree
11804 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11805 {
11806 if (!validate_arg (dst, POINTER_TYPE)
11807 || !validate_arg (src, POINTER_TYPE)
11808 || !validate_arg (len, INTEGER_TYPE))
11809 return NULL_TREE;
11810 else
11811 {
11812 const char *p = c_getstr (src);
11813
11814 /* If the requested length is zero, or the src parameter string
11815 length is zero, return the dst parameter. */
11816 if (integer_zerop (len) || (p && *p == '\0'))
11817 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11818
11819 /* If the requested len is greater than or equal to the string
11820 length, call strcat. */
11821 if (TREE_CODE (len) == INTEGER_CST && p
11822 && compare_tree_int (len, strlen (p)) >= 0)
11823 {
11824 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11825
11826 /* If the replacement _DECL isn't initialized, don't do the
11827 transformation. */
11828 if (!fn)
11829 return NULL_TREE;
11830
11831 return build_call_expr_loc (loc, fn, 2, dst, src);
11832 }
11833 return NULL_TREE;
11834 }
11835 }
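
/* Concretely, with hypothetical arguments:

     strncat (dst, src, 0)    folds to   dst (operands still evaluated)
     strncat (dst, "", n)     folds to   dst (likewise)
     strncat (dst, "ab", 5)   folds to   strcat (dst, "ab")

   since a bound at least as large as strlen (src) makes the length
   limit irrelevant.  */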
11836
11837 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11838 to the call.
11839
11840 Return NULL_TREE if no simplification was possible, otherwise return the
11841 simplified form of the call as a tree.
11842
11843 The simplified form may be a constant or other expression which
11844 computes the same value, but in a more efficient manner (including
11845 calls to other builtin functions).
11846
11847 The call may contain arguments which need to be evaluated, but
11848 which are not useful to determine the result of the call. In
11849 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11850 COMPOUND_EXPR will be an argument which must be evaluated.
11851 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11852 COMPOUND_EXPR in the chain will contain the tree for the simplified
11853 form of the builtin function call. */
11854
11855 static tree
11856 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11857 {
11858 if (!validate_arg (s1, POINTER_TYPE)
11859 || !validate_arg (s2, POINTER_TYPE))
11860 return NULL_TREE;
11861 else
11862 {
11863 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11864
11865 /* If both arguments are constants, evaluate at compile-time. */
11866 if (p1 && p2)
11867 {
11868 const size_t r = strspn (p1, p2);
11869 return build_int_cst (size_type_node, r);
11870 }
11871
11872       /* If either argument is "", the result is zero.  */
11873 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11874 /* Evaluate and ignore both arguments in case either one has
11875 side-effects. */
11876 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11877 s1, s2);
11878 return NULL_TREE;
11879 }
11880 }
11881
11882 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11883 to the call.
11884
11885 Return NULL_TREE if no simplification was possible, otherwise return the
11886 simplified form of the call as a tree.
11887
11888 The simplified form may be a constant or other expression which
11889 computes the same value, but in a more efficient manner (including
11890 calls to other builtin functions).
11891
11892 The call may contain arguments which need to be evaluated, but
11893 which are not useful to determine the result of the call. In
11894 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11895 COMPOUND_EXPR will be an argument which must be evaluated.
11896 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11897 COMPOUND_EXPR in the chain will contain the tree for the simplified
11898 form of the builtin function call. */
11899
11900 static tree
11901 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11902 {
11903 if (!validate_arg (s1, POINTER_TYPE)
11904 || !validate_arg (s2, POINTER_TYPE))
11905 return NULL_TREE;
11906 else
11907 {
11908 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11909
11910 /* If both arguments are constants, evaluate at compile-time. */
11911 if (p1 && p2)
11912 {
11913 const size_t r = strcspn (p1, p2);
11914 return build_int_cst (size_type_node, r);
11915 }
11916
11917       /* If the first argument is "", the result is zero.  */
11918 if (p1 && *p1 == '\0')
11919 {
11920 /* Evaluate and ignore argument s2 in case it has
11921 side-effects. */
11922 return omit_one_operand_loc (loc, size_type_node,
11923 size_zero_node, s2);
11924 }
11925
11926 /* If the second argument is "", return __builtin_strlen(s1). */
11927 if (p2 && *p2 == '\0')
11928 {
11929 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11930
11931 /* If the replacement _DECL isn't initialized, don't do the
11932 transformation. */
11933 if (!fn)
11934 return NULL_TREE;
11935
11936 return build_call_expr_loc (loc, fn, 1, s1);
11937 }
11938 return NULL_TREE;
11939 }
11940 }
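
/* Concretely, with hypothetical arguments:

     strcspn ("abc", "c")   folds to   (size_t) 2
     strcspn ("", s2)       folds to   (size_t) 0 (s2 still evaluated)
     strcspn (s1, "")       folds to   strlen (s1)
*/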
11941
11942 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11943 to the call. IGNORE is true if the value returned
11944    by the builtin will be ignored.  UNLOCKED is true if this is
11945    actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
11946 the known length of the string. Return NULL_TREE if no simplification
11947 was possible. */
11948
11949 tree
11950 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11951 bool ignore, bool unlocked, tree len)
11952 {
11953 /* If we're using an unlocked function, assume the other unlocked
11954 functions exist explicitly. */
11955 tree const fn_fputc = (unlocked
11956 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11957 : builtin_decl_implicit (BUILT_IN_FPUTC));
11958 tree const fn_fwrite = (unlocked
11959 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11960 : builtin_decl_implicit (BUILT_IN_FWRITE));
11961
11962 /* If the return value is used, don't do the transformation. */
11963 if (!ignore)
11964 return NULL_TREE;
11965
11966 /* Verify the arguments in the original call. */
11967 if (!validate_arg (arg0, POINTER_TYPE)
11968 || !validate_arg (arg1, POINTER_TYPE))
11969 return NULL_TREE;
11970
11971 if (! len)
11972 len = c_strlen (arg0, 0);
11973
11974 /* Get the length of the string passed to fputs. If the length
11975 can't be determined, punt. */
11976 if (!len
11977 || TREE_CODE (len) != INTEGER_CST)
11978 return NULL_TREE;
11979
11980 switch (compare_tree_int (len, 1))
11981 {
11982     case -1: /* length is 0, delete the call entirely.  */
11983       return omit_one_operand_loc (loc, integer_type_node,
11984 				   integer_zero_node, arg1);
11985
11986 case 0: /* length is 1, call fputc. */
11987 {
11988 const char *p = c_getstr (arg0);
11989
11990 if (p != NULL)
11991 {
11992 if (fn_fputc)
11993 return build_call_expr_loc (loc, fn_fputc, 2,
11994 build_int_cst
11995 (integer_type_node, p[0]), arg1);
11996 else
11997 return NULL_TREE;
11998 }
11999 }
12000 /* FALLTHROUGH */
12001 case 1: /* length is greater than 1, call fwrite. */
12002 {
12003 	/* If optimizing for size, keep fputs.  */
12004 if (optimize_function_for_size_p (cfun))
12005 return NULL_TREE;
12006 /* New argument list transforming fputs(string, stream) to
12007 fwrite(string, 1, len, stream). */
12008 if (fn_fwrite)
12009 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12010 size_one_node, len, arg1);
12011 else
12012 return NULL_TREE;
12013 }
12014 default:
12015 gcc_unreachable ();
12016 }
12017 return NULL_TREE;
12018 }
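
/* Concretely, when the result is unused (FP is a hypothetical
   stream):

     fputs ("", fp)     folds away entirely (fp still evaluated)
     fputs ("\n", fp)   folds to   fputc ('\n', fp)
     fputs ("hi", fp)   folds to   fwrite ("hi", 1, 2, fp)

   except that the fwrite form is suppressed when optimizing for
   size.  */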
12019
12020 /* Fold the next_arg or va_start call EXP.  Returns true if an error
12021    was produced, false otherwise.  This is tracked so that we don't
12022    output the error or warning twice or three times.  */
12023
12024 bool
12025 fold_builtin_next_arg (tree exp, bool va_start_p)
12026 {
12027 tree fntype = TREE_TYPE (current_function_decl);
12028 int nargs = call_expr_nargs (exp);
12029 tree arg;
12030   /* There is a good chance the current input_location points inside the
12031      definition of the va_start macro (perhaps on the token for
12032      the builtin) in a system header, so warnings would not be emitted.
12033      Use the location in real source code instead.  */
12034 source_location current_location =
12035 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12036 NULL);
12037
12038 if (!stdarg_p (fntype))
12039 {
12040 error ("%<va_start%> used in function with fixed args");
12041 return true;
12042 }
12043
12044 if (va_start_p)
12045 {
12046       if (nargs != 2)
12047 {
12048 error ("wrong number of arguments to function %<va_start%>");
12049 return true;
12050 }
12051 arg = CALL_EXPR_ARG (exp, 1);
12052 }
12053 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12054 when we checked the arguments and if needed issued a warning. */
12055 else
12056 {
12057 if (nargs == 0)
12058 {
12059 /* Evidently an out of date version of <stdarg.h>; can't validate
12060 va_start's second argument, but can still work as intended. */
12061 warning_at (current_location,
12062 OPT_Wvarargs,
12063 "%<__builtin_next_arg%> called without an argument");
12064 return true;
12065 }
12066 else if (nargs > 1)
12067 {
12068 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12069 return true;
12070 }
12071 arg = CALL_EXPR_ARG (exp, 0);
12072 }
12073
12074 if (TREE_CODE (arg) == SSA_NAME)
12075 arg = SSA_NAME_VAR (arg);
12076
12077 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12078 or __builtin_next_arg (0) the first time we see it, after checking
12079 the arguments and if needed issuing a warning. */
12080 if (!integer_zerop (arg))
12081 {
12082 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12083
12084 /* Strip off all nops for the sake of the comparison. This
12085 is not quite the same as STRIP_NOPS. It does more.
12086 We must also strip off INDIRECT_EXPR for C++ reference
12087 parameters. */
12088 while (CONVERT_EXPR_P (arg)
12089 || TREE_CODE (arg) == INDIRECT_REF)
12090 arg = TREE_OPERAND (arg, 0);
12091 if (arg != last_parm)
12092 {
12093 	  /* FIXME: Sometimes with the tree optimizers we can be handed
12094 	     something other than the last argument even though the user
12095 	     used the last argument.  We just warn and carry on, so wrong
12096 	     code may be generated because of it.  */
12098 warning_at (current_location,
12099 OPT_Wvarargs,
12100 "second parameter of %<va_start%> not last named argument");
12101 }
12102
12103 /* Undefined by C99 7.15.1.4p4 (va_start):
12104 "If the parameter parmN is declared with the register storage
12105 class, with a function or array type, or with a type that is
12106 not compatible with the type that results after application of
12107 the default argument promotions, the behavior is undefined."
12108 */
12109 else if (DECL_REGISTER (arg))
12110 {
12111 warning_at (current_location,
12112 OPT_Wvarargs,
12113 		      "undefined behavior when second parameter of "
12114 "%<va_start%> is declared with %<register%> storage");
12115 }
12116
12117 /* We want to verify the second parameter just once before the tree
12118 optimizers are run and then avoid keeping it in the tree,
12119 as otherwise we could warn even for correct code like:
12120 void foo (int i, ...)
12121 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12122 if (va_start_p)
12123 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12124 else
12125 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12126 }
12127 return false;
12128 }
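
/* The misuse diagnosed above looks like this hypothetical function,
   where the second argument of va_start should have been B:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // triggers the -Wvarargs warning
       va_end (ap);
     }
*/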
12129
12130
12131 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12132 ORIG may be null if this is a 2-argument call. We don't attempt to
12133 simplify calls with more than 3 arguments.
12134
12135 Return NULL_TREE if no simplification was possible, otherwise return the
12136 simplified form of the call as a tree. If IGNORED is true, it means that
12137 the caller does not use the returned value of the function. */
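/* An illustrative sketch of the two folds below, on hypothetical calls:

     sprintf (buf, "hello")    =>  strcpy (buf, "hello")   [value 5 if used]
     sprintf (buf, "%s", src)  =>  strcpy (buf, src)       [only when the
                                   value is unused or strlen (src) is a
                                   compile-time constant]  */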
12138
12139 static tree
12140 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12141 tree orig, int ignored)
12142 {
12143 tree call, retval;
12144 const char *fmt_str = NULL;
12145
12146 /* Verify the required arguments in the original call. We deal with two
12147 types of sprintf() calls: 'sprintf (dest, fmt)' and
12148 'sprintf (dest, "%s", orig)'. */
12149 if (!validate_arg (dest, POINTER_TYPE)
12150 || !validate_arg (fmt, POINTER_TYPE))
12151 return NULL_TREE;
12152 if (orig && !validate_arg (orig, POINTER_TYPE))
12153 return NULL_TREE;
12154
12155 /* Check whether the format is a literal string constant. */
12156 fmt_str = c_getstr (fmt);
12157 if (fmt_str == NULL)
12158 return NULL_TREE;
12159
12160 call = NULL_TREE;
12161 retval = NULL_TREE;
12162
12163 if (!init_target_chars ())
12164 return NULL_TREE;
12165
12166 /* If the format doesn't contain % args or %%, use strcpy. */
12167 if (strchr (fmt_str, target_percent) == NULL)
12168 {
12169 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12170
12171 if (!fn)
12172 return NULL_TREE;
12173
12174 /* Don't optimize sprintf (buf, "abc", ptr++). */
12175 if (orig)
12176 return NULL_TREE;
12177
12178 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12179 'format' is known to contain no % formats. */
12180 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12181 if (!ignored)
12182 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12183 }
12184
12185 /* If the format is "%s", use strcpy if the result isn't used. */
12186 else if (strcmp (fmt_str, target_percent_s) == 0)
12187 {
12188 tree fn;
12189 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12190
12191 if (!fn)
12192 return NULL_TREE;
12193
12194 /* Don't crash on sprintf (str1, "%s"). */
12195 if (!orig)
12196 return NULL_TREE;
12197
12198 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12199 if (!ignored)
12200 {
12201 retval = c_strlen (orig, 1);
12202 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12203 return NULL_TREE;
12204 }
12205 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12206 }
12207
12208 if (call && retval)
12209 {
12210 retval = fold_convert_loc
12211 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12212 retval);
12213 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12214 }
12215 else
12216 return call;
12217 }
12218
12219 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12220 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12221 attempt to simplify calls with more than 4 arguments.
12222
12223 Return NULL_TREE if no simplification was possible, otherwise return the
12224 simplified form of the call as a tree. If IGNORED is true, it means that
12225 the caller does not use the returned value of the function. */
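/* An illustrative sketch (hypothetical calls): with char buf[32],

     snprintf (buf, 32, "abc")    =>  strcpy (buf, "abc")  [3 < 32]
     snprintf (buf, 32, "%s", s)  =>  strcpy (buf, s)      [strlen (s) must
                                      be a known constant smaller than 32]

   If the bound could be reached or exceeded, no folding is done.  */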
12226
12227 static tree
12228 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12229 tree orig, int ignored)
12230 {
12231 tree call, retval;
12232 const char *fmt_str = NULL;
12233 unsigned HOST_WIDE_INT destlen;
12234
12235 /* Verify the required arguments in the original call. We deal with two
12236 types of snprintf() calls: 'snprintf (dest, cst, fmt)' and
12237 'snprintf (dest, cst, "%s", orig)'. */
12238 if (!validate_arg (dest, POINTER_TYPE)
12239 || !validate_arg (destsize, INTEGER_TYPE)
12240 || !validate_arg (fmt, POINTER_TYPE))
12241 return NULL_TREE;
12242 if (orig && !validate_arg (orig, POINTER_TYPE))
12243 return NULL_TREE;
12244
12245 if (!host_integerp (destsize, 1))
12246 return NULL_TREE;
12247
12248 /* Check whether the format is a literal string constant. */
12249 fmt_str = c_getstr (fmt);
12250 if (fmt_str == NULL)
12251 return NULL_TREE;
12252
12253 call = NULL_TREE;
12254 retval = NULL_TREE;
12255
12256 if (!init_target_chars ())
12257 return NULL_TREE;
12258
12259 destlen = tree_low_cst (destsize, 1);
12260
12261 /* If the format doesn't contain % args or %%, use strcpy. */
12262 if (strchr (fmt_str, target_percent) == NULL)
12263 {
12264 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12265 size_t len = strlen (fmt_str);
12266
12267 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12268 if (orig)
12269 return NULL_TREE;
12270
12271 /* We could expand this as
12272 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12273 or to
12274 memcpy (str, fmt_with_nul_at_cstm1, cst);
12275 but in the former case that might increase code size
12276 and in the latter case grow .rodata section too much.
12277 So punt for now. */
12278 if (len >= destlen)
12279 return NULL_TREE;
12280
12281 if (!fn)
12282 return NULL_TREE;
12283
12284 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12285 'format' is known to contain no % formats and
12286 strlen (fmt) < cst. */
12287 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12288
12289 if (!ignored)
12290 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12291 }
12292
12293 /* If the format is "%s", use strcpy if the result isn't used. */
12294 else if (strcmp (fmt_str, target_percent_s) == 0)
12295 {
12296 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12297 unsigned HOST_WIDE_INT origlen;
12298
12299 /* Don't crash on snprintf (str1, cst, "%s"). */
12300 if (!orig)
12301 return NULL_TREE;
12302
12303 retval = c_strlen (orig, 1);
12304 if (!retval || !host_integerp (retval, 1))
12305 return NULL_TREE;
12306
12307 origlen = tree_low_cst (retval, 1);
12308 /* We could expand this as
12309 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12310 or to
12311 memcpy (str1, str2_with_nul_at_cstm1, cst);
12312 but in the former case that might increase code size
12313 and in the latter case grow .rodata section too much.
12314 So punt for now. */
12315 if (origlen >= destlen)
12316 return NULL_TREE;
12317
12318 /* Convert snprintf (str1, cst, "%s", str2) into
12319 strcpy (str1, str2) if strlen (str2) < cst. */
12320 if (!fn)
12321 return NULL_TREE;
12322
12323 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12324
12325 if (ignored)
12326 retval = NULL_TREE;
12327 }
12328
12329 if (call && retval)
12330 {
12331 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12332 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12333 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12334 }
12335 else
12336 return call;
12337 }
12338
12339 /* Expand a call EXP to __builtin_object_size. */
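/* If the call survives to expansion time, only the conservative
   defaults remain; for example, __builtin_object_size (p, 0) expands
   to (size_t) -1 and __builtin_object_size (p, 2) to (size_t) 0.  */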
12340
12341 rtx
12342 expand_builtin_object_size (tree exp)
12343 {
12344 tree ost;
12345 int object_size_type;
12346 tree fndecl = get_callee_fndecl (exp);
12347
12348 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12349 {
12350 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12351 exp, fndecl);
12352 expand_builtin_trap ();
12353 return const0_rtx;
12354 }
12355
12356 ost = CALL_EXPR_ARG (exp, 1);
12357 STRIP_NOPS (ost);
12358
12359 if (TREE_CODE (ost) != INTEGER_CST
12360 || tree_int_cst_sgn (ost) < 0
12361 || compare_tree_int (ost, 3) > 0)
12362 {
12363 error ("%Klast argument of %D is not integer constant between 0 and 3",
12364 exp, fndecl);
12365 expand_builtin_trap ();
12366 return const0_rtx;
12367 }
12368
12369 object_size_type = tree_low_cst (ost, 0);
12370
12371 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12372 }
12373
12374 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12375 FCODE is the BUILT_IN_* to use.
12376 Return NULL_RTX if we failed; the caller should emit a normal call,
12377 otherwise try to get the result in TARGET, if convenient (and in
12378 mode MODE if that's convenient). */
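/* A hypothetical example: __memcpy_chk (d, s, 16, 32) is expanded as a
   plain memcpy (d, s, 16) because the constant length fits in the
   known object size; with the two constants swapped it would instead
   trigger the "will always overflow" warning below and fall back to a
   normal library call.  */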
12379
12380 static rtx
12381 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12382 enum built_in_function fcode)
12383 {
12384 tree dest, src, len, size;
12385
12386 if (!validate_arglist (exp,
12387 POINTER_TYPE,
12388 fcode == BUILT_IN_MEMSET_CHK
12389 ? INTEGER_TYPE : POINTER_TYPE,
12390 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12391 return NULL_RTX;
12392
12393 dest = CALL_EXPR_ARG (exp, 0);
12394 src = CALL_EXPR_ARG (exp, 1);
12395 len = CALL_EXPR_ARG (exp, 2);
12396 size = CALL_EXPR_ARG (exp, 3);
12397
12398 if (! host_integerp (size, 1))
12399 return NULL_RTX;
12400
12401 if (host_integerp (len, 1) || integer_all_onesp (size))
12402 {
12403 tree fn;
12404
12405 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12406 {
12407 warning_at (tree_nonartificial_location (exp),
12408 0, "%Kcall to %D will always overflow destination buffer",
12409 exp, get_callee_fndecl (exp));
12410 return NULL_RTX;
12411 }
12412
12413 fn = NULL_TREE;
12414 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12415 mem{cpy,pcpy,move,set} is available. */
12416 switch (fcode)
12417 {
12418 case BUILT_IN_MEMCPY_CHK:
12419 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12420 break;
12421 case BUILT_IN_MEMPCPY_CHK:
12422 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12423 break;
12424 case BUILT_IN_MEMMOVE_CHK:
12425 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12426 break;
12427 case BUILT_IN_MEMSET_CHK:
12428 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12429 break;
12430 default:
12431 break;
12432 }
12433
12434 if (! fn)
12435 return NULL_RTX;
12436
12437 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12438 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12439 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12440 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12441 }
12442 else if (fcode == BUILT_IN_MEMSET_CHK)
12443 return NULL_RTX;
12444 else
12445 {
12446 unsigned int dest_align = get_pointer_alignment (dest);
12447
12448 /* If DEST is not a pointer type, call the normal function. */
12449 if (dest_align == 0)
12450 return NULL_RTX;
12451
12452 /* If SRC and DEST are the same (and not volatile), do nothing. */
12453 if (operand_equal_p (src, dest, 0))
12454 {
12455 tree expr;
12456
12457 if (fcode != BUILT_IN_MEMPCPY_CHK)
12458 {
12459 /* Evaluate and ignore LEN in case it has side-effects. */
12460 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12461 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12462 }
12463
12464 expr = fold_build_pointer_plus (dest, len);
12465 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12466 }
12467
12468 /* __memmove_chk special case. */
12469 if (fcode == BUILT_IN_MEMMOVE_CHK)
12470 {
12471 unsigned int src_align = get_pointer_alignment (src);
12472
12473 if (src_align == 0)
12474 return NULL_RTX;
12475
12476 /* If src is categorized for a readonly section we can use
12477 normal __memcpy_chk. */
12478 if (readonly_data_expr (src))
12479 {
12480 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12481 if (!fn)
12482 return NULL_RTX;
12483 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12484 dest, src, len, size);
12485 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12486 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12487 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12488 }
12489 }
12490 return NULL_RTX;
12491 }
12492 }
12493
12494 /* Emit warning if a buffer overflow is detected at compile time. */
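/* A hypothetical snippet that triggers the warning:

     char buf[4];
     __strcpy_chk (buf, "hello", __builtin_object_size (buf, 0));

   The constant source length 5 exceeds the known object size 4, so
   "will always overflow destination buffer" is reported.  */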
12495
12496 static void
12497 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12498 {
12499 int is_strlen = 0;
12500 tree len, size;
12501 location_t loc = tree_nonartificial_location (exp);
12502
12503 switch (fcode)
12504 {
12505 case BUILT_IN_STRCPY_CHK:
12506 case BUILT_IN_STPCPY_CHK:
12507 /* For __strcat_chk the warning will be emitted only if overflowing
12508 by at least strlen (dest) + 1 bytes. */
12509 case BUILT_IN_STRCAT_CHK:
12510 len = CALL_EXPR_ARG (exp, 1);
12511 size = CALL_EXPR_ARG (exp, 2);
12512 is_strlen = 1;
12513 break;
12514 case BUILT_IN_STRNCAT_CHK:
12515 case BUILT_IN_STRNCPY_CHK:
12516 case BUILT_IN_STPNCPY_CHK:
12517 len = CALL_EXPR_ARG (exp, 2);
12518 size = CALL_EXPR_ARG (exp, 3);
12519 break;
12520 case BUILT_IN_SNPRINTF_CHK:
12521 case BUILT_IN_VSNPRINTF_CHK:
12522 len = CALL_EXPR_ARG (exp, 1);
12523 size = CALL_EXPR_ARG (exp, 3);
12524 break;
12525 default:
12526 gcc_unreachable ();
12527 }
12528
12529 if (!len || !size)
12530 return;
12531
12532 if (! host_integerp (size, 1) || integer_all_onesp (size))
12533 return;
12534
12535 if (is_strlen)
12536 {
12537 len = c_strlen (len, 1);
12538 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12539 return;
12540 }
12541 else if (fcode == BUILT_IN_STRNCAT_CHK)
12542 {
12543 tree src = CALL_EXPR_ARG (exp, 1);
12544 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12545 return;
12546 src = c_strlen (src, 1);
12547 if (! src || ! host_integerp (src, 1))
12548 {
12549 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12550 exp, get_callee_fndecl (exp));
12551 return;
12552 }
12553 else if (tree_int_cst_lt (src, size))
12554 return;
12555 }
12556 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12557 return;
12558
12559 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12560 exp, get_callee_fndecl (exp));
12561 }
12562
12563 /* Emit warning if a buffer overflow is detected at compile time
12564 in __sprintf_chk/__vsprintf_chk calls. */
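/* For instance (hypothetically), __sprintf_chk (buf, 1, 4, "hello")
   has a format without %, so the output length 5 is known to exceed
   the object size 4 and the overflow warning is emitted.  */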
12565
12566 static void
12567 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12568 {
12569 tree size, len, fmt;
12570 const char *fmt_str;
12571 int nargs = call_expr_nargs (exp);
12572
12573 /* Verify the required arguments in the original call. */
12574
12575 if (nargs < 4)
12576 return;
12577 size = CALL_EXPR_ARG (exp, 2);
12578 fmt = CALL_EXPR_ARG (exp, 3);
12579
12580 if (! host_integerp (size, 1) || integer_all_onesp (size))
12581 return;
12582
12583 /* Check whether the format is a literal string constant. */
12584 fmt_str = c_getstr (fmt);
12585 if (fmt_str == NULL)
12586 return;
12587
12588 if (!init_target_chars ())
12589 return;
12590
12591 /* If the format doesn't contain % args or %%, we know its size. */
12592 if (strchr (fmt_str, target_percent) == 0)
12593 len = build_int_cstu (size_type_node, strlen (fmt_str));
12594 /* If the format is "%s" and first ... argument is a string literal,
12595 we know it too. */
12596 else if (fcode == BUILT_IN_SPRINTF_CHK
12597 && strcmp (fmt_str, target_percent_s) == 0)
12598 {
12599 tree arg;
12600
12601 if (nargs < 5)
12602 return;
12603 arg = CALL_EXPR_ARG (exp, 4);
12604 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12605 return;
12606
12607 len = c_strlen (arg, 1);
12608 if (!len || ! host_integerp (len, 1))
12609 return;
12610 }
12611 else
12612 return;
12613
12614 if (! tree_int_cst_lt (len, size))
12615 warning_at (tree_nonartificial_location (exp),
12616 0, "%Kcall to %D will always overflow destination buffer",
12617 exp, get_callee_fndecl (exp));
12618 }
12619
12620 /* Emit a warning if free is called with the address of a variable. */
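/* For example (hypothetical), given

     int x;
     free (&x);

   the -Wfree-nonheap-object warning "attempt to free a non-heap
   object 'x'" is emitted.  */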
12621
12622 static void
12623 maybe_emit_free_warning (tree exp)
12624 {
12625 tree arg = CALL_EXPR_ARG (exp, 0);
12626
12627 STRIP_NOPS (arg);
12628 if (TREE_CODE (arg) != ADDR_EXPR)
12629 return;
12630
12631 arg = get_base_address (TREE_OPERAND (arg, 0));
12632 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12633 return;
12634
12635 if (SSA_VAR_P (arg))
12636 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12637 "%Kattempt to free a non-heap object %qD", exp, arg);
12638 else
12639 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12640 "%Kattempt to free a non-heap object", exp);
12641 }
12642
12643 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12644 if possible. */
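/* Illustrative folds (hypothetical): for char buf[64],

     __builtin_object_size (buf, 0)       =>  (size_t) 64
     __builtin_object_size (buf + 16, 0)  =>  (size_t) 48

   A pointer whose target cannot be determined yet is left for later
   passes rather than folded here.  */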
12645
12646 tree
12647 fold_builtin_object_size (tree ptr, tree ost)
12648 {
12649 unsigned HOST_WIDE_INT bytes;
12650 int object_size_type;
12651
12652 if (!validate_arg (ptr, POINTER_TYPE)
12653 || !validate_arg (ost, INTEGER_TYPE))
12654 return NULL_TREE;
12655
12656 STRIP_NOPS (ost);
12657
12658 if (TREE_CODE (ost) != INTEGER_CST
12659 || tree_int_cst_sgn (ost) < 0
12660 || compare_tree_int (ost, 3) > 0)
12661 return NULL_TREE;
12662
12663 object_size_type = tree_low_cst (ost, 0);
12664
12665 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12666 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12667 and (size_t) 0 for types 2 and 3. */
12668 if (TREE_SIDE_EFFECTS (ptr))
12669 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12670
12671 if (TREE_CODE (ptr) == ADDR_EXPR)
12672 {
12673 bytes = compute_builtin_object_size (ptr, object_size_type);
12674 if (double_int_fits_to_tree_p (size_type_node,
12675 double_int::from_uhwi (bytes)))
12676 return build_int_cstu (size_type_node, bytes);
12677 }
12678 else if (TREE_CODE (ptr) == SSA_NAME)
12679 {
12680 /* If object size is not known yet, delay folding until
12681 later. Maybe subsequent passes will help determining
12682 it. */
12683 bytes = compute_builtin_object_size (ptr, object_size_type);
12684 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12685 && double_int_fits_to_tree_p (size_type_node,
12686 double_int::from_uhwi (bytes)))
12687 return build_int_cstu (size_type_node, bytes);
12688 }
12689
12690 return NULL_TREE;
12691 }
12692
12693 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12694 DEST, SRC, LEN, and SIZE are the arguments to the call.
12695 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12696 code of the builtin. If MAXLEN is not NULL, it is the maximum length
12697 passed as the third argument. */
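/* A sketch of the intended folds (hypothetical calls):

     __memcpy_chk (d, s, n, 32)  =>  memcpy (d, s, n)  [n constant and
                                     n <= 32, or SIZE unknown, i.e. -1]
     __memcpy_chk (d, d, n, 32)  =>  d                 [self-copy]

   Calls whose length cannot be shown to fit are left alone.  */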
12698
12699 tree
12700 fold_builtin_memory_chk (location_t loc, tree fndecl,
12701 tree dest, tree src, tree len, tree size,
12702 tree maxlen, bool ignore,
12703 enum built_in_function fcode)
12704 {
12705 tree fn;
12706
12707 if (!validate_arg (dest, POINTER_TYPE)
12708 || !validate_arg (src,
12709 (fcode == BUILT_IN_MEMSET_CHK
12710 ? INTEGER_TYPE : POINTER_TYPE))
12711 || !validate_arg (len, INTEGER_TYPE)
12712 || !validate_arg (size, INTEGER_TYPE))
12713 return NULL_TREE;
12714
12715 /* If SRC and DEST are the same (and not volatile), return DEST
12716 (resp. DEST+LEN for __mempcpy_chk). */
12717 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12718 {
12719 if (fcode != BUILT_IN_MEMPCPY_CHK)
12720 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12721 dest, len);
12722 else
12723 {
12724 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12725 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12726 }
12727 }
12728
12729 if (! host_integerp (size, 1))
12730 return NULL_TREE;
12731
12732 if (! integer_all_onesp (size))
12733 {
12734 if (! host_integerp (len, 1))
12735 {
12736 /* If LEN is not constant, try MAXLEN too.
12737 For MAXLEN only allow optimizing into non-_ocs function
12738 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12739 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12740 {
12741 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12742 {
12743 /* (void) __mempcpy_chk () can be optimized into
12744 (void) __memcpy_chk (). */
12745 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12746 if (!fn)
12747 return NULL_TREE;
12748
12749 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12750 }
12751 return NULL_TREE;
12752 }
12753 }
12754 else
12755 maxlen = len;
12756
12757 if (tree_int_cst_lt (size, maxlen))
12758 return NULL_TREE;
12759 }
12760
12761 fn = NULL_TREE;
12762 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12763 mem{cpy,pcpy,move,set} is available. */
12764 switch (fcode)
12765 {
12766 case BUILT_IN_MEMCPY_CHK:
12767 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12768 break;
12769 case BUILT_IN_MEMPCPY_CHK:
12770 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12771 break;
12772 case BUILT_IN_MEMMOVE_CHK:
12773 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12774 break;
12775 case BUILT_IN_MEMSET_CHK:
12776 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12777 break;
12778 default:
12779 break;
12780 }
12781
12782 if (!fn)
12783 return NULL_TREE;
12784
12785 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12786 }
12787
12788 /* Fold a call to the __st[rp]cpy_chk builtin.
12789 DEST, SRC, and SIZE are the arguments to the call.
12790 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12791 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12792 strings passed as second argument. */
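/* Hypothetical examples of the folds below:

     __strcpy_chk (d, "abc", 8)     =>  strcpy (d, "abc")        [3 < 8]
     __strcpy_chk (d, s, 8)         =>  __memcpy_chk (d, s, n + 1, 8)
                                        [when c_strlen (s) yields the
                                        non-constant expression n]
     (void) __stpcpy_chk (d, s, 8)  =>  __strcpy_chk (d, s, 8)  */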
12793
12794 tree
12795 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12796 tree src, tree size,
12797 tree maxlen, bool ignore,
12798 enum built_in_function fcode)
12799 {
12800 tree len, fn;
12801
12802 if (!validate_arg (dest, POINTER_TYPE)
12803 || !validate_arg (src, POINTER_TYPE)
12804 || !validate_arg (size, INTEGER_TYPE))
12805 return NULL_TREE;
12806
12807 /* If SRC and DEST are the same (and not volatile), return DEST. */
12808 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12809 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12810
12811 if (! host_integerp (size, 1))
12812 return NULL_TREE;
12813
12814 if (! integer_all_onesp (size))
12815 {
12816 len = c_strlen (src, 1);
12817 if (! len || ! host_integerp (len, 1))
12818 {
12819 /* If LEN is not constant, try MAXLEN too.
12820 For MAXLEN only allow optimizing into non-_ocs function
12821 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12822 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12823 {
12824 if (fcode == BUILT_IN_STPCPY_CHK)
12825 {
12826 if (! ignore)
12827 return NULL_TREE;
12828
12829 /* If return value of __stpcpy_chk is ignored,
12830 optimize into __strcpy_chk. */
12831 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12832 if (!fn)
12833 return NULL_TREE;
12834
12835 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12836 }
12837
12838 if (! len || TREE_SIDE_EFFECTS (len))
12839 return NULL_TREE;
12840
12841 /* If c_strlen returned something, but not a constant,
12842 transform __strcpy_chk into __memcpy_chk. */
12843 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12844 if (!fn)
12845 return NULL_TREE;
12846
12847 len = fold_convert_loc (loc, size_type_node, len);
12848 len = size_binop_loc (loc, PLUS_EXPR, len,
12849 build_int_cst (size_type_node, 1));
12850 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12851 build_call_expr_loc (loc, fn, 4,
12852 dest, src, len, size));
12853 }
12854 }
12855 else
12856 maxlen = len;
12857
12858 if (! tree_int_cst_lt (maxlen, size))
12859 return NULL_TREE;
12860 }
12861
12862 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12863 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12864 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12865 if (!fn)
12866 return NULL_TREE;
12867
12868 return build_call_expr_loc (loc, fn, 2, dest, src);
12869 }
12870
12871 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12872 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12873 length passed as third argument. IGNORE is true if return value can be
12874 ignored. FCODE is the BUILT_IN_* code of the builtin. */
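/* Hypothetical examples:

     __strncpy_chk (d, s, 8, 32)         =>  strncpy (d, s, 8)  [8 <= 32]
     (void) __stpncpy_chk (d, s, n, 32)  =>  __strncpy_chk (d, s, n, 32)  */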
12875
12876 tree
12877 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12878 tree len, tree size, tree maxlen, bool ignore,
12879 enum built_in_function fcode)
12880 {
12881 tree fn;
12882
12883 if (!validate_arg (dest, POINTER_TYPE)
12884 || !validate_arg (src, POINTER_TYPE)
12885 || !validate_arg (len, INTEGER_TYPE)
12886 || !validate_arg (size, INTEGER_TYPE))
12887 return NULL_TREE;
12888
12889 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12890 {
12891 /* If return value of __stpncpy_chk is ignored,
12892 optimize into __strncpy_chk. */
12893 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12894 if (fn)
12895 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12896 }
12897
12898 if (! host_integerp (size, 1))
12899 return NULL_TREE;
12900
12901 if (! integer_all_onesp (size))
12902 {
12903 if (! host_integerp (len, 1))
12904 {
12905 /* If LEN is not constant, try MAXLEN too.
12906 For MAXLEN only allow optimizing into non-_ocs function
12907 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12908 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12909 return NULL_TREE;
12910 }
12911 else
12912 maxlen = len;
12913
12914 if (tree_int_cst_lt (size, maxlen))
12915 return NULL_TREE;
12916 }
12917
12918 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12919 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12920 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12921 if (!fn)
12922 return NULL_TREE;
12923
12924 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12925 }
12926
12927 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12928 are the arguments to the call. */
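/* Hypothetical examples:

     __strcat_chk (d, "", os)  =>  d              [empty SRC is a no-op]
     __strcat_chk (d, s, -1)   =>  strcat (d, s)  [object size unknown]  */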
12929
12930 static tree
12931 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12932 tree src, tree size)
12933 {
12934 tree fn;
12935 const char *p;
12936
12937 if (!validate_arg (dest, POINTER_TYPE)
12938 || !validate_arg (src, POINTER_TYPE)
12939 || !validate_arg (size, INTEGER_TYPE))
12940 return NULL_TREE;
12941
12942 p = c_getstr (src);
12943 /* If the SRC parameter is "", return DEST. */
12944 if (p && *p == '\0')
12945 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12946
12947 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12948 return NULL_TREE;
12949
12950 /* If __builtin_strcat_chk is used, assume strcat is available. */
12951 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12952 if (!fn)
12953 return NULL_TREE;
12954
12955 return build_call_expr_loc (loc, fn, 2, dest, src);
12956 }
12957
12958 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12959 LEN, and SIZE. */
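/* Hypothetical examples:

     __strncat_chk (d, s, 0, os)  =>  d
     __strncat_chk (d, s, 8, os)  =>  __strcat_chk (d, s, os)
                                      [when strlen (s) is known to be <= 8]
     __strncat_chk (d, s, n, -1)  =>  strncat (d, s, n)  */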
12960
12961 static tree
12962 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12963 tree dest, tree src, tree len, tree size)
12964 {
12965 tree fn;
12966 const char *p;
12967
12968 if (!validate_arg (dest, POINTER_TYPE)
12969 || !validate_arg (src, POINTER_TYPE)
12970 || !validate_arg (len, INTEGER_TYPE)
12971 || !validate_arg (size, INTEGER_TYPE))
12972 return NULL_TREE;
12973
12974 p = c_getstr (src);
12975 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12976 if (p && *p == '\0')
12977 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12978 else if (integer_zerop (len))
12979 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12980
12981 if (! host_integerp (size, 1))
12982 return NULL_TREE;
12983
12984 if (! integer_all_onesp (size))
12985 {
12986 tree src_len = c_strlen (src, 1);
12987 if (src_len
12988 && host_integerp (src_len, 1)
12989 && host_integerp (len, 1)
12990 && ! tree_int_cst_lt (len, src_len))
12991 {
12992 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12993 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12994 if (!fn)
12995 return NULL_TREE;
12996
12997 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12998 }
12999 return NULL_TREE;
13000 }
13001
13002 /* If __builtin_strncat_chk is used, assume strncat is available. */
13003 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13004 if (!fn)
13005 return NULL_TREE;
13006
13007 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13008 }
13009
13010 /* Fold a call to __{,v}sprintf_chk whose NARGS arguments are given in ARGS.
13011 Return NULL_TREE if a normal call should be emitted rather than
13012 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13013 or BUILT_IN_VSPRINTF_CHK. */
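/* A hypothetical fold: with char buf[16],

     __sprintf_chk (buf, 1, 16, "hi")     =>  sprintf (buf, "hi")
     __sprintf_chk (buf, 1, -1, "%s", s)  =>  sprintf (buf, "%s", s)

   The first is safe because the %-free output length 2 fits in 16;
   the second because a "%s" format is accepted even with a nonzero
   flag once the object size is unknown (-1).  */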
13014
13015 static tree
13016 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13017 enum built_in_function fcode)
13018 {
13019 tree dest, size, len, fn, fmt, flag;
13020 const char *fmt_str;
13021
13022 /* Verify the required arguments in the original call. */
13023 if (nargs < 4)
13024 return NULL_TREE;
13025 dest = args[0];
13026 if (!validate_arg (dest, POINTER_TYPE))
13027 return NULL_TREE;
13028 flag = args[1];
13029 if (!validate_arg (flag, INTEGER_TYPE))
13030 return NULL_TREE;
13031 size = args[2];
13032 if (!validate_arg (size, INTEGER_TYPE))
13033 return NULL_TREE;
13034 fmt = args[3];
13035 if (!validate_arg (fmt, POINTER_TYPE))
13036 return NULL_TREE;
13037
13038 if (! host_integerp (size, 1))
13039 return NULL_TREE;
13040
13041 len = NULL_TREE;
13042
13043 if (!init_target_chars ())
13044 return NULL_TREE;
13045
13046 /* Check whether the format is a literal string constant. */
13047 fmt_str = c_getstr (fmt);
13048 if (fmt_str != NULL)
13049 {
13050 /* If the format doesn't contain % args or %%, we know the size. */
13051 if (strchr (fmt_str, target_percent) == 0)
13052 {
13053 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13054 len = build_int_cstu (size_type_node, strlen (fmt_str));
13055 }
13056 /* If the format is "%s" and first ... argument is a string literal,
13057 we know the size too. */
13058 else if (fcode == BUILT_IN_SPRINTF_CHK
13059 && strcmp (fmt_str, target_percent_s) == 0)
13060 {
13061 tree arg;
13062
13063 if (nargs == 5)
13064 {
13065 arg = args[4];
13066 if (validate_arg (arg, POINTER_TYPE))
13067 {
13068 len = c_strlen (arg, 1);
13069 if (! len || ! host_integerp (len, 1))
13070 len = NULL_TREE;
13071 }
13072 }
13073 }
13074 }
13075
13076 if (! integer_all_onesp (size))
13077 {
13078 if (! len || ! tree_int_cst_lt (len, size))
13079 return NULL_TREE;
13080 }
13081
13082 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13083 or if format doesn't contain % chars or is "%s". */
13084 if (! integer_zerop (flag))
13085 {
13086 if (fmt_str == NULL)
13087 return NULL_TREE;
13088 if (strchr (fmt_str, target_percent) != NULL
13089 && strcmp (fmt_str, target_percent_s))
13090 return NULL_TREE;
13091 }
13092
13093 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13094 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13095 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13096 if (!fn)
13097 return NULL_TREE;
13098
13099 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13100 }
13101
13102 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13103 a normal call should be emitted rather than expanding the function
13104 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13105
13106 static tree
13107 fold_builtin_sprintf_chk (location_t loc, tree exp,
13108 enum built_in_function fcode)
13109 {
13110 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13111 CALL_EXPR_ARGP (exp), fcode);
13112 }
13113
13114 /* Fold a call to __{,v}snprintf_chk whose NARGS arguments are given in ARGS. Return
13115 NULL_TREE if a normal call should be emitted rather than expanding
13116 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13117 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13118 passed as second argument. */
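/* A hypothetical fold:

     __snprintf_chk (buf, 8, 1, 16, "%s", s)  =>  snprintf (buf, 8, "%s", s)

   valid because the length bound 8 does not exceed the object size 16
   and a "%s" format is acceptable even with a nonzero flag.  */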
13119
13120 static tree
13121 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13122 tree maxlen, enum built_in_function fcode)
13123 {
13124 tree dest, size, len, fn, fmt, flag;
13125 const char *fmt_str;
13126
13127 /* Verify the required arguments in the original call. */
13128 if (nargs < 5)
13129 return NULL_TREE;
13130 dest = args[0];
13131 if (!validate_arg (dest, POINTER_TYPE))
13132 return NULL_TREE;
13133 len = args[1];
13134 if (!validate_arg (len, INTEGER_TYPE))
13135 return NULL_TREE;
13136 flag = args[2];
13137 if (!validate_arg (flag, INTEGER_TYPE))
13138 return NULL_TREE;
13139 size = args[3];
13140 if (!validate_arg (size, INTEGER_TYPE))
13141 return NULL_TREE;
13142 fmt = args[4];
13143 if (!validate_arg (fmt, POINTER_TYPE))
13144 return NULL_TREE;
13145
13146 if (! host_integerp (size, 1))
13147 return NULL_TREE;
13148
13149 if (! integer_all_onesp (size))
13150 {
13151 if (! host_integerp (len, 1))
13152 {
13153 /* If LEN is not constant, try MAXLEN too.
13154 For MAXLEN only allow optimizing into non-_ocs function
13155 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13156 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13157 return NULL_TREE;
13158 }
13159 else
13160 maxlen = len;
13161
13162 if (tree_int_cst_lt (size, maxlen))
13163 return NULL_TREE;
13164 }
13165
13166 if (!init_target_chars ())
13167 return NULL_TREE;
13168
13169 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13170 or if format doesn't contain % chars or is "%s". */
13171 if (! integer_zerop (flag))
13172 {
13173 fmt_str = c_getstr (fmt);
13174 if (fmt_str == NULL)
13175 return NULL_TREE;
13176 if (strchr (fmt_str, target_percent) != NULL
13177 && strcmp (fmt_str, target_percent_s))
13178 return NULL_TREE;
13179 }
13180
13181 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13182 available. */
13183 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13184 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13185 if (!fn)
13186 return NULL_TREE;
13187
13188 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13189 }
13190
13191 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13192 a normal call should be emitted rather than expanding the function
13193 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13194 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13195 passed as second argument. */
13196
13197 static tree
13198 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13199 enum built_in_function fcode)
13200 {
13201 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13202 CALL_EXPR_ARGP (exp), maxlen, fcode);
13203 }
13204
13205 /* Builtins with folding operations that operate on "..." arguments
13206 need special handling; we need to store the arguments in a convenient
13207 data structure before attempting any folding. Fortunately there are
13208 only a few builtins that fall into this category. FNDECL is the
13209 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13210 result of the function call is ignored. */
13211
13212 static tree
13213 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13214 bool ignore ATTRIBUTE_UNUSED)
13215 {
13216 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13217 tree ret = NULL_TREE;
13218
13219 switch (fcode)
13220 {
13221 case BUILT_IN_SPRINTF_CHK:
13222 case BUILT_IN_VSPRINTF_CHK:
13223 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13224 break;
13225
13226 case BUILT_IN_SNPRINTF_CHK:
13227 case BUILT_IN_VSNPRINTF_CHK:
13228 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13229 break;
13230
13231 case BUILT_IN_FPCLASSIFY:
13232 ret = fold_builtin_fpclassify (loc, exp);
13233 break;
13234
13235 default:
13236 break;
13237 }
13238 if (ret)
13239 {
13240 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13241 SET_EXPR_LOCATION (ret, loc);
13242 TREE_NO_WARNING (ret) = 1;
13243 return ret;
13244 }
13245 return NULL_TREE;
13246 }
13247
13248 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13249 FMT and ARG are the arguments to the call; we don't fold cases with
13250 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13251
13252 Return NULL_TREE if no simplification was possible, otherwise return the
13253 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13254 code of the function to be simplified. */
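/* A sketch of the folds performed below (hypothetical calls, each
   with its return value unused):

     printf ("x")        =>  putchar ('x')
     printf ("abc\n")    =>  puts ("abc")
     printf ("%s\n", s)  =>  puts (s)
     printf ("%c", c)    =>  putchar (c)  */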
13255
13256 static tree
13257 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13258 tree arg, bool ignore,
13259 enum built_in_function fcode)
13260 {
13261 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13262 const char *fmt_str = NULL;
13263
13264 /* If the return value is used, don't do the transformation. */
13265 if (! ignore)
13266 return NULL_TREE;
13267
13268 /* Verify the required arguments in the original call. */
13269 if (!validate_arg (fmt, POINTER_TYPE))
13270 return NULL_TREE;
13271
13272 /* Check whether the format is a literal string constant. */
13273 fmt_str = c_getstr (fmt);
13274 if (fmt_str == NULL)
13275 return NULL_TREE;
13276
13277 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13278 {
13279 /* If we're using an unlocked function, assume the other
13280 unlocked functions exist explicitly. */
13281 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13282 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13283 }
13284 else
13285 {
13286 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13287 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13288 }
13289
13290 if (!init_target_chars ())
13291 return NULL_TREE;
13292
13293 if (strcmp (fmt_str, target_percent_s) == 0
13294 || strchr (fmt_str, target_percent) == NULL)
13295 {
13296 const char *str;
13297
13298 if (strcmp (fmt_str, target_percent_s) == 0)
13299 {
13300 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13301 return NULL_TREE;
13302
13303 if (!arg || !validate_arg (arg, POINTER_TYPE))
13304 return NULL_TREE;
13305
13306 str = c_getstr (arg);
13307 if (str == NULL)
13308 return NULL_TREE;
13309 }
13310 else
13311 {
13312 /* The format specifier doesn't contain any '%' characters. */
13313 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13314 && arg)
13315 return NULL_TREE;
13316 str = fmt_str;
13317 }
13318
13319 /* If the string was "", printf does nothing. */
13320 if (str[0] == '\0')
13321 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13322
13323 /* If the string has length of 1, call putchar. */
13324 if (str[1] == '\0')
13325 {
13326 /* Given printf ("c"), where c is any one character,
13327 convert "c"[0] to an int and pass that to the replacement
13328 function. */
13329 newarg = build_int_cst (integer_type_node, str[0]);
13330 if (fn_putchar)
13331 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13332 }
13333 else
13334 {
13335 /* If the string was "string\n", call puts("string"). */
13336 size_t len = strlen (str);
13337 if ((unsigned char)str[len - 1] == target_newline
13338 && (size_t) (int) len == len
13339 && (int) len > 0)
13340 {
13341 char *newstr;
13342 tree offset_node, string_cst;
13343
13344 /* Create a NUL-terminated string that's one char shorter
13345 than the original, stripping off the trailing '\n'. */
13346 newarg = build_string_literal (len, str);
13347 string_cst = string_constant (newarg, &offset_node);
13348 gcc_checking_assert (string_cst
13349 && (TREE_STRING_LENGTH (string_cst)
13350 == (int) len)
13351 && integer_zerop (offset_node)
13352 && (unsigned char)
13353 TREE_STRING_POINTER (string_cst)[len - 1]
13354 == target_newline);
13355 /* build_string_literal creates a new STRING_CST;
13356 modify it in place to avoid double copying. */
13357 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13358 newstr[len - 1] = '\0';
13359 if (fn_puts)
13360 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13361 }
13362 else
13363 /* We'd like to arrange to call fputs(string,stdout) here,
13364 but we need stdout and don't have a way to get it yet. */
13365 return NULL_TREE;
13366 }
13367 }
13368
13369 /* The other optimizations can be done only on the non-va_list variants. */
13370 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13371 return NULL_TREE;
13372
13373 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13374 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13375 {
13376 if (!arg || !validate_arg (arg, POINTER_TYPE))
13377 return NULL_TREE;
13378 if (fn_puts)
13379 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13380 }
13381
13382 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13383 else if (strcmp (fmt_str, target_percent_c) == 0)
13384 {
13385 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13386 return NULL_TREE;
13387 if (fn_putchar)
13388 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13389 }
13390
13391 if (!call)
13392 return NULL_TREE;
13393
13394 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13395 }
13396
13397 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13398 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13399 more than 3 arguments, and ARG may be null in the 2-argument case.
13400
13401 Return NULL_TREE if no simplification was possible, otherwise return the
13402 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13403 code of the function to be simplified. */
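/* A sketch of the folds performed below (hypothetical calls, return
   value unused):

     fprintf (fp, "abc")    =>  fputs ("abc", fp)
     fprintf (fp, "%s", s)  =>  fputs (s, fp)
     fprintf (fp, "%c", c)  =>  fputc (c, fp)  */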
13404
13405 static tree
13406 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13407 tree fmt, tree arg, bool ignore,
13408 enum built_in_function fcode)
13409 {
13410 tree fn_fputc, fn_fputs, call = NULL_TREE;
13411 const char *fmt_str = NULL;
13412
13413 /* If the return value is used, don't do the transformation. */
13414 if (! ignore)
13415 return NULL_TREE;
13416
13417 /* Verify the required arguments in the original call. */
13418 if (!validate_arg (fp, POINTER_TYPE))
13419 return NULL_TREE;
13420 if (!validate_arg (fmt, POINTER_TYPE))
13421 return NULL_TREE;
13422
13423 /* Check whether the format is a literal string constant. */
13424 fmt_str = c_getstr (fmt);
13425 if (fmt_str == NULL)
13426 return NULL_TREE;
13427
13428 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13429 {
13430 /* If we're using an unlocked function, assume the other
13431 unlocked functions exist explicitly. */
13432 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13433 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13434 }
13435 else
13436 {
13437 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13438 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13439 }
13440
13441 if (!init_target_chars ())
13442 return NULL_TREE;
13443
13444 /* If the format doesn't contain % args or %%, use fputs. */
13445 if (strchr (fmt_str, target_percent) == NULL)
13446 {
13447 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13448 && arg)
13449 return NULL_TREE;
13450
13451 /* If the format specifier was "", fprintf does nothing. */
13452 if (fmt_str[0] == '\0')
13453 {
13454 /* If FP has side-effects, just wait until gimplification is
13455 done. */
13456 if (TREE_SIDE_EFFECTS (fp))
13457 return NULL_TREE;
13458
13459 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13460 }
13461
13462 /* When "string" doesn't contain %, replace all cases of
13463 fprintf (fp, string) with fputs (string, fp). The fputs
13464 builtin will take care of special cases like length == 1. */
13465 if (fn_fputs)
13466 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13467 }
13468
13469 /* The other optimizations can be done only on the non-va_list variants. */
13470 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13471 return NULL_TREE;
13472
13473 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13474 else if (strcmp (fmt_str, target_percent_s) == 0)
13475 {
13476 if (!arg || !validate_arg (arg, POINTER_TYPE))
13477 return NULL_TREE;
13478 if (fn_fputs)
13479 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13480 }
13481
13482 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13483 else if (strcmp (fmt_str, target_percent_c) == 0)
13484 {
13485 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13486 return NULL_TREE;
13487 if (fn_fputc)
13488 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13489 }
13490
13491 if (!call)
13492 return NULL_TREE;
13493 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13494 }
13495
13496 /* Initialize format string characters in the target charset. */
13497
13498 static bool
13499 init_target_chars (void)
13500 {
13501 static bool init;
13502 if (!init)
13503 {
13504 target_newline = lang_hooks.to_target_charset ('\n');
13505 target_percent = lang_hooks.to_target_charset ('%');
13506 target_c = lang_hooks.to_target_charset ('c');
13507 target_s = lang_hooks.to_target_charset ('s');
13508 if (target_newline == 0 || target_percent == 0 || target_c == 0
13509 || target_s == 0)
13510 return false;
13511
13512 target_percent_c[0] = target_percent;
13513 target_percent_c[1] = target_c;
13514 target_percent_c[2] = '\0';
13515
13516 target_percent_s[0] = target_percent;
13517 target_percent_s[1] = target_s;
13518 target_percent_s[2] = '\0';
13519
13520 target_percent_s_newline[0] = target_percent;
13521 target_percent_s_newline[1] = target_s;
13522 target_percent_s_newline[2] = target_newline;
13523 target_percent_s_newline[3] = '\0';
13524
13525 init = true;
13526 }
13527 return true;
13528 }
13529
13530 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13531 and no overflow/underflow occurred. INEXACT is true if M was not
13532 exactly calculated. TYPE is the tree type for the result. This
13533 function assumes that you cleared the MPFR flags and then
13534 calculated M to see if anything subsequently set a flag prior to
13535 entering this function. Return NULL_TREE if any checks fail. */
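/* The expected usage pattern, as in the do_mpfr_arg* helpers below:

     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);  */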
13536
13537 static tree
13538 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13539 {
13540 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13541 overflow/underflow occurred. If -frounding-math, proceed iff the
13542 result of calling FUNC was exact. */
13543 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13544 && (!flag_rounding_math || !inexact))
13545 {
13546 REAL_VALUE_TYPE rr;
13547
13548 real_from_mpfr (&rr, m, type, GMP_RNDN);
13549 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
13550 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13551 but the mpfr_t is not, then we underflowed in the
13552 conversion. */
13553 if (real_isfinite (&rr)
13554 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13555 {
13556 REAL_VALUE_TYPE rmode;
13557
13558 real_convert (&rmode, TYPE_MODE (type), &rr);
13559 /* Proceed iff the specified mode can hold the value. */
13560 if (real_identical (&rmode, &rr))
13561 return build_real (type, rmode);
13562 }
13563 }
13564 return NULL_TREE;
13565 }
13566
13567 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13568 number and no overflow/underflow occurred. INEXACT is true if M
13569 was not exactly calculated. TYPE is the tree type for the result.
13570 This function assumes that you cleared the MPFR flags and then
13571 calculated M to see if anything subsequently set a flag prior to
13572 entering this function. Return NULL_TREE if any checks fail; if
13573 FORCE_CONVERT is true, bypass the checks. */
13574
13575 static tree
13576 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13577 {
13578 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13579 overflow/underflow occurred. If -frounding-math, proceed iff the
13580 result of calling FUNC was exact. */
13581 if (force_convert
13582 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13583 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13584 && (!flag_rounding_math || !inexact)))
13585 {
13586 REAL_VALUE_TYPE re, im;
13587
13588 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13589 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13590 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
13591 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13592 but the mpfr_t is not, then we underflowed in the
13593 conversion. */
13594 if (force_convert
13595 || (real_isfinite (&re) && real_isfinite (&im)
13596 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13597 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13598 {
13599 REAL_VALUE_TYPE re_mode, im_mode;
13600
13601 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13602 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13603 /* Proceed iff the specified mode can hold the value. */
13604 if (force_convert
13605 || (real_identical (&re_mode, &re)
13606 && real_identical (&im_mode, &im)))
13607 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13608 build_real (TREE_TYPE (type), im_mode));
13609 }
13610 }
13611 return NULL_TREE;
13612 }
13613
13614 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13615 FUNC on it and return the resulting value as a tree with type TYPE.
13616 If MIN and/or MAX are not NULL, then the supplied ARG must be
13617 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13618 acceptable values, otherwise they are not. The mpfr precision is
13619 set to the precision of TYPE. We assume that function FUNC returns
13620 zero if the result could be calculated exactly within the requested
13621 precision. */
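/* An illustrative invocation (mirroring the fold_builtin_* callers):
   do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, 0) folds a constant
   sin (0.5) to its correctly rounded REAL_CST value.  */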
13622
13623 static tree
13624 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13625 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13626 bool inclusive)
13627 {
13628 tree result = NULL_TREE;
13629
13630 STRIP_NOPS (arg);
13631
13632 /* To proceed, MPFR must exactly represent the target floating point
13633 format, which only happens when the target base equals two. */
13634 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13635 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13636 {
13637 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13638
13639 if (real_isfinite (ra)
13640 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
13641 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
13642 {
13643 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13644 const int prec = fmt->p;
13645 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13646 int inexact;
13647 mpfr_t m;
13648
13649 mpfr_init2 (m, prec);
13650 mpfr_from_real (m, ra, GMP_RNDN);
13651 mpfr_clear_flags ();
13652 inexact = func (m, m, rnd);
13653 result = do_mpfr_ckconv (m, type, inexact);
13654 mpfr_clear (m);
13655 }
13656 }
13657
13658 return result;
13659 }
13660
13661 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13662 FUNC on it and return the resulting value as a tree with type TYPE.
13663 The mpfr precision is set to the precision of TYPE. We assume that
13664 function FUNC returns zero if the result could be calculated
13665 exactly within the requested precision. */
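/* An illustrative invocation: do_mpfr_arg2 (arg1, arg2, type,
   mpfr_atan2) folds atan2 (y, x) when both arguments are finite
   REAL_CSTs.  */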
13666
13667 static tree
13668 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13669 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13670 {
13671 tree result = NULL_TREE;
13672
13673 STRIP_NOPS (arg1);
13674 STRIP_NOPS (arg2);
13675
13676 /* To proceed, MPFR must exactly represent the target floating point
13677 format, which only happens when the target base equals two. */
13678 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13679 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13680 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13681 {
13682 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13683 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13684
13685 if (real_isfinite (ra1) && real_isfinite (ra2))
13686 {
13687 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13688 const int prec = fmt->p;
13689 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13690 int inexact;
13691 mpfr_t m1, m2;
13692
13693 mpfr_inits2 (prec, m1, m2, NULL);
13694 mpfr_from_real (m1, ra1, GMP_RNDN);
13695 mpfr_from_real (m2, ra2, GMP_RNDN);
13696 mpfr_clear_flags ();
13697 inexact = func (m1, m1, m2, rnd);
13698 result = do_mpfr_ckconv (m1, type, inexact);
13699 mpfr_clears (m1, m2, NULL);
13700 }
13701 }
13702
13703 return result;
13704 }
13705
13706 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13707 FUNC on it and return the resulting value as a tree with type TYPE.
13708 The mpfr precision is set to the precision of TYPE. We assume that
13709 function FUNC returns zero if the result could be calculated
13710 exactly within the requested precision. */
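/* An illustrative invocation: do_mpfr_arg3 (arg1, arg2, arg3, type,
   mpfr_fma) folds fma (x, y, z) when all three arguments are finite
   REAL_CSTs.  */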
13711
13712 static tree
13713 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13714 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13715 {
13716 tree result = NULL_TREE;
13717
13718 STRIP_NOPS (arg1);
13719 STRIP_NOPS (arg2);
13720 STRIP_NOPS (arg3);
13721
13722 /* To proceed, MPFR must exactly represent the target floating point
13723 format, which only happens when the target base equals two. */
13724 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13725 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13726 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13727 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13728 {
13729 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13730 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13731 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13732
13733 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13734 {
13735 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13736 const int prec = fmt->p;
13737 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13738 int inexact;
13739 mpfr_t m1, m2, m3;
13740
13741 mpfr_inits2 (prec, m1, m2, m3, NULL);
13742 mpfr_from_real (m1, ra1, GMP_RNDN);
13743 mpfr_from_real (m2, ra2, GMP_RNDN);
13744 mpfr_from_real (m3, ra3, GMP_RNDN);
13745 mpfr_clear_flags ();
13746 inexact = func (m1, m1, m2, m3, rnd);
13747 result = do_mpfr_ckconv (m1, type, inexact);
13748 mpfr_clears (m1, m2, m3, NULL);
13749 }
13750 }
13751
13752 return result;
13753 }
13754
13755 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13756 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13757 If ARG_SINP and ARG_COSP are NULL then the result is returned
13758 as a complex value.
13759 The type is taken from the type of ARG and is used for setting the
13760 precision of the calculation and results. */
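/* Two hypothetical uses: for cexpi (x) both pointer arguments are
   NULL and the result comes back as a complex constant; for
   sincos (x, &s, &c) the stores through ARG_SINP and ARG_COSP are
   folded into a compound expression assigning both results.  */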
13761
13762 static tree
13763 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13764 {
13765 tree const type = TREE_TYPE (arg);
13766 tree result = NULL_TREE;
13767
13768 STRIP_NOPS (arg);
13769
13770 /* To proceed, MPFR must exactly represent the target floating point
13771 format, which only happens when the target base equals two. */
13772 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13773 && TREE_CODE (arg) == REAL_CST
13774 && !TREE_OVERFLOW (arg))
13775 {
13776 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13777
13778 if (real_isfinite (ra))
13779 {
13780 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13781 const int prec = fmt->p;
13782 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13783 tree result_s, result_c;
13784 int inexact;
13785 mpfr_t m, ms, mc;
13786
13787 mpfr_inits2 (prec, m, ms, mc, NULL);
13788 mpfr_from_real (m, ra, GMP_RNDN);
13789 mpfr_clear_flags ();
13790 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13791 result_s = do_mpfr_ckconv (ms, type, inexact);
13792 result_c = do_mpfr_ckconv (mc, type, inexact);
13793 mpfr_clears (m, ms, mc, NULL);
13794 if (result_s && result_c)
13795 {
13796 /* If we are to return in a complex value do so. */
13797 if (!arg_sinp && !arg_cosp)
13798 return build_complex (build_complex_type (type),
13799 result_c, result_s);
13800
13801 /* Dereference the sin/cos pointer arguments. */
13802 arg_sinp = build_fold_indirect_ref (arg_sinp);
13803 arg_cosp = build_fold_indirect_ref (arg_cosp);
13804 /* Proceed if valid pointer types were passed in. */
13805 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13806 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13807 {
13808 /* Set the values. */
13809 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13810 result_s);
13811 TREE_SIDE_EFFECTS (result_s) = 1;
13812 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13813 result_c);
13814 TREE_SIDE_EFFECTS (result_c) = 1;
13815 /* Combine the assignments into a compound expr. */
13816 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13817 result_s, result_c));
13818 }
13819 }
13820 }
13821 }
13822 return result;
13823 }
13824
13825 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13826 two-argument mpfr order N Bessel function FUNC on them and return
13827 the resulting value as a tree with type TYPE. The mpfr precision
13828 is set to the precision of TYPE. We assume that function FUNC
13829 returns zero if the result could be calculated exactly within the
13830 requested precision. */
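/* An illustrative invocation: do_mpfr_bessel_n (arg1, arg2, type,
   mpfr_jn, NULL, 0) folds jn (2, 1.5) when ARG1 is the INTEGER_CST 2
   and ARG2 the REAL_CST 1.5.  */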
13831 static tree
13832 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13833 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13834 const REAL_VALUE_TYPE *min, bool inclusive)
13835 {
13836 tree result = NULL_TREE;
13837
13838 STRIP_NOPS (arg1);
13839 STRIP_NOPS (arg2);
13840
13841 /* To proceed, MPFR must exactly represent the target floating point
13842 format, which only happens when the target base equals two. */
13843 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13844 && tree_fits_shwi_p (arg1)
13845 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13846 {
13847 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13848 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13849
13850 if (n == (long) n
13851 && real_isfinite (ra)
13852 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
13853 {
13854 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13855 const int prec = fmt->p;
13856 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13857 int inexact;
13858 mpfr_t m;
13859
13860 mpfr_init2 (m, prec);
13861 mpfr_from_real (m, ra, GMP_RNDN);
13862 mpfr_clear_flags ();
13863 inexact = func (m, n, m, rnd);
13864 result = do_mpfr_ckconv (m, type, inexact);
13865 mpfr_clear (m);
13866 }
13867 }
13868
13869 return result;
13870 }
13871
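/* For example (an illustrative sketch), a call with constant
arguments such as

    double d = __builtin_jn (2, 1.5);

computes J2(1.5) with MPFR and may fold to the resulting REAL_CST.
MIN and INCLUSIVE restrict the domain: the yn family, for instance,
is only folded for arguments greater than zero.  */
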
13872 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13873 *(ARG_QUO) to the integral quotient and return the remainder as the
13874 result. The type is taken from the type of ARG0 and is used for
13875 setting the precision of the calculation and results. */
13876
13877 static tree
13878 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13879 {
13880 tree const type = TREE_TYPE (arg0);
13881 tree result = NULL_TREE;
13882
13883 STRIP_NOPS (arg0);
13884 STRIP_NOPS (arg1);
13885
13886 /* To proceed, MPFR must exactly represent the target floating point
13887 format, which only happens when the target base equals two. */
13888 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13889 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13890 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13891 {
13892 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13893 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13894
13895 if (real_isfinite (ra0) && real_isfinite (ra1))
13896 {
13897 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13898 const int prec = fmt->p;
13899 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13900 tree result_rem;
13901 long integer_quo;
13902 mpfr_t m0, m1;
13903
13904 mpfr_inits2 (prec, m0, m1, NULL);
13905 mpfr_from_real (m0, ra0, GMP_RNDN);
13906 mpfr_from_real (m1, ra1, GMP_RNDN);
13907 mpfr_clear_flags ();
13908 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13909 /* Remquo is independent of the rounding mode, so pass
13910 inexact=0 to do_mpfr_ckconv(). */
13911 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13912 mpfr_clears (m0, m1, NULL);
13913 if (result_rem)
13914 {
13915 /* MPFR calculates quo in the host's long so it may
13916 return more bits in quo than the target int can hold
13917 if sizeof(host long) > sizeof(target int). This can
13918 happen even for native compilers in LP64 mode. In
13919 these cases, reduce the quo value modulo the largest
13920 number that the target int can hold, leaving one
13921 bit for the sign. */
13922 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13923 integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));
13924
13925 /* Dereference the quo pointer argument. */
13926 arg_quo = build_fold_indirect_ref (arg_quo);
13927 /* Proceed iff a valid pointer type was passed in. */
13928 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13929 {
13930 /* Set the value. */
13931 tree result_quo
13932 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13933 build_int_cst (TREE_TYPE (arg_quo),
13934 integer_quo));
13935 TREE_SIDE_EFFECTS (result_quo) = 1;
13936 /* Combine the quo assignment with the rem. */
13937 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13938 result_quo, result_rem));
13939 }
13940 }
13941 }
13942 }
13943 return result;
13944 }
13945
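/* For example (an illustrative sketch; Q is an arbitrary user
variable), a call with constant arguments such as

    int q;
    double r = remquo (5.0, 3.0, &q);

has nearest quotient 2 and remainder -1.0, so it may fold to the
equivalent of (*&q = 2, -1.0), provided *ARG_QUO has type int.  */
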
13946 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13947 resulting value as a tree with type TYPE. The mpfr precision is
13948 set to the precision of TYPE. We assume that this mpfr function
13949 returns zero if the result could be calculated exactly within the
13950 requested precision. In addition, the integer pointer represented
13951 by ARG_SG will be dereferenced and set to the appropriate signgam
13952 (-1,1) value. */
13953
13954 static tree
13955 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13956 {
13957 tree result = NULL_TREE;
13958
13959 STRIP_NOPS (arg);
13960
13961 /* To proceed, MPFR must exactly represent the target floating point
13962 format, which only happens when the target base equals two. Also
13963 verify ARG is a constant and that ARG_SG is an int pointer. */
13964 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13965 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13966 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13967 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13968 {
13969 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13970
13971 /* In addition to NaN and Inf, the argument cannot be zero or a
13972 negative integer. */
13973 if (real_isfinite (ra)
13974 && ra->cl != rvc_zero
13975 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13976 {
13977 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13978 const int prec = fmt->p;
13979 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13980 int inexact, sg;
13981 mpfr_t m;
13982 tree result_lg;
13983
13984 mpfr_init2 (m, prec);
13985 mpfr_from_real (m, ra, GMP_RNDN);
13986 mpfr_clear_flags ();
13987 inexact = mpfr_lgamma (m, &sg, m, rnd);
13988 result_lg = do_mpfr_ckconv (m, type, inexact);
13989 mpfr_clear (m);
13990 if (result_lg)
13991 {
13992 tree result_sg;
13993
13994 /* Dereference the arg_sg pointer argument. */
13995 arg_sg = build_fold_indirect_ref (arg_sg);
13996 /* Assign the signgam value into *arg_sg. */
13997 result_sg = fold_build2 (MODIFY_EXPR,
13998 TREE_TYPE (arg_sg), arg_sg,
13999 build_int_cst (TREE_TYPE (arg_sg), sg));
14000 TREE_SIDE_EFFECTS (result_sg) = 1;
14001 /* Combine the signgam assignment with the lgamma result. */
14002 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14003 result_sg, result_lg));
14004 }
14005 }
14006 }
14007
14008 return result;
14009 }
14010
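/* For example (an illustrative sketch; SG is an arbitrary user
variable), a call with a constant argument such as

    int sg;
    double d = lgamma_r (0.5, &sg);

has Gamma(0.5) = sqrt(pi) > 0, so SG is 1 and the call may fold to
the equivalent of (*&sg = 1, 0.57236...), assuming do_mpfr_ckconv
accepts the inexact lgamma value.  */
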
14011 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14012 function FUNC on it and return the resulting value as a tree with
14013 type TYPE. The mpfr precision is set to the precision of TYPE. We
14014 assume that function FUNC returns zero if the result could be
14015 calculated exactly within the requested precision. */
14016
14017 static tree
14018 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14019 {
14020 tree result = NULL_TREE;
14021
14022 STRIP_NOPS (arg);
14023
14024 /* To proceed, MPFR must exactly represent the target floating point
14025 format, which only happens when the target base equals two. */
14026 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14027 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14028 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14029 {
14030 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14031 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14032
14033 if (real_isfinite (re) && real_isfinite (im))
14034 {
14035 const struct real_format *const fmt =
14036 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14037 const int prec = fmt->p;
14038 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14039 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14040 int inexact;
14041 mpc_t m;
14042
14043 mpc_init2 (m, prec);
14044 mpfr_from_real (mpc_realref (m), re, rnd);
14045 mpfr_from_real (mpc_imagref (m), im, rnd);
14046 mpfr_clear_flags ();
14047 inexact = func (m, m, crnd);
14048 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14049 mpc_clear (m);
14050 }
14051 }
14052
14053 return result;
14054 }
14055
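/* For example (an illustrative sketch), this is the folder behind
one-argument complex built-ins such as ccos:

    _Complex double z = __builtin_ccos (0.0 + 0.0i);

mpc_cos of 0+0i is exactly 1+0i, so INEXACT is zero and the call
may fold to that COMPLEX_CST.  */
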
14056 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
14057 mpc function FUNC on them and return the resulting value as a tree
14058 with type TYPE. The mpfr precision is set to the precision of
14059 TYPE. We assume that function FUNC returns zero if the result
14060 could be calculated exactly within the requested precision. If
14061 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14062 in the arguments and/or results. */
14063
14064 tree
14065 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14066 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14067 {
14068 tree result = NULL_TREE;
14069
14070 STRIP_NOPS (arg0);
14071 STRIP_NOPS (arg1);
14072
14073 /* To proceed, MPFR must exactly represent the target floating point
14074 format, which only happens when the target base equals two. */
14075 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14076 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14077 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14078 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14079 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14080 {
14081 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14082 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14083 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14084 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14085
14086 if (do_nonfinite
14087 || (real_isfinite (re0) && real_isfinite (im0)
14088 && real_isfinite (re1) && real_isfinite (im1)))
14089 {
14090 const struct real_format *const fmt =
14091 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14092 const int prec = fmt->p;
14093 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14094 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14095 int inexact;
14096 mpc_t m0, m1;
14097
14098 mpc_init2 (m0, prec);
14099 mpc_init2 (m1, prec);
14100 mpfr_from_real (mpc_realref (m0), re0, rnd);
14101 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14102 mpfr_from_real (mpc_realref (m1), re1, rnd);
14103 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14104 mpfr_clear_flags ();
14105 inexact = func (m0, m0, m1, crnd);
14106 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14107 mpc_clear (m0);
14108 mpc_clear (m1);
14109 }
14110 }
14111
14112 return result;
14113 }
14114
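/* For example (an illustrative sketch), a call with constant
operands such as

    _Complex double z = __builtin_cpow (2.0 + 0.0i, 2.0 + 0.0i);

may fold to the COMPLEX_CST 4.0 + 0.0i.  Callers such as the complex
multiply/divide constant folders can pass DO_NONFINITE as true so
that operands or results containing Inf/NaN still fold.  */
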
14115 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14116 a normal call should be emitted rather than expanding the function
14117 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14118
14119 static tree
14120 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14121 {
14122 int nargs = gimple_call_num_args (stmt);
14123
14124 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14125 (nargs > 0
14126 ? gimple_call_arg_ptr (stmt, 0)
14127 : &error_mark_node), fcode);
14128 }
14129
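/* For example (an illustrative sketch; BUF and BOS are arbitrary
names), a checked call whose format is a plain literal, such as

    __builtin___sprintf_chk (buf, 0, bos, "hello");

can be simplified by the shared folder once the length implied by
the format string is known to fit within the object size BOS.  */
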
14130 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
14131 a normal call should be emitted rather than expanding the function
14132 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14133 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is an upper bound
14134 on the length passed as the second argument. */
14135
14136 tree
14137 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14138 enum built_in_function fcode)
14139 {
14140 int nargs = gimple_call_num_args (stmt);
14141
14142 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14143 (nargs > 0
14144 ? gimple_call_arg_ptr (stmt, 0)
14145 : &error_mark_node), maxlen, fcode);
14146 }
14147
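/* For example (an illustrative sketch; N and BOS are arbitrary
names), for

    __builtin___snprintf_chk (buf, n, 0, bos, "hi");

MAXLEN, when non-NULL, supplies a known upper bound on N, and the
object-size check can only be folded away when that bound is known
not to exceed BOS.  */
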
14148 /* Builtins with folding operations that operate on "..." arguments
14149 need special handling; we need to store the arguments in a convenient
14150 data structure before attempting any folding. Fortunately there are
14151 only a few builtins that fall into this category. FNDECL is the
14152 function, STMT is the GIMPLE_CALL statement for the call, and IGNORE
14153 is true if the result of the function call is ignored. */
14154
14155 static tree
14156 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14157 bool ignore ATTRIBUTE_UNUSED)
14158 {
14159 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14160 tree ret = NULL_TREE;
14161
14162 switch (fcode)
14163 {
14164 case BUILT_IN_SPRINTF_CHK:
14165 case BUILT_IN_VSPRINTF_CHK:
14166 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14167 break;
14168
14169 case BUILT_IN_SNPRINTF_CHK:
14170 case BUILT_IN_VSNPRINTF_CHK:
14171 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
break;
14172
14173 default:
14174 break;
14175 }
14176 if (ret)
14177 {
14178 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14179 TREE_NO_WARNING (ret) = 1;
14180 return ret;
14181 }
14182 return NULL_TREE;
14183 }
14184
14185 /* A wrapper function for builtin folding that prevents warnings for
14186 "statement without effect" and the like, caused by removing the
14187 call node earlier than the warning is generated. */
14188
14189 tree
14190 fold_call_stmt (gimple stmt, bool ignore)
14191 {
14192 tree ret = NULL_TREE;
14193 tree fndecl = gimple_call_fndecl (stmt);
14194 location_t loc = gimple_location (stmt);
14195 if (fndecl
14196 && TREE_CODE (fndecl) == FUNCTION_DECL
14197 && DECL_BUILT_IN (fndecl)
14198 && !gimple_call_va_arg_pack_p (stmt))
14199 {
14200 int nargs = gimple_call_num_args (stmt);
14201 tree *args = (nargs > 0
14202 ? gimple_call_arg_ptr (stmt, 0)
14203 : &error_mark_node);
14204
14205 if (avoid_folding_inline_builtin (fndecl))
14206 return NULL_TREE;
14207 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14208 {
14209 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14210 }
14211 else
14212 {
14213 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14214 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14215 if (!ret)
14216 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14217 if (ret)
14218 {
14219 /* Propagate location information from original call to
14220 expansion of builtin. Otherwise things like
14221 maybe_emit_chk_warning, that operate on the expansion
14222 of a builtin, will use the wrong location information. */
14223 if (gimple_has_location (stmt))
14224 {
14225 tree realret = ret;
14226 if (TREE_CODE (ret) == NOP_EXPR)
14227 realret = TREE_OPERAND (ret, 0);
14228 if (CAN_HAVE_LOCATION_P (realret)
14229 && !EXPR_HAS_LOCATION (realret))
14230 SET_EXPR_LOCATION (realret, loc);
14231 return realret;
14232 }
14233 return ret;
14234 }
14235 }
14236 }
14237 return NULL_TREE;
14238 }
14239
14240 /* Look up the builtin declaration (via builtin_decl_explicit) that
14241 corresponds to DECL and set ASMSPEC as its user assembler name.
14242 DECL must be a function decl that declares a builtin. */
14243
14244 void
14245 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14246 {
14247 tree builtin;
14248 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14249 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14250 && asmspec != 0);
14251
14252 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14253 set_user_assembler_name (builtin, asmspec);
14254 switch (DECL_FUNCTION_CODE (decl))
14255 {
14256 case BUILT_IN_MEMCPY:
14257 init_block_move_fn (asmspec);
14258 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14259 break;
14260 case BUILT_IN_MEMSET:
14261 init_block_clear_fn (asmspec);
14262 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14263 break;
14264 case BUILT_IN_MEMMOVE:
14265 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14266 break;
14267 case BUILT_IN_MEMCMP:
14268 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14269 break;
14270 case BUILT_IN_ABORT:
14271 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14272 break;
14273 case BUILT_IN_FFS:
14274 if (INT_TYPE_SIZE < BITS_PER_WORD)
14275 {
14276 set_user_assembler_libfunc ("ffs", asmspec);
14277 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14278 MODE_INT, 0), "ffs");
14279 }
14280 break;
14281 default:
14282 break;
14283 }
14284 }
14285
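/* For example (an illustrative sketch; the name XMEMCPY is
arbitrary), user code can rename a builtin with an asm label:

    extern void *memcpy (void *, const void *, __SIZE_TYPE__)
      __asm__ ("xmemcpy");

after which both direct expansions of __builtin_memcpy and the
block-move fallback libcall emit calls to xmemcpy instead of
memcpy.  */
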
14286 /* Return true if DECL is a builtin that expands to a constant or similarly
14287 simple code. */
14288 bool
14289 is_simple_builtin (tree decl)
14290 {
14291 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14292 switch (DECL_FUNCTION_CODE (decl))
14293 {
14294 /* Builtins that expand to constants. */
14295 case BUILT_IN_CONSTANT_P:
14296 case BUILT_IN_EXPECT:
14297 case BUILT_IN_OBJECT_SIZE:
14298 case BUILT_IN_UNREACHABLE:
14299 /* Simple register moves or loads from stack. */
14300 case BUILT_IN_ASSUME_ALIGNED:
14301 case BUILT_IN_RETURN_ADDRESS:
14302 case BUILT_IN_EXTRACT_RETURN_ADDR:
14303 case BUILT_IN_FROB_RETURN_ADDR:
14304 case BUILT_IN_RETURN:
14305 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14306 case BUILT_IN_FRAME_ADDRESS:
14307 case BUILT_IN_VA_END:
14308 case BUILT_IN_STACK_SAVE:
14309 case BUILT_IN_STACK_RESTORE:
14310 /* Exception state returns or moves registers around. */
14311 case BUILT_IN_EH_FILTER:
14312 case BUILT_IN_EH_POINTER:
14313 case BUILT_IN_EH_COPY_VALUES:
14314 return true;
14315
14316 default:
14317 return false;
14318 }
14319
14320 return false;
14321 }
14322
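/* For instance (an illustrative note), __builtin_constant_p always
collapses to the constant 0 or 1, and __builtin_stack_save is
essentially a single register copy, which is what qualifies them
for the list above.  */
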
14323 /* Return true if DECL is a builtin that is not expensive, i.e., one that
14324 is most probably expanded inline into reasonably simple code. This is a
14325 superset of is_simple_builtin. */
14326 bool
14327 is_inexpensive_builtin (tree decl)
14328 {
14329 if (!decl)
14330 return false;
14331 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14332 return true;
14333 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14334 switch (DECL_FUNCTION_CODE (decl))
14335 {
14336 case BUILT_IN_ABS:
14337 case BUILT_IN_ALLOCA:
14338 case BUILT_IN_ALLOCA_WITH_ALIGN:
14339 case BUILT_IN_BSWAP16:
14340 case BUILT_IN_BSWAP32:
14341 case BUILT_IN_BSWAP64:
14342 case BUILT_IN_CLZ:
14343 case BUILT_IN_CLZIMAX:
14344 case BUILT_IN_CLZL:
14345 case BUILT_IN_CLZLL:
14346 case BUILT_IN_CTZ:
14347 case BUILT_IN_CTZIMAX:
14348 case BUILT_IN_CTZL:
14349 case BUILT_IN_CTZLL:
14350 case BUILT_IN_FFS:
14351 case BUILT_IN_FFSIMAX:
14352 case BUILT_IN_FFSL:
14353 case BUILT_IN_FFSLL:
14354 case BUILT_IN_IMAXABS:
14355 case BUILT_IN_FINITE:
14356 case BUILT_IN_FINITEF:
14357 case BUILT_IN_FINITEL:
14358 case BUILT_IN_FINITED32:
14359 case BUILT_IN_FINITED64:
14360 case BUILT_IN_FINITED128:
14361 case BUILT_IN_FPCLASSIFY:
14362 case BUILT_IN_ISFINITE:
14363 case BUILT_IN_ISINF_SIGN:
14364 case BUILT_IN_ISINF:
14365 case BUILT_IN_ISINFF:
14366 case BUILT_IN_ISINFL:
14367 case BUILT_IN_ISINFD32:
14368 case BUILT_IN_ISINFD64:
14369 case BUILT_IN_ISINFD128:
14370 case BUILT_IN_ISNAN:
14371 case BUILT_IN_ISNANF:
14372 case BUILT_IN_ISNANL:
14373 case BUILT_IN_ISNAND32:
14374 case BUILT_IN_ISNAND64:
14375 case BUILT_IN_ISNAND128:
14376 case BUILT_IN_ISNORMAL:
14377 case BUILT_IN_ISGREATER:
14378 case BUILT_IN_ISGREATEREQUAL:
14379 case BUILT_IN_ISLESS:
14380 case BUILT_IN_ISLESSEQUAL:
14381 case BUILT_IN_ISLESSGREATER:
14382 case BUILT_IN_ISUNORDERED:
14383 case BUILT_IN_VA_ARG_PACK:
14384 case BUILT_IN_VA_ARG_PACK_LEN:
14385 case BUILT_IN_VA_COPY:
14386 case BUILT_IN_TRAP:
14387 case BUILT_IN_SAVEREGS:
14388 case BUILT_IN_POPCOUNTL:
14389 case BUILT_IN_POPCOUNTLL:
14390 case BUILT_IN_POPCOUNTIMAX:
14391 case BUILT_IN_POPCOUNT:
14392 case BUILT_IN_PARITYL:
14393 case BUILT_IN_PARITYLL:
14394 case BUILT_IN_PARITYIMAX:
14395 case BUILT_IN_PARITY:
14396 case BUILT_IN_LABS:
14397 case BUILT_IN_LLABS:
14398 case BUILT_IN_PREFETCH:
14399 return true;
14400
14401 default:
14402 return is_simple_builtin (decl);
14403 }
14404
14405 return false;
14406 }
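
/* For instance (an illustrative note), a call such as

    int n = __builtin_popcount (x);

typically expands to one instruction or a short libgcc sequence,
so the heuristics that consult this predicate can treat it as
cheap.  */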