/* Expand builtin functions.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "realmpfr.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-ssa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees; make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx,
                                       enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
                                        enum tree_code, enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
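
/* Illustrative only (not part of the original sources): with the
   prefixes checked above,

     is_builtin_name ("__builtin_memcpy")      => true
     is_builtin_name ("__sync_fetch_and_add")  => true
     is_builtin_name ("__atomic_load_n")       => true
     is_builtin_name ("memcpy")                => false  */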


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that C99 functions are present at run time,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int inner, align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (tree_to_hwi (TREE_OPERAND (addr, 1))
                   & -tree_to_hwi (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = tree_to_hwi (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
        {
          next_offset = TREE_OPERAND (offset, 0);
          offset = TREE_OPERAND (offset, 1);
        }
      else
        next_offset = NULL;
      if (tree_fits_uhwi_p (offset))
        {
          /* Any overflow in calculating offset_bits won't change
             the alignment.  */
          unsigned offset_bits
            = ((unsigned) tree_to_uhwi (offset) * BITS_PER_UNIT);

          if (offset_bits)
            inner = MIN (inner, (offset_bits & -offset_bits));
        }
      else if (TREE_CODE (offset) == MULT_EXPR
               && tree_fits_uhwi_p (TREE_OPERAND (offset, 1)))
        {
          /* Any overflow in calculating offset_factor won't change
             the alignment.  */
          unsigned offset_factor
            = ((unsigned) tree_to_uhwi (TREE_OPERAND (offset, 1))
               * BITS_PER_UNIT);

          if (offset_factor)
            inner = MIN (inner, (offset_factor & -offset_factor));
        }
      else
        {
          inner = MIN (inner, BITS_PER_UNIT);
          break;
        }
      offset = next_offset;
    }
  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
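
/* A worked example (illustrative, not part of the original sources):
   for an access at constant byte offset 6 into an object whose
   DECL_ALIGN is 128 bits, the code above returns true with
   *ALIGNP == 128 and *BITPOSP == 6 * BITS_PER_UNIT == 48.  */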

/* For a memory reference expression EXP compute values M and N such that
   M divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT in *ALIGNP and any
   bit-offset in *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
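
/* Illustrative arithmetic (not part of the original sources): continuing
   the example above, align == 128 and bitpos == 48 give 48 & -48 == 16,
   so only 16-bit (2-byte) alignment can be guaranteed once the constant
   misalignment is folded in.  */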

/* For a pointer valued expression EXP compute values M and N such that
   M divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((tree_to_hwi (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
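
/* Illustrative only: for an SSA pointer whose ptr_info records an
   alignment of 16 bytes with a misalignment of 4 bytes, the code above
   sets *ALIGNP to 128 and *BITPOSP to 32 (both in bits).  */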

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (!tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
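
/* Illustrative examples (not part of the original sources): for a
   constant argument equivalent to &"foo\0bar"[0], c_strlen returns
   ssize_int (3); with a non-constant offset into the same string it
   returns NULL_TREE, because the embedded nul makes the length depend
   on where the search starts.  */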

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
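
/* Illustrative only: for an argument equivalent to "hello" + 2,
   c_getstr returns a host pointer to "llo"; for a non-constant or
   out-of-bounds offset it returns 0.  */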

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  wide_int c;
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  for (i = 0; i < len; i++)
    tmp[i] = 0;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
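
/* Illustrative only: on a little-endian target (WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN both false), c_readstr ("abcd", SImode) yields the
   constant 0x64636261, i.e. 'a' ends up in the least significant byte.  */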

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = tree_to_hwi (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
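
/* Illustrative C-level usage (hypothetical user code, not part of this
   file): the COUNT parameter above corresponds to the constant argument
   of the builtins, e.g.

     void *ra = __builtin_return_address (0);  // caller's return address
     void *fa = __builtin_frame_address (1);   // parent frame address  */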

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */
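
  /* An illustrative layout of the buffer, assuming Pmode is 8 bytes wide
     (example values only):
       bytes  0..7   frame value from targetm.builtin_setjmp_frame_value ()
       bytes  8..15  address of RECEIVER_LABEL
       bytes 16..    stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */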

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must block
     (frame-related) register values to be used across this code.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because that
     is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
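
/* Illustrative C-level usage (hypothetical user code): with the defaults
   applied above, these calls are equivalent pairs:

     __builtin_prefetch (p);      // same as __builtin_prefetch (p, 0, 3)
     __builtin_prefetch (p, 1);   // same as __builtin_prefetch (p, 1, 3)  */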

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}
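
/* Illustrative arithmetic (example values only): if size is 10 so far
   and the next register mode is 8 bytes wide with 8-byte alignment,
   then CEIL (10, 8) * 8 == 16, so that register's slot starts at
   offset 16 and size becomes 24.  */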

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed the arguments to us,
     not as we might have pretended they were passed.  Make sure it's a
     valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
1485 tem
1486 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1487 NULL_RTX);
1488 #endif
1489 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1490
1491 size = GET_MODE_SIZE (Pmode);
1492
1493 /* Save the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 if (struct_incoming_value)
1496 {
1497 emit_move_insn (adjust_address (registers, Pmode, size),
1498 copy_to_reg (struct_incoming_value));
1499 size += GET_MODE_SIZE (Pmode);
1500 }
1501
1502 /* Return the address of the block. */
1503 return copy_addr_to_reg (XEXP (registers, 0));
1504 }
1505
1506 /* __builtin_apply_args returns block of memory allocated on
1507 the stack into which is stored the arg pointer, structure
1508 value address, static chain, and all the registers that might
1509 possibly be used in performing a function call. The code is
1510 moved to the start of the function so the incoming values are
1511 saved. */
1512
1513 static rtx
1514 expand_builtin_apply_args (void)
1515 {
1516 /* Don't do __builtin_apply_args more than once in a function.
1517 Save the result of the first call and reuse it. */
1518 if (apply_args_value != 0)
1519 return apply_args_value;
1520 {
1521 /* When this function is called, it means that registers must be
1522 saved on entry to this function. So we migrate the
1523 call to the first insn of this function. */
1524 rtx temp;
1525 rtx seq;
1526
1527 start_sequence ();
1528 temp = expand_builtin_apply_args_1 ();
1529 seq = get_insns ();
1530 end_sequence ();
1531
1532 apply_args_value = temp;
1533
1534 /* Put the insns after the NOTE that starts the function.
1535 If this is inside a start_sequence, make the outer-level insn
1536 chain current, so the code is placed at the start of the
1537 function. If internal_arg_pointer is a non-virtual pseudo,
1538 it needs to be placed after the function that initializes
1539 that pseudo. */
1540 push_topmost_sequence ();
1541 if (REG_P (crtl->args.internal_arg_pointer)
1542 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1543 emit_insn_before (seq, parm_birth_insn);
1544 else
1545 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1546 pop_topmost_sequence ();
1547 return temp;
1548 }
1549 }
1550
1551 /* Perform an untyped call and save the state required to perform an
1552 untyped return of whatever value was returned by the given function. */
1553
1554 static rtx
1555 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1556 {
1557 int size, align, regno;
1558 enum machine_mode mode;
1559 rtx incoming_args, result, reg, dest, src, call_insn;
1560 rtx old_stack_level = 0;
1561 rtx call_fusage = 0;
1562 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1563
1564 arguments = convert_memory_address (Pmode, arguments);
1565
1566 /* Create a block where the return registers can be saved. */
1567 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1568
1569 /* Fetch the arg pointer from the ARGUMENTS block. */
1570 incoming_args = gen_reg_rtx (Pmode);
1571 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1572 #ifndef STACK_GROWS_DOWNWARD
1573 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1574 incoming_args, 0, OPTAB_LIB_WIDEN);
1575 #endif
1576
1577 /* Push a new argument block and copy the arguments. Do not allow
1578 the (potential) memcpy call below to interfere with our stack
1579 manipulations. */
1580 do_pending_stack_adjust ();
1581 NO_DEFER_POP;
1582
1583 /* Save the stack with nonlocal if available. */
1584 #ifdef HAVE_save_stack_nonlocal
1585 if (HAVE_save_stack_nonlocal)
1586 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1587 else
1588 #endif
1589 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1590
1591 /* Allocate a block of memory onto the stack and copy the memory
1592 arguments to the outgoing arguments address. We can pass TRUE
1593 as the 4th argument because we just saved the stack pointer
1594 and will restore it right after the call. */
1595 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1596
1597 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1598 may have already set current_function_calls_alloca to true.
1599 current_function_calls_alloca won't be set if argsize is zero,
1600 so we have to guarantee need_drap is true here. */
1601 if (SUPPORTS_STACK_ALIGNMENT)
1602 crtl->need_drap = true;
1603
1604 dest = virtual_outgoing_args_rtx;
1605 #ifndef STACK_GROWS_DOWNWARD
1606 if (CONST_INT_P (argsize))
1607 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1608 else
1609 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1610 #endif
1611 dest = gen_rtx_MEM (BLKmode, dest);
1612 set_mem_align (dest, PARM_BOUNDARY);
1613 src = gen_rtx_MEM (BLKmode, incoming_args);
1614 set_mem_align (src, PARM_BOUNDARY);
1615 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1616
1617 /* Refer to the argument block. */
1618 apply_args_size ();
1619 arguments = gen_rtx_MEM (BLKmode, arguments);
1620 set_mem_align (arguments, PARM_BOUNDARY);
1621
1622 /* Walk past the arg-pointer and structure value address. */
1623 size = GET_MODE_SIZE (Pmode);
1624 if (struct_value)
1625 size += GET_MODE_SIZE (Pmode);
1626
1627 /* Restore each of the registers previously saved. Make USE insns
1628 for each of these registers for use in making the call. */
1629 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1630 if ((mode = apply_args_mode[regno]) != VOIDmode)
1631 {
1632 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1633 if (size % align != 0)
1634 size = CEIL (size, align) * align;
1635 reg = gen_rtx_REG (mode, regno);
1636 emit_move_insn (reg, adjust_address (arguments, mode, size));
1637 use_reg (&call_fusage, reg);
1638 size += GET_MODE_SIZE (mode);
1639 }
1640
1641 /* Restore the structure value address unless this is passed as an
1642 "invisible" first argument. */
1643 size = GET_MODE_SIZE (Pmode);
1644 if (struct_value)
1645 {
1646 rtx value = gen_reg_rtx (Pmode);
1647 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1648 emit_move_insn (struct_value, value);
1649 if (REG_P (struct_value))
1650 use_reg (&call_fusage, struct_value);
1651 size += GET_MODE_SIZE (Pmode);
1652 }
1653
1654 /* All arguments and registers used for the call are set up by now! */
1655 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1656
1657 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1658 needs to be done, and we don't want to load it into a register as an
1659 optimization, because prepare_call_address already did so if needed. */
1660 if (GET_CODE (function) != SYMBOL_REF)
1661 function = memory_address (FUNCTION_MODE, function);
1662
1663 /* Generate the actual call instruction and save the return value. */
1664 #ifdef HAVE_untyped_call
1665 if (HAVE_untyped_call)
1666 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1667 result, result_vector (1, result)));
1668 else
1669 #endif
1670 #ifdef HAVE_call_value
1671 if (HAVE_call_value)
1672 {
1673 rtx valreg = 0;
1674
1675 /* Locate the unique return register. It is not possible to
1676 express a call that sets more than one return register using
1677 call_value; use untyped_call for that. In fact, untyped_call
1678 only needs to save the return registers in the given block. */
1679 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1680 if ((mode = apply_result_mode[regno]) != VOIDmode)
1681 {
1682 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1683
1684 valreg = gen_rtx_REG (mode, regno);
1685 }
1686
1687 emit_call_insn (GEN_CALL_VALUE (valreg,
1688 gen_rtx_MEM (FUNCTION_MODE, function),
1689 const0_rtx, NULL_RTX, const0_rtx));
1690
1691 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1692 }
1693 else
1694 #endif
1695 gcc_unreachable ();
1696
1697 /* Find the CALL insn we just emitted, and attach the register usage
1698 information. */
1699 call_insn = last_call_insn ();
1700 add_function_usage_to (call_insn, call_fusage);
1701
1702 /* Restore the stack. */
1703 #ifdef HAVE_save_stack_nonlocal
1704 if (HAVE_save_stack_nonlocal)
1705 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1706 else
1707 #endif
1708 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1709 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1710
1711 OK_DEFER_POP;
1712
1713 /* Return the address of the result block. */
1714 result = copy_addr_to_reg (XEXP (result, 0));
1715 return convert_memory_address (ptr_mode, result);
1716 }
1717
1718 /* Perform an untyped return. */
1719
1720 static void
1721 expand_builtin_return (rtx result)
1722 {
1723 int size, align, regno;
1724 enum machine_mode mode;
1725 rtx reg;
1726 rtx call_fusage = 0;
1727
1728 result = convert_memory_address (Pmode, result);
1729
1730 apply_result_size ();
1731 result = gen_rtx_MEM (BLKmode, result);
1732
1733 #ifdef HAVE_untyped_return
1734 if (HAVE_untyped_return)
1735 {
1736 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1737 emit_barrier ();
1738 return;
1739 }
1740 #endif
1741
1742 /* Restore the return value and note that each value is used. */
1743 size = 0;
1744 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1745 if ((mode = apply_result_mode[regno]) != VOIDmode)
1746 {
1747 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1748 if (size % align != 0)
1749 size = CEIL (size, align) * align;
1750 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1751 emit_move_insn (reg, adjust_address (result, mode, size));
1752
1753 push_to_sequence (call_fusage);
1754 emit_use (reg);
1755 call_fusage = get_insns ();
1756 end_sequence ();
1757 size += GET_MODE_SIZE (mode);
1758 }
1759
1760 /* Put the USE insns before the return. */
1761 emit_insn (call_fusage);
1762
1763 /* Return whatever values were restored by jumping directly to the end
1764 of the function. */
1765 expand_naked_return ();
1766 }
1767
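/* An illustrative sketch, not part of this file, of the user-level idiom
   the three expanders above implement. A forwarding wrapper captures its
   own incoming registers and stack arguments, re-issues them as a call to
   another function, and returns whatever that call returned; `target_fn'
   and the 128-byte argument-block bound are hypothetical.

     extern double target_fn ();

     double forward (double x)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 128);
       __builtin_return (ret);
     }
 */
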
1768 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1769
1770 static enum type_class
1771 type_to_class (tree type)
1772 {
1773 switch (TREE_CODE (type))
1774 {
1775 case VOID_TYPE: return void_type_class;
1776 case INTEGER_TYPE: return integer_type_class;
1777 case ENUMERAL_TYPE: return enumeral_type_class;
1778 case BOOLEAN_TYPE: return boolean_type_class;
1779 case POINTER_TYPE: return pointer_type_class;
1780 case REFERENCE_TYPE: return reference_type_class;
1781 case OFFSET_TYPE: return offset_type_class;
1782 case REAL_TYPE: return real_type_class;
1783 case COMPLEX_TYPE: return complex_type_class;
1784 case FUNCTION_TYPE: return function_type_class;
1785 case METHOD_TYPE: return method_type_class;
1786 case RECORD_TYPE: return record_type_class;
1787 case UNION_TYPE:
1788 case QUAL_UNION_TYPE: return union_type_class;
1789 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1790 ? string_type_class : array_type_class);
1791 case LANG_TYPE: return lang_type_class;
1792 default: return no_type_class;
1793 }
1794 }
1795
1796 /* Expand a call EXP to __builtin_classify_type. */
1797
1798 static rtx
1799 expand_builtin_classify_type (tree exp)
1800 {
1801 if (call_expr_nargs (exp))
1802 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1803 return GEN_INT (no_type_class);
1804 }
1805
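/* An illustrative use, assuming the enum layout in typeclass.h where
   real_type_class is 8: because the expander above folds the builtin to
   a compile-time constant, headers can dispatch on it in macros, in the
   style of glibc's <tgmath.h>. The macro name is hypothetical.

     #define IS_FLOATING(expr) (__builtin_classify_type (expr) == 8)
 */
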
1806 /* This helper macro, meant to be used in mathfn_built_in below,
1807 determines which among a set of three builtin math functions is
1808 appropriate for a given type mode. The `F' and `L' cases are
1809 automatically generated from the `double' case. */
1810 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1811 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1812 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1813 fcodel = BUILT_IN_MATHFN##L ; break;
1814 /* Similar to above, but appends _R after any F/L suffix. */
1815 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1816 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1817 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1818 fcodel = BUILT_IN_MATHFN##L_R ; break;
1819
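/* For reference, CASE_MATHFN (BUILT_IN_SQRT) expands to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   and CASE_MATHFN_REENT (BUILT_IN_LGAMMA) to the analogous
   BUILT_IN_LGAMMA_R/BUILT_IN_LGAMMAF_R/BUILT_IN_LGAMMAL_R cases.  */
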
1820 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1821 if available. If IMPLICIT is true use the implicit builtin declaration,
1822 otherwise use the explicit declaration. If we can't do the conversion,
1823 return zero. */
1824
1825 static tree
1826 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1827 {
1828 enum built_in_function fcode, fcodef, fcodel, fcode2;
1829
1830 switch (fn)
1831 {
1832 CASE_MATHFN (BUILT_IN_ACOS)
1833 CASE_MATHFN (BUILT_IN_ACOSH)
1834 CASE_MATHFN (BUILT_IN_ASIN)
1835 CASE_MATHFN (BUILT_IN_ASINH)
1836 CASE_MATHFN (BUILT_IN_ATAN)
1837 CASE_MATHFN (BUILT_IN_ATAN2)
1838 CASE_MATHFN (BUILT_IN_ATANH)
1839 CASE_MATHFN (BUILT_IN_CBRT)
1840 CASE_MATHFN (BUILT_IN_CEIL)
1841 CASE_MATHFN (BUILT_IN_CEXPI)
1842 CASE_MATHFN (BUILT_IN_COPYSIGN)
1843 CASE_MATHFN (BUILT_IN_COS)
1844 CASE_MATHFN (BUILT_IN_COSH)
1845 CASE_MATHFN (BUILT_IN_DREM)
1846 CASE_MATHFN (BUILT_IN_ERF)
1847 CASE_MATHFN (BUILT_IN_ERFC)
1848 CASE_MATHFN (BUILT_IN_EXP)
1849 CASE_MATHFN (BUILT_IN_EXP10)
1850 CASE_MATHFN (BUILT_IN_EXP2)
1851 CASE_MATHFN (BUILT_IN_EXPM1)
1852 CASE_MATHFN (BUILT_IN_FABS)
1853 CASE_MATHFN (BUILT_IN_FDIM)
1854 CASE_MATHFN (BUILT_IN_FLOOR)
1855 CASE_MATHFN (BUILT_IN_FMA)
1856 CASE_MATHFN (BUILT_IN_FMAX)
1857 CASE_MATHFN (BUILT_IN_FMIN)
1858 CASE_MATHFN (BUILT_IN_FMOD)
1859 CASE_MATHFN (BUILT_IN_FREXP)
1860 CASE_MATHFN (BUILT_IN_GAMMA)
1861 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1862 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1863 CASE_MATHFN (BUILT_IN_HYPOT)
1864 CASE_MATHFN (BUILT_IN_ILOGB)
1865 CASE_MATHFN (BUILT_IN_ICEIL)
1866 CASE_MATHFN (BUILT_IN_IFLOOR)
1867 CASE_MATHFN (BUILT_IN_INF)
1868 CASE_MATHFN (BUILT_IN_IRINT)
1869 CASE_MATHFN (BUILT_IN_IROUND)
1870 CASE_MATHFN (BUILT_IN_ISINF)
1871 CASE_MATHFN (BUILT_IN_J0)
1872 CASE_MATHFN (BUILT_IN_J1)
1873 CASE_MATHFN (BUILT_IN_JN)
1874 CASE_MATHFN (BUILT_IN_LCEIL)
1875 CASE_MATHFN (BUILT_IN_LDEXP)
1876 CASE_MATHFN (BUILT_IN_LFLOOR)
1877 CASE_MATHFN (BUILT_IN_LGAMMA)
1878 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1879 CASE_MATHFN (BUILT_IN_LLCEIL)
1880 CASE_MATHFN (BUILT_IN_LLFLOOR)
1881 CASE_MATHFN (BUILT_IN_LLRINT)
1882 CASE_MATHFN (BUILT_IN_LLROUND)
1883 CASE_MATHFN (BUILT_IN_LOG)
1884 CASE_MATHFN (BUILT_IN_LOG10)
1885 CASE_MATHFN (BUILT_IN_LOG1P)
1886 CASE_MATHFN (BUILT_IN_LOG2)
1887 CASE_MATHFN (BUILT_IN_LOGB)
1888 CASE_MATHFN (BUILT_IN_LRINT)
1889 CASE_MATHFN (BUILT_IN_LROUND)
1890 CASE_MATHFN (BUILT_IN_MODF)
1891 CASE_MATHFN (BUILT_IN_NAN)
1892 CASE_MATHFN (BUILT_IN_NANS)
1893 CASE_MATHFN (BUILT_IN_NEARBYINT)
1894 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1895 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1896 CASE_MATHFN (BUILT_IN_POW)
1897 CASE_MATHFN (BUILT_IN_POWI)
1898 CASE_MATHFN (BUILT_IN_POW10)
1899 CASE_MATHFN (BUILT_IN_REMAINDER)
1900 CASE_MATHFN (BUILT_IN_REMQUO)
1901 CASE_MATHFN (BUILT_IN_RINT)
1902 CASE_MATHFN (BUILT_IN_ROUND)
1903 CASE_MATHFN (BUILT_IN_SCALB)
1904 CASE_MATHFN (BUILT_IN_SCALBLN)
1905 CASE_MATHFN (BUILT_IN_SCALBN)
1906 CASE_MATHFN (BUILT_IN_SIGNBIT)
1907 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1908 CASE_MATHFN (BUILT_IN_SIN)
1909 CASE_MATHFN (BUILT_IN_SINCOS)
1910 CASE_MATHFN (BUILT_IN_SINH)
1911 CASE_MATHFN (BUILT_IN_SQRT)
1912 CASE_MATHFN (BUILT_IN_TAN)
1913 CASE_MATHFN (BUILT_IN_TANH)
1914 CASE_MATHFN (BUILT_IN_TGAMMA)
1915 CASE_MATHFN (BUILT_IN_TRUNC)
1916 CASE_MATHFN (BUILT_IN_Y0)
1917 CASE_MATHFN (BUILT_IN_Y1)
1918 CASE_MATHFN (BUILT_IN_YN)
1919
1920 default:
1921 return NULL_TREE;
1922 }
1923
1924 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1925 fcode2 = fcode;
1926 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1927 fcode2 = fcodef;
1928 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1929 fcode2 = fcodel;
1930 else
1931 return NULL_TREE;
1932
1933 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1934 return NULL_TREE;
1935
1936 return builtin_decl_explicit (fcode2);
1937 }
1938
1939 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1940
1941 tree
1942 mathfn_built_in (tree type, enum built_in_function fn)
1943 {
1944 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1945 }
1946
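/* An illustrative sketch of a typical call site in folding code; the
   surrounding names (arg, loc) are hypothetical. Given a float-typed
   ARG this retrieves the implicit decl of sqrtf, or NULL_TREE when it
   is unavailable, so a transformation such as pow (x, 0.5) -> sqrt (x)
   can be expressed in the argument's own type:

     tree sqrtfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_SQRT);
     if (sqrtfn)
       return build_call_expr_loc (loc, sqrtfn, 1, arg);
 */
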
1947 /* If errno must be maintained, expand the RTL to check if the result,
1948 TARGET, of a built-in function call, EXP, is NaN, and if so set
1949 errno to EDOM. */
1950
1951 static void
1952 expand_errno_check (tree exp, rtx target)
1953 {
1954 rtx lab = gen_label_rtx ();
1955
1956 /* Test the result; if it is NaN, set errno=EDOM because
1957 the argument was not in the domain. */
1958 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1959 NULL_RTX, NULL_RTX, lab,
1960 /* The jump is very likely. */
1961 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1962
1963 #ifdef TARGET_EDOM
1964 /* If this built-in doesn't throw an exception, set errno directly. */
1965 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1966 {
1967 #ifdef GEN_ERRNO_RTX
1968 rtx errno_rtx = GEN_ERRNO_RTX;
1969 #else
1970 rtx errno_rtx
1971 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1972 #endif
1973 emit_move_insn (errno_rtx,
1974 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1975 emit_label (lab);
1976 return;
1977 }
1978 #endif
1979
1980 /* Make sure the library call isn't expanded as a tail call. */
1981 CALL_EXPR_TAILCALL (exp) = 0;
1982
1983 /* We can't set errno=EDOM directly; let the library call do it.
1984 Pop the arguments right away in case the call gets deleted. */
1985 NO_DEFER_POP;
1986 expand_call (exp, target, 0);
1987 OK_DEFER_POP;
1988 emit_label (lab);
1989 }
1990
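/* A C-level sketch of the sequence emitted above for y = sqrt (x)
   when errno must be maintained:

     y = <sqrt insn> (x);
     if (y == y)          // not NaN: argument was in the domain
       goto done;
     errno = EDOM;        // TARGET_EDOM path; otherwise re-call libm
   done:;
 */
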
1991 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1992 Return NULL_RTX if a normal call should be emitted rather than expanding
1993 the function in-line. EXP is the expression that is a call to the builtin
1994 function; if convenient, the result should be placed in TARGET.
1995 SUBTARGET may be used as the target for computing one of EXP's operands. */
1996
1997 static rtx
1998 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1999 {
2000 optab builtin_optab;
2001 rtx op0, insns;
2002 tree fndecl = get_callee_fndecl (exp);
2003 enum machine_mode mode;
2004 bool errno_set = false;
2005 bool try_widening = false;
2006 tree arg;
2007
2008 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2009 return NULL_RTX;
2010
2011 arg = CALL_EXPR_ARG (exp, 0);
2012
2013 switch (DECL_FUNCTION_CODE (fndecl))
2014 {
2015 CASE_FLT_FN (BUILT_IN_SQRT):
2016 errno_set = ! tree_expr_nonnegative_p (arg);
2017 try_widening = true;
2018 builtin_optab = sqrt_optab;
2019 break;
2020 CASE_FLT_FN (BUILT_IN_EXP):
2021 errno_set = true; builtin_optab = exp_optab; break;
2022 CASE_FLT_FN (BUILT_IN_EXP10):
2023 CASE_FLT_FN (BUILT_IN_POW10):
2024 errno_set = true; builtin_optab = exp10_optab; break;
2025 CASE_FLT_FN (BUILT_IN_EXP2):
2026 errno_set = true; builtin_optab = exp2_optab; break;
2027 CASE_FLT_FN (BUILT_IN_EXPM1):
2028 errno_set = true; builtin_optab = expm1_optab; break;
2029 CASE_FLT_FN (BUILT_IN_LOGB):
2030 errno_set = true; builtin_optab = logb_optab; break;
2031 CASE_FLT_FN (BUILT_IN_LOG):
2032 errno_set = true; builtin_optab = log_optab; break;
2033 CASE_FLT_FN (BUILT_IN_LOG10):
2034 errno_set = true; builtin_optab = log10_optab; break;
2035 CASE_FLT_FN (BUILT_IN_LOG2):
2036 errno_set = true; builtin_optab = log2_optab; break;
2037 CASE_FLT_FN (BUILT_IN_LOG1P):
2038 errno_set = true; builtin_optab = log1p_optab; break;
2039 CASE_FLT_FN (BUILT_IN_ASIN):
2040 builtin_optab = asin_optab; break;
2041 CASE_FLT_FN (BUILT_IN_ACOS):
2042 builtin_optab = acos_optab; break;
2043 CASE_FLT_FN (BUILT_IN_TAN):
2044 builtin_optab = tan_optab; break;
2045 CASE_FLT_FN (BUILT_IN_ATAN):
2046 builtin_optab = atan_optab; break;
2047 CASE_FLT_FN (BUILT_IN_FLOOR):
2048 builtin_optab = floor_optab; break;
2049 CASE_FLT_FN (BUILT_IN_CEIL):
2050 builtin_optab = ceil_optab; break;
2051 CASE_FLT_FN (BUILT_IN_TRUNC):
2052 builtin_optab = btrunc_optab; break;
2053 CASE_FLT_FN (BUILT_IN_ROUND):
2054 builtin_optab = round_optab; break;
2055 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2056 builtin_optab = nearbyint_optab;
2057 if (flag_trapping_math)
2058 break;
2059 /* Else fall through and expand as rint. */
2060 CASE_FLT_FN (BUILT_IN_RINT):
2061 builtin_optab = rint_optab; break;
2062 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2063 builtin_optab = significand_optab; break;
2064 default:
2065 gcc_unreachable ();
2066 }
2067
2068 /* Make a suitable register to place result in. */
2069 mode = TYPE_MODE (TREE_TYPE (exp));
2070
2071 if (! flag_errno_math || ! HONOR_NANS (mode))
2072 errno_set = false;
2073
2074 /* Before working hard, check whether the instruction is available, but try
2075 to widen the mode for specific operations. */
2076 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2077 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2078 && (!errno_set || !optimize_insn_for_size_p ()))
2079 {
2080 rtx result = gen_reg_rtx (mode);
2081
2082 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2083 need to expand the argument again. This way, we will not perform
2084 side effects more than once. */
2085 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2086
2087 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2088
2089 start_sequence ();
2090
2091 /* Compute into RESULT.
2092 Set RESULT to wherever the result comes back. */
2093 result = expand_unop (mode, builtin_optab, op0, result, 0);
2094
2095 if (result != 0)
2096 {
2097 if (errno_set)
2098 expand_errno_check (exp, result);
2099
2100 /* Output the entire sequence. */
2101 insns = get_insns ();
2102 end_sequence ();
2103 emit_insn (insns);
2104 return result;
2105 }
2106
2107 /* If we were unable to expand via the builtin, stop the sequence
2108 (without outputting the insns) and call the library function
2109 with the stabilized argument list. */
2110 end_sequence ();
2111 }
2112
2113 return expand_call (exp, target, target == const0_rtx);
2114 }
2115
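/* An illustrative note on the SAVE_EXPR wrap above: for

     y = sqrt (f ());

   the expander may try the optab and still reach the expand_call
   fallback; without builtin_save_expr, f () could be expanded, and
   its side effects performed, once per attempt.  */
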
2116 /* Expand a call to one of the builtin binary math functions (pow, atan2,
2117 fmod, etc.). Return NULL_RTX if a normal call should be emitted rather than expanding the
2118 function in-line. EXP is the expression that is a call to the builtin
2119 function; if convenient, the result should be placed in TARGET.
2120 SUBTARGET may be used as the target for computing one of EXP's
2121 operands. */
2122
2123 static rtx
2124 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2125 {
2126 optab builtin_optab;
2127 rtx op0, op1, insns, result;
2128 int op1_type = REAL_TYPE;
2129 tree fndecl = get_callee_fndecl (exp);
2130 tree arg0, arg1;
2131 enum machine_mode mode;
2132 bool errno_set = true;
2133
2134 switch (DECL_FUNCTION_CODE (fndecl))
2135 {
2136 CASE_FLT_FN (BUILT_IN_SCALBN):
2137 CASE_FLT_FN (BUILT_IN_SCALBLN):
2138 CASE_FLT_FN (BUILT_IN_LDEXP):
2139 op1_type = INTEGER_TYPE;
2140 default:
2141 break;
2142 }
2143
2144 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2145 return NULL_RTX;
2146
2147 arg0 = CALL_EXPR_ARG (exp, 0);
2148 arg1 = CALL_EXPR_ARG (exp, 1);
2149
2150 switch (DECL_FUNCTION_CODE (fndecl))
2151 {
2152 CASE_FLT_FN (BUILT_IN_POW):
2153 builtin_optab = pow_optab; break;
2154 CASE_FLT_FN (BUILT_IN_ATAN2):
2155 builtin_optab = atan2_optab; break;
2156 CASE_FLT_FN (BUILT_IN_SCALB):
2157 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2158 return 0;
2159 builtin_optab = scalb_optab; break;
2160 CASE_FLT_FN (BUILT_IN_SCALBN):
2161 CASE_FLT_FN (BUILT_IN_SCALBLN):
2162 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2163 return 0;
2164 /* Fall through... */
2165 CASE_FLT_FN (BUILT_IN_LDEXP):
2166 builtin_optab = ldexp_optab; break;
2167 CASE_FLT_FN (BUILT_IN_FMOD):
2168 builtin_optab = fmod_optab; break;
2169 CASE_FLT_FN (BUILT_IN_REMAINDER):
2170 CASE_FLT_FN (BUILT_IN_DREM):
2171 builtin_optab = remainder_optab; break;
2172 default:
2173 gcc_unreachable ();
2174 }
2175
2176 /* Make a suitable register to place result in. */
2177 mode = TYPE_MODE (TREE_TYPE (exp));
2178
2179 /* Before working hard, check whether the instruction is available. */
2180 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2181 return NULL_RTX;
2182
2183 result = gen_reg_rtx (mode);
2184
2185 if (! flag_errno_math || ! HONOR_NANS (mode))
2186 errno_set = false;
2187
2188 if (errno_set && optimize_insn_for_size_p ())
2189 return 0;
2190
2191 /* Always stabilize the argument list. */
2192 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2193 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2194
2195 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2196 op1 = expand_normal (arg1);
2197
2198 start_sequence ();
2199
2200 /* Compute into RESULT.
2201 Set RESULT to wherever the result comes back. */
2202 result = expand_binop (mode, builtin_optab, op0, op1,
2203 result, 0, OPTAB_DIRECT);
2204
2205 /* If we were unable to expand via the builtin, stop the sequence
2206 (without outputting the insns) and call the library function
2207 with the stabilized argument list. */
2208 if (result == 0)
2209 {
2210 end_sequence ();
2211 return expand_call (exp, target, target == const0_rtx);
2212 }
2213
2214 if (errno_set)
2215 expand_errno_check (exp, result);
2216
2217 /* Output the entire sequence. */
2218 insns = get_insns ();
2219 end_sequence ();
2220 emit_insn (insns);
2221
2222 return result;
2223 }
2224
2225 /* Expand a call to the builtin ternary math functions (fma).
2226 Return NULL_RTX if a normal call should be emitted rather than expanding the
2227 function in-line. EXP is the expression that is a call to the builtin
2228 function; if convenient, the result should be placed in TARGET.
2229 SUBTARGET may be used as the target for computing one of EXP's
2230 operands. */
2231
2232 static rtx
2233 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2234 {
2235 optab builtin_optab;
2236 rtx op0, op1, op2, insns, result;
2237 tree fndecl = get_callee_fndecl (exp);
2238 tree arg0, arg1, arg2;
2239 enum machine_mode mode;
2240
2241 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2242 return NULL_RTX;
2243
2244 arg0 = CALL_EXPR_ARG (exp, 0);
2245 arg1 = CALL_EXPR_ARG (exp, 1);
2246 arg2 = CALL_EXPR_ARG (exp, 2);
2247
2248 switch (DECL_FUNCTION_CODE (fndecl))
2249 {
2250 CASE_FLT_FN (BUILT_IN_FMA):
2251 builtin_optab = fma_optab; break;
2252 default:
2253 gcc_unreachable ();
2254 }
2255
2256 /* Make a suitable register to place result in. */
2257 mode = TYPE_MODE (TREE_TYPE (exp));
2258
2259 /* Before working hard, check whether the instruction is available. */
2260 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2261 return NULL_RTX;
2262
2263 result = gen_reg_rtx (mode);
2264
2265 /* Always stabilize the argument list. */
2266 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2267 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2268 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2269
2270 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2271 op1 = expand_normal (arg1);
2272 op2 = expand_normal (arg2);
2273
2274 start_sequence ();
2275
2276 /* Compute into RESULT.
2277 Set RESULT to wherever the result comes back. */
2278 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2279 result, 0);
2280
2281 /* If we were unable to expand via the builtin, stop the sequence
2282 (without outputting the insns) and call the library function
2283 with the stabilized argument list. */
2284 if (result == 0)
2285 {
2286 end_sequence ();
2287 return expand_call (exp, target, target == const0_rtx);
2288 }
2289
2290 /* Output the entire sequence. */
2291 insns = get_insns ();
2292 end_sequence ();
2293 emit_insn (insns);
2294
2295 return result;
2296 }
2297
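/* Illustratively, on a target whose fma_optab pattern is implemented,

     r = __builtin_fma (a, b, c);

   becomes a single insn computing a * b + c with one rounding step;
   otherwise the code above emits a normal call to fma in libm.  */
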
2298 /* Expand a call to the builtin sin and cos math functions.
2299 Return NULL_RTX if a normal call should be emitted rather than expanding the
2300 function in-line. EXP is the expression that is a call to the builtin
2301 function; if convenient, the result should be placed in TARGET.
2302 SUBTARGET may be used as the target for computing one of EXP's
2303 operands. */
2304
2305 static rtx
2306 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2307 {
2308 optab builtin_optab;
2309 rtx op0, insns;
2310 tree fndecl = get_callee_fndecl (exp);
2311 enum machine_mode mode;
2312 tree arg;
2313
2314 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2315 return NULL_RTX;
2316
2317 arg = CALL_EXPR_ARG (exp, 0);
2318
2319 switch (DECL_FUNCTION_CODE (fndecl))
2320 {
2321 CASE_FLT_FN (BUILT_IN_SIN):
2322 CASE_FLT_FN (BUILT_IN_COS):
2323 builtin_optab = sincos_optab; break;
2324 default:
2325 gcc_unreachable ();
2326 }
2327
2328 /* Make a suitable register to place result in. */
2329 mode = TYPE_MODE (TREE_TYPE (exp));
2330
2331 /* Check if the sincos insn is available; otherwise fall back
2332 to the sin or cos insn. */
2333 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2334 switch (DECL_FUNCTION_CODE (fndecl))
2335 {
2336 CASE_FLT_FN (BUILT_IN_SIN):
2337 builtin_optab = sin_optab; break;
2338 CASE_FLT_FN (BUILT_IN_COS):
2339 builtin_optab = cos_optab; break;
2340 default:
2341 gcc_unreachable ();
2342 }
2343
2344 /* Before working hard, check whether the instruction is available. */
2345 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2346 {
2347 rtx result = gen_reg_rtx (mode);
2348
2349 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2350 need to expand the argument again. This way, we will not perform
2351 side effects more than once. */
2352 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2353
2354 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2355
2356 start_sequence ();
2357
2358 /* Compute into RESULT.
2359 Set RESULT to wherever the result comes back. */
2360 if (builtin_optab == sincos_optab)
2361 {
2362 int ok;
2363
2364 switch (DECL_FUNCTION_CODE (fndecl))
2365 {
2366 CASE_FLT_FN (BUILT_IN_SIN):
2367 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2368 break;
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2371 break;
2372 default:
2373 gcc_unreachable ();
2374 }
2375 gcc_assert (ok);
2376 }
2377 else
2378 result = expand_unop (mode, builtin_optab, op0, result, 0);
2379
2380 if (result != 0)
2381 {
2382 /* Output the entire sequence. */
2383 insns = get_insns ();
2384 end_sequence ();
2385 emit_insn (insns);
2386 return result;
2387 }
2388
2389 /* If we were unable to expand via the builtin, stop the sequence
2390 (without outputting the insns) and call the library function
2391 with the stabilized argument list. */
2392 end_sequence ();
2393 }
2394
2395 return expand_call (exp, target, target == const0_rtx);
2396 }
2397
2398 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2399 return an RTL instruction code that implements the functionality.
2400 If that isn't possible or available return CODE_FOR_nothing. */
2401
2402 static enum insn_code
2403 interclass_mathfn_icode (tree arg, tree fndecl)
2404 {
2405 bool errno_set = false;
2406 optab builtin_optab = unknown_optab;
2407 enum machine_mode mode;
2408
2409 switch (DECL_FUNCTION_CODE (fndecl))
2410 {
2411 CASE_FLT_FN (BUILT_IN_ILOGB):
2412 errno_set = true; builtin_optab = ilogb_optab; break;
2413 CASE_FLT_FN (BUILT_IN_ISINF):
2414 builtin_optab = isinf_optab; break;
2415 case BUILT_IN_ISNORMAL:
2416 case BUILT_IN_ISFINITE:
2417 CASE_FLT_FN (BUILT_IN_FINITE):
2418 case BUILT_IN_FINITED32:
2419 case BUILT_IN_FINITED64:
2420 case BUILT_IN_FINITED128:
2421 case BUILT_IN_ISINFD32:
2422 case BUILT_IN_ISINFD64:
2423 case BUILT_IN_ISINFD128:
2424 /* These builtins have no optabs (yet). */
2425 break;
2426 default:
2427 gcc_unreachable ();
2428 }
2429
2430 /* There's no easy way to detect the case we need to set EDOM. */
2431 if (flag_errno_math && errno_set)
2432 return CODE_FOR_nothing;
2433
2434 /* Optab mode depends on the mode of the input argument. */
2435 mode = TYPE_MODE (TREE_TYPE (arg));
2436
2437 if (builtin_optab)
2438 return optab_handler (builtin_optab, mode);
2439 return CODE_FOR_nothing;
2440 }
2441
2442 /* Expand a call to one of the builtin math functions that operate on
2443 a floating point argument and output an integer result (ilogb, isinf,
2444 isnan, etc).
2445 Return 0 if a normal call should be emitted rather than expanding the
2446 function in-line. EXP is the expression that is a call to the builtin
2447 function; if convenient, the result should be placed in TARGET. */
2448
2449 static rtx
2450 expand_builtin_interclass_mathfn (tree exp, rtx target)
2451 {
2452 enum insn_code icode = CODE_FOR_nothing;
2453 rtx op0;
2454 tree fndecl = get_callee_fndecl (exp);
2455 enum machine_mode mode;
2456 tree arg;
2457
2458 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2459 return NULL_RTX;
2460
2461 arg = CALL_EXPR_ARG (exp, 0);
2462 icode = interclass_mathfn_icode (arg, fndecl);
2463 mode = TYPE_MODE (TREE_TYPE (arg));
2464
2465 if (icode != CODE_FOR_nothing)
2466 {
2467 struct expand_operand ops[1];
2468 rtx last = get_last_insn ();
2469 tree orig_arg = arg;
2470
2471 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2472 need to expand the argument again. This way, we will not perform
2473 side effects more than once. */
2474 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2475
2476 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2477
2478 if (mode != GET_MODE (op0))
2479 op0 = convert_to_mode (mode, op0, 0);
2480
2481 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2482 if (maybe_legitimize_operands (icode, 0, 1, ops)
2483 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2484 return ops[0].value;
2485
2486 delete_insns_since (last);
2487 CALL_EXPR_ARG (exp, 0) = orig_arg;
2488 }
2489
2490 return NULL_RTX;
2491 }
2492
2493 /* Expand a call to the builtin sincos math function.
2494 Return NULL_RTX if a normal call should be emitted rather than expanding the
2495 function in-line. EXP is the expression that is a call to the builtin
2496 function. */
2497
2498 static rtx
2499 expand_builtin_sincos (tree exp)
2500 {
2501 rtx op0, op1, op2, target1, target2;
2502 enum machine_mode mode;
2503 tree arg, sinp, cosp;
2504 int result;
2505 location_t loc = EXPR_LOCATION (exp);
2506 tree alias_type, alias_off;
2507
2508 if (!validate_arglist (exp, REAL_TYPE,
2509 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2510 return NULL_RTX;
2511
2512 arg = CALL_EXPR_ARG (exp, 0);
2513 sinp = CALL_EXPR_ARG (exp, 1);
2514 cosp = CALL_EXPR_ARG (exp, 2);
2515
2516 /* Make a suitable register to place result in. */
2517 mode = TYPE_MODE (TREE_TYPE (arg));
2518
2519 /* Check if sincos insn is available, otherwise emit the call. */
2520 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2521 return NULL_RTX;
2522
2523 target1 = gen_reg_rtx (mode);
2524 target2 = gen_reg_rtx (mode);
2525
2526 op0 = expand_normal (arg);
2527 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2528 alias_off = build_int_cst (alias_type, 0);
2529 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2530 sinp, alias_off));
2531 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2532 cosp, alias_off));
2533
2534 /* Compute into target1 and target2.
2535 Set TARGET to wherever the result comes back. */
2536 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2537 gcc_assert (result);
2538
2539 /* Move target1 and target2 to the memory locations indicated
2540 by op1 and op2. */
2541 emit_move_insn (op1, target1);
2542 emit_move_insn (op2, target2);
2543
2544 return const0_rtx;
2545 }
2546
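/* An illustrative source form handled above:

     double s, c;
     sincos (x, &s, &c);

   With a sincos insn this expands to one two-value operation plus two
   stores through the pointer arguments; without one, NULL_RTX is
   returned and a normal library call is emitted instead.  */
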
2547 /* Expand a call to the internal cexpi builtin to the sincos math function.
2548 EXP is the expression that is a call to the builtin function; if convenient,
2549 the result should be placed in TARGET. */
2550
2551 static rtx
2552 expand_builtin_cexpi (tree exp, rtx target)
2553 {
2554 tree fndecl = get_callee_fndecl (exp);
2555 tree arg, type;
2556 enum machine_mode mode;
2557 rtx op0, op1, op2;
2558 location_t loc = EXPR_LOCATION (exp);
2559
2560 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2561 return NULL_RTX;
2562
2563 arg = CALL_EXPR_ARG (exp, 0);
2564 type = TREE_TYPE (arg);
2565 mode = TYPE_MODE (TREE_TYPE (arg));
2566
2567 /* Try expanding via a sincos optab, fall back to emitting a libcall
2568 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2569 is only generated from sincos or cexp, or when either of them is available. */
2570 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2571 {
2572 op1 = gen_reg_rtx (mode);
2573 op2 = gen_reg_rtx (mode);
2574
2575 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2576
2577 /* Compute into op1 and op2. */
2578 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2579 }
2580 else if (targetm.libc_has_function (function_sincos))
2581 {
2582 tree call, fn = NULL_TREE;
2583 tree top1, top2;
2584 rtx op1a, op2a;
2585
2586 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2587 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2588 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2589 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2590 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2591 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2592 else
2593 gcc_unreachable ();
2594
2595 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2596 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2597 op1a = copy_addr_to_reg (XEXP (op1, 0));
2598 op2a = copy_addr_to_reg (XEXP (op2, 0));
2599 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2600 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2601
2602 /* Make sure not to fold the sincos call again. */
2603 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2604 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2605 call, 3, arg, top1, top2));
2606 }
2607 else
2608 {
2609 tree call, fn = NULL_TREE, narg;
2610 tree ctype = build_complex_type (type);
2611
2612 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2616 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2618 else
2619 gcc_unreachable ();
2620
2621 /* If we don't have a decl for cexp, create one. This is the
2622 friendliest fallback if the user calls __builtin_cexpi on a
2623 target without full C99 function support. */
2624 if (fn == NULL_TREE)
2625 {
2626 tree fntype;
2627 const char *name = NULL;
2628
2629 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2630 name = "cexpf";
2631 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2632 name = "cexp";
2633 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2634 name = "cexpl";
2635
2636 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2637 fn = build_fn_decl (name, fntype);
2638 }
2639
2640 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2641 build_real (type, dconst0), arg);
2642
2643 /* Make sure not to fold the cexp call again. */
2644 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2645 return expand_expr (build_call_nary (ctype, call, 1, narg),
2646 target, VOIDmode, EXPAND_NORMAL);
2647 }
2648
2649 /* Now build the proper return type. */
2650 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2651 make_tree (TREE_TYPE (arg), op2),
2652 make_tree (TREE_TYPE (arg), op1)),
2653 target, VOIDmode, EXPAND_NORMAL);
2654 }
2655
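/* As background: __builtin_cexpi (x) denotes cexp (I * x), that is,
   cos (x) + I * sin (x), which is why the sincos and cexp fallbacks
   above are interchangeable expansions.  */
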
2656 /* Conveniently construct a function call expression. FNDECL names the
2657 function to be called, N is the number of arguments, and the "..."
2658 parameters are the argument expressions. Unlike build_call_expr,
2659 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2660
2661 static tree
2662 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2663 {
2664 va_list ap;
2665 tree fntype = TREE_TYPE (fndecl);
2666 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2667
2668 va_start (ap, n);
2669 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2670 va_end (ap);
2671 SET_EXPR_LOCATION (fn, loc);
2672 return fn;
2673 }
2674
2675 /* Expand a call to one of the builtin rounding functions gcc defines
2676 as an extension (lfloor and lceil). As these are gcc extensions we
2677 do not need to worry about setting errno to EDOM.
2678 If expanding via optab fails, lower expression to (int)(floor(x)).
2679 EXP is the expression that is a call to the builtin function;
2680 if convenient, the result should be placed in TARGET. */
2681
2682 static rtx
2683 expand_builtin_int_roundingfn (tree exp, rtx target)
2684 {
2685 convert_optab builtin_optab;
2686 rtx op0, insns, tmp;
2687 tree fndecl = get_callee_fndecl (exp);
2688 enum built_in_function fallback_fn;
2689 tree fallback_fndecl;
2690 enum machine_mode mode;
2691 tree arg;
2692
2693 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2694 gcc_unreachable ();
2695
2696 arg = CALL_EXPR_ARG (exp, 0);
2697
2698 switch (DECL_FUNCTION_CODE (fndecl))
2699 {
2700 CASE_FLT_FN (BUILT_IN_ICEIL):
2701 CASE_FLT_FN (BUILT_IN_LCEIL):
2702 CASE_FLT_FN (BUILT_IN_LLCEIL):
2703 builtin_optab = lceil_optab;
2704 fallback_fn = BUILT_IN_CEIL;
2705 break;
2706
2707 CASE_FLT_FN (BUILT_IN_IFLOOR):
2708 CASE_FLT_FN (BUILT_IN_LFLOOR):
2709 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2710 builtin_optab = lfloor_optab;
2711 fallback_fn = BUILT_IN_FLOOR;
2712 break;
2713
2714 default:
2715 gcc_unreachable ();
2716 }
2717
2718 /* Make a suitable register to place result in. */
2719 mode = TYPE_MODE (TREE_TYPE (exp));
2720
2721 target = gen_reg_rtx (mode);
2722
2723 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2724 need to expand the argument again. This way, we will not perform
2725 side effects more than once. */
2726 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2727
2728 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2729
2730 start_sequence ();
2731
2732 /* Compute into TARGET. */
2733 if (expand_sfix_optab (target, op0, builtin_optab))
2734 {
2735 /* Output the entire sequence. */
2736 insns = get_insns ();
2737 end_sequence ();
2738 emit_insn (insns);
2739 return target;
2740 }
2741
2742 /* If we were unable to expand via the builtin, stop the sequence
2743 (without outputting the insns). */
2744 end_sequence ();
2745
2746 /* Fall back to the floating point rounding function. */
2747 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2748
2749 /* For non-C99 targets we may end up without a fallback fndecl here
2750 if the user called __builtin_lfloor directly. In this case emit
2751 a call to the floor/ceil variants nevertheless. This should give
2752 the best user experience on targets without full C99 support. */
2753 if (fallback_fndecl == NULL_TREE)
2754 {
2755 tree fntype;
2756 const char *name = NULL;
2757
2758 switch (DECL_FUNCTION_CODE (fndecl))
2759 {
2760 case BUILT_IN_ICEIL:
2761 case BUILT_IN_LCEIL:
2762 case BUILT_IN_LLCEIL:
2763 name = "ceil";
2764 break;
2765 case BUILT_IN_ICEILF:
2766 case BUILT_IN_LCEILF:
2767 case BUILT_IN_LLCEILF:
2768 name = "ceilf";
2769 break;
2770 case BUILT_IN_ICEILL:
2771 case BUILT_IN_LCEILL:
2772 case BUILT_IN_LLCEILL:
2773 name = "ceill";
2774 break;
2775 case BUILT_IN_IFLOOR:
2776 case BUILT_IN_LFLOOR:
2777 case BUILT_IN_LLFLOOR:
2778 name = "floor";
2779 break;
2780 case BUILT_IN_IFLOORF:
2781 case BUILT_IN_LFLOORF:
2782 case BUILT_IN_LLFLOORF:
2783 name = "floorf";
2784 break;
2785 case BUILT_IN_IFLOORL:
2786 case BUILT_IN_LFLOORL:
2787 case BUILT_IN_LLFLOORL:
2788 name = "floorl";
2789 break;
2790 default:
2791 gcc_unreachable ();
2792 }
2793
2794 fntype = build_function_type_list (TREE_TYPE (arg),
2795 TREE_TYPE (arg), NULL_TREE);
2796 fallback_fndecl = build_fn_decl (name, fntype);
2797 }
2798
2799 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2800
2801 tmp = expand_normal (exp);
2802 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2803
2804 /* Truncate the result of floating point optab to integer
2805 via expand_fix (). */
2806 target = gen_reg_rtx (mode);
2807 expand_fix (target, tmp, 0);
2808
2809 return target;
2810 }
2811
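/* An illustrative view of the fallback path above:

     long l = __builtin_lfloor (x);

   is lowered, in effect, to

     long l = (long) floor (x);

   by calling the floor variant and truncating the result with
   expand_fix.  */
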
2812 /* Expand a call to one of the builtin math functions doing integer
2813 conversion (lrint).
2814 Return 0 if a normal call should be emitted rather than expanding the
2815 function in-line. EXP is the expression that is a call to the builtin
2816 function; if convenient, the result should be placed in TARGET. */
2817
2818 static rtx
2819 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2820 {
2821 convert_optab builtin_optab;
2822 rtx op0, insns;
2823 tree fndecl = get_callee_fndecl (exp);
2824 tree arg;
2825 enum machine_mode mode;
2826 enum built_in_function fallback_fn = BUILT_IN_NONE;
2827
2828 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2829 gcc_unreachable ();
2830
2831 arg = CALL_EXPR_ARG (exp, 0);
2832
2833 switch (DECL_FUNCTION_CODE (fndecl))
2834 {
2835 CASE_FLT_FN (BUILT_IN_IRINT):
2836 fallback_fn = BUILT_IN_LRINT;
2837 /* FALLTHRU */
2838 CASE_FLT_FN (BUILT_IN_LRINT):
2839 CASE_FLT_FN (BUILT_IN_LLRINT):
2840 builtin_optab = lrint_optab;
2841 break;
2842
2843 CASE_FLT_FN (BUILT_IN_IROUND):
2844 fallback_fn = BUILT_IN_LROUND;
2845 /* FALLTHRU */
2846 CASE_FLT_FN (BUILT_IN_LROUND):
2847 CASE_FLT_FN (BUILT_IN_LLROUND):
2848 builtin_optab = lround_optab;
2849 break;
2850
2851 default:
2852 gcc_unreachable ();
2853 }
2854
2855 /* There's no easy way to detect the case we need to set EDOM. */
2856 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2857 return NULL_RTX;
2858
2859 /* Make a suitable register to place result in. */
2860 mode = TYPE_MODE (TREE_TYPE (exp));
2861
2862 /* If errno need not be maintained, try the inline expansion first. */
2863 if (!flag_errno_math)
2864 {
2865 rtx result = gen_reg_rtx (mode);
2866
2867 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2868 need to expand the argument again. This way, we will not perform
2869 side effects more than once. */
2870 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2871
2872 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2873
2874 start_sequence ();
2875
2876 if (expand_sfix_optab (result, op0, builtin_optab))
2877 {
2878 /* Output the entire sequence. */
2879 insns = get_insns ();
2880 end_sequence ();
2881 emit_insn (insns);
2882 return result;
2883 }
2884
2885 /* If we were unable to expand via the builtin, stop the sequence
2886 (without outputting the insns) and call the library function
2887 with the stabilized argument list. */
2888 end_sequence ();
2889 }
2890
2891 if (fallback_fn != BUILT_IN_NONE)
2892 {
2893 /* Fall back to rounding to long int. Use the explicit declaration
2894 (implicit_p 0): for non-C99 targets, (int) round (x) should never be
2895 transformed into BUILT_IN_IROUND, and if __builtin_iround is called
2896 directly, emit a call to lround in the hope that the target provides
2897 at least some C99 functions. This should give the best user
2898 experience on targets without full C99 support. */
2899 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2900 fallback_fn, 0);
2901
2902 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2903 fallback_fndecl, 1, arg);
2904
2905 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2906 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2907 return convert_to_mode (mode, target, 0);
2908 }
2909
2910 return expand_call (exp, target, target == const0_rtx);
2911 }
2912
2913 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2914 a normal call should be emitted rather than expanding the function
2915 in-line. EXP is the expression that is a call to the builtin
2916 function; if convenient, the result should be placed in TARGET. */
2917
2918 static rtx
2919 expand_builtin_powi (tree exp, rtx target)
2920 {
2921 tree arg0, arg1;
2922 rtx op0, op1;
2923 enum machine_mode mode;
2924 enum machine_mode mode2;
2925
2926 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2927 return NULL_RTX;
2928
2929 arg0 = CALL_EXPR_ARG (exp, 0);
2930 arg1 = CALL_EXPR_ARG (exp, 1);
2931 mode = TYPE_MODE (TREE_TYPE (exp));
2932
2933 /* Emit a libcall to libgcc. */
2934
2935 /* Mode of the 2nd argument must match that of an int. */
2936 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2937
2938 if (target == NULL_RTX)
2939 target = gen_reg_rtx (mode);
2940
2941 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2942 if (GET_MODE (op0) != mode)
2943 op0 = convert_to_mode (mode, op0, 0);
2944 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2945 if (GET_MODE (op1) != mode2)
2946 op1 = convert_to_mode (mode2, op1, 0);
2947
2948 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2949 target, LCT_CONST, mode, 2,
2950 op0, mode, op1, mode2);
2951
2952 return target;
2953 }
2954
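/* Illustratively, for DFmode the libcall emitted above resolves to
   libgcc's

     double __powidf2 (double, int);

   so __builtin_powi (x, n) becomes __powidf2 (x, n); the analogous
   __powisf2, __powixf2 and __powitf2 entry points serve the other
   float modes.  */
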
2955 /* Expand expression EXP, which is a call to the strlen builtin. Return
2956 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2957 try to get the result in TARGET, if convenient. */
2958
2959 static rtx
2960 expand_builtin_strlen (tree exp, rtx target,
2961 enum machine_mode target_mode)
2962 {
2963 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2964 return NULL_RTX;
2965 else
2966 {
2967 struct expand_operand ops[4];
2968 rtx pat;
2969 tree len;
2970 tree src = CALL_EXPR_ARG (exp, 0);
2971 rtx src_reg, before_strlen;
2972 enum machine_mode insn_mode = target_mode;
2973 enum insn_code icode = CODE_FOR_nothing;
2974 unsigned int align;
2975
2976 /* If the length can be computed at compile-time, return it. */
2977 len = c_strlen (src, 0);
2978 if (len)
2979 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2980
2981 /* If the length can be computed at compile-time and is a constant
2982 integer, but there are side effects in src, evaluate
2983 src for side effects, then return len.
2984 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2985 can be optimized into: i++; x = 3; */
2986 len = c_strlen (src, 1);
2987 if (len && TREE_CODE (len) == INTEGER_CST)
2988 {
2989 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2990 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2991 }
2992
2993 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2994
2995 /* If SRC is not a pointer type, don't do this operation inline. */
2996 if (align == 0)
2997 return NULL_RTX;
2998
2999 /* Bail out if we can't compute strlen in the right mode. */
3000 while (insn_mode != VOIDmode)
3001 {
3002 icode = optab_handler (strlen_optab, insn_mode);
3003 if (icode != CODE_FOR_nothing)
3004 break;
3005
3006 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3007 }
3008 if (insn_mode == VOIDmode)
3009 return NULL_RTX;
3010
3011 /* Make a place to hold the source address. We will not expand
3012 the actual source until we are sure that the expansion will
3013 not fail -- there are trees that cannot be expanded twice. */
3014 src_reg = gen_reg_rtx (Pmode);
3015
3016 /* Mark the beginning of the strlen sequence so we can emit the
3017 source operand later. */
3018 before_strlen = get_last_insn ();
3019
3020 create_output_operand (&ops[0], target, insn_mode);
3021 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3022 create_integer_operand (&ops[2], 0);
3023 create_integer_operand (&ops[3], align);
3024 if (!maybe_expand_insn (icode, 4, ops))
3025 return NULL_RTX;
3026
3027 /* Now that we are assured of success, expand the source. */
3028 start_sequence ();
3029 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3030 if (pat != src_reg)
3031 {
3032 #ifdef POINTERS_EXTEND_UNSIGNED
3033 if (GET_MODE (pat) != Pmode)
3034 pat = convert_to_mode (Pmode, pat,
3035 POINTERS_EXTEND_UNSIGNED);
3036 #endif
3037 emit_move_insn (src_reg, pat);
3038 }
3039 pat = get_insns ();
3040 end_sequence ();
3041
3042 if (before_strlen)
3043 emit_insn_after (pat, before_strlen);
3044 else
3045 emit_insn_before (pat, get_insns ());
3046
3047 /* Return the value in the proper mode for this function. */
3048 if (GET_MODE (ops[0].value) == target_mode)
3049 target = ops[0].value;
3050 else if (target != 0)
3051 convert_move (target, ops[0].value, 0);
3052 else
3053 target = convert_to_mode (target_mode, ops[0].value, 0);
3054
3055 return target;
3056 }
3057 }
3058
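/* Illustrating the two c_strlen paths above:

     n = strlen ("abc");                     // folds to the constant 3
     n = strlen (i++ ? "xfoo" + 1 : "bar");  // becomes i++; n = 3;

   Only when no constant length is known is the strlen optab tried.  */
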
3059 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3060 bytes from constant string DATA + OFFSET and return it as target
3061 constant. */
3062
3063 static rtx
3064 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3065 enum machine_mode mode)
3066 {
3067 const char *str = (const char *) data;
3068
3069 gcc_assert (offset >= 0
3070 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3071 <= strlen (str) + 1));
3072
3073 return c_readstr (str + offset, mode);
3074 }
3075
3076 /* Expand a call EXP to the memcpy builtin.
3077 Return NULL_RTX if we failed, the caller should emit a normal call,
3078 otherwise try to get the result in TARGET, if convenient (and in
3079 mode MODE if that's convenient). */
3080
3081 static rtx
3082 expand_builtin_memcpy (tree exp, rtx target)
3083 {
3084 if (!validate_arglist (exp,
3085 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3086 return NULL_RTX;
3087 else
3088 {
3089 tree dest = CALL_EXPR_ARG (exp, 0);
3090 tree src = CALL_EXPR_ARG (exp, 1);
3091 tree len = CALL_EXPR_ARG (exp, 2);
3092 const char *src_str;
3093 unsigned int src_align = get_pointer_alignment (src);
3094 unsigned int dest_align = get_pointer_alignment (dest);
3095 rtx dest_mem, src_mem, dest_addr, len_rtx;
3096 HOST_WIDE_INT expected_size = -1;
3097 unsigned int expected_align = 0;
3098
3099 /* If DEST is not a pointer type, call the normal function. */
3100 if (dest_align == 0)
3101 return NULL_RTX;
3102
3103 /* If SRC is not a pointer type, don't do this
3104 operation in-line. */
3105 if (src_align == 0)
3106 return NULL_RTX;
3107
3108 if (currently_expanding_gimple_stmt)
3109 stringop_block_profile (currently_expanding_gimple_stmt,
3110 &expected_align, &expected_size);
3111
3112 if (expected_align < dest_align)
3113 expected_align = dest_align;
3114 dest_mem = get_memory_rtx (dest, len);
3115 set_mem_align (dest_mem, dest_align);
3116 len_rtx = expand_normal (len);
3117 src_str = c_getstr (src);
3118
3119 /* If SRC is a string constant and block move would be done
3120 by pieces, we can avoid loading the string from memory
3121 and instead store the computed constants directly.
3122 if (src_str
3123 && CONST_INT_P (len_rtx)
3124 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3125 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3126 CONST_CAST (char *, src_str),
3127 dest_align, false))
3128 {
3129 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3130 builtin_memcpy_read_str,
3131 CONST_CAST (char *, src_str),
3132 dest_align, false, 0);
3133 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3134 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3135 return dest_mem;
3136 }
3137
3138 src_mem = get_memory_rtx (src, len);
3139 set_mem_align (src_mem, src_align);
3140
3141 /* Copy word part most expediently. */
3142 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3143 CALL_EXPR_TAILCALL (exp)
3144 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3145 expected_align, expected_size);
3146
3147 if (dest_addr == 0)
3148 {
3149 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3150 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3151 }
3152 return dest_addr;
3153 }
3154 }
3155
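/* An illustrative case for the by-pieces path above:

     memcpy (buf, "hi!", 4);

   The constant string can pass the can_store_by_pieces test, so the
   expansion may store the four constant bytes directly (e.g. a single
   SImode immediate store) instead of loading them from the string's
   read-only copy.  */
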
3156 /* Expand a call EXP to the mempcpy builtin.
3157 Return NULL_RTX if we failed; the caller should emit a normal call,
3158 otherwise try to get the result in TARGET, if convenient (and in
3159 mode MODE if that's convenient). If ENDP is 0 return the
3160 destination pointer, if ENDP is 1 return the end pointer ala
3161 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3162 stpcpy. */
3163
3164 static rtx
3165 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3166 {
3167 if (!validate_arglist (exp,
3168 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3169 return NULL_RTX;
3170 else
3171 {
3172 tree dest = CALL_EXPR_ARG (exp, 0);
3173 tree src = CALL_EXPR_ARG (exp, 1);
3174 tree len = CALL_EXPR_ARG (exp, 2);
3175 return expand_builtin_mempcpy_args (dest, src, len,
3176 target, mode, /*endp=*/ 1);
3177 }
3178 }
3179
3180 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3181 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3182 so that this can also be called without constructing an actual CALL_EXPR.
3183 The other arguments and return value are the same as for
3184 expand_builtin_mempcpy. */
3185
3186 static rtx
3187 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3188 rtx target, enum machine_mode mode, int endp)
3189 {
3190 /* If return value is ignored, transform mempcpy into memcpy. */
3191 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3192 {
3193 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3194 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3195 dest, src, len);
3196 return expand_expr (result, target, mode, EXPAND_NORMAL);
3197 }
3198 else
3199 {
3200 const char *src_str;
3201 unsigned int src_align = get_pointer_alignment (src);
3202 unsigned int dest_align = get_pointer_alignment (dest);
3203 rtx dest_mem, src_mem, len_rtx;
3204
3205 /* If either SRC or DEST is not a pointer type, don't do this
3206 operation in-line. */
3207 if (dest_align == 0 || src_align == 0)
3208 return NULL_RTX;
3209
3210 /* If LEN is not constant, call the normal function. */
3211 if (! tree_fits_uhwi_p (len))
3212 return NULL_RTX;
3213
3214 len_rtx = expand_normal (len);
3215 src_str = c_getstr (src);
3216
3217 /* If SRC is a string constant and block move would be done
3218 by pieces, we can avoid loading the string from memory
3219 and instead store the computed constants directly.
3220 if (src_str
3221 && CONST_INT_P (len_rtx)
3222 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3223 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3224 CONST_CAST (char *, src_str),
3225 dest_align, false))
3226 {
3227 dest_mem = get_memory_rtx (dest, len);
3228 set_mem_align (dest_mem, dest_align);
3229 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3230 builtin_memcpy_read_str,
3231 CONST_CAST (char *, src_str),
3232 dest_align, false, endp);
3233 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3234 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3235 return dest_mem;
3236 }
3237
3238 if (CONST_INT_P (len_rtx)
3239 && can_move_by_pieces (INTVAL (len_rtx),
3240 MIN (dest_align, src_align)))
3241 {
3242 dest_mem = get_memory_rtx (dest, len);
3243 set_mem_align (dest_mem, dest_align);
3244 src_mem = get_memory_rtx (src, len);
3245 set_mem_align (src_mem, src_align);
3246 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3247 MIN (dest_align, src_align), endp);
3248 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3249 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3250 return dest_mem;
3251 }
3252
3253 return NULL_RTX;
3254 }
3255 }
3256
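/* Illustrating the result-unused transformation at the top of the
   function above:

     (void) mempcpy (d, s, n);

   is expanded as memcpy (d, s, n), for which more targets provide
   tuned expansions.  */
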
3257 #ifndef HAVE_movstr
3258 # define HAVE_movstr 0
3259 # define CODE_FOR_movstr CODE_FOR_nothing
3260 #endif
3261
3262 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3263 we failed, the caller should emit a normal call, otherwise try to
3264 get the result in TARGET, if convenient. If ENDP is 0 return the
3265 destination pointer, if ENDP is 1 return the end pointer ala
3266 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3267 stpcpy. */
3268
3269 static rtx
3270 expand_movstr (tree dest, tree src, rtx target, int endp)
3271 {
3272 struct expand_operand ops[3];
3273 rtx dest_mem;
3274 rtx src_mem;
3275
3276 if (!HAVE_movstr)
3277 return NULL_RTX;
3278
3279 dest_mem = get_memory_rtx (dest, NULL);
3280 src_mem = get_memory_rtx (src, NULL);
3281 if (!endp)
3282 {
3283 target = force_reg (Pmode, XEXP (dest_mem, 0));
3284 dest_mem = replace_equiv_address (dest_mem, target);
3285 }
3286
3287 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3288 create_fixed_operand (&ops[1], dest_mem);
3289 create_fixed_operand (&ops[2], src_mem);
3290 expand_insn (CODE_FOR_movstr, 3, ops);
3291
3292 if (endp && target != const0_rtx)
3293 {
3294 target = ops[0].value;
3295 /* movstr is supposed to set end to the address of the NUL
3296 terminator. If the caller requested a mempcpy-like return value,
3297 adjust it. */
3298 if (endp == 1)
3299 {
3300 rtx tem = plus_constant (GET_MODE (target),
3301 gen_lowpart (GET_MODE (target), target), 1);
3302 emit_move_insn (target, force_operand (tem, NULL_RTX));
3303 }
3304 }
3305 return target;
3306 }
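
/* For example, when SRC holds "abc" the movstr expansion leaves the
   result pointing at the NUL, i.e. DEST + 3. With ENDP == 2 that is
   returned as-is (stpcpy convention); with ENDP == 1 the adjustment
   above yields DEST + 4 (mempcpy convention, one past the NUL); and
   with ENDP == 0 the destination pointer itself is returned.  */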
3307
3308 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3309 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3310 try to get the result in TARGET, if convenient (and in mode MODE if that's
3311 convenient). */
3312
3313 static rtx
3314 expand_builtin_strcpy (tree exp, rtx target)
3315 {
3316 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3317 {
3318 tree dest = CALL_EXPR_ARG (exp, 0);
3319 tree src = CALL_EXPR_ARG (exp, 1);
3320 return expand_builtin_strcpy_args (dest, src, target);
3321 }
3322 return NULL_RTX;
3323 }
3324
3325 /* Helper function to do the actual work for expand_builtin_strcpy. The
3326 arguments to the builtin_strcpy call DEST and SRC are broken out
3327 so that this can also be called without constructing an actual CALL_EXPR.
3328 The other arguments and return value are the same as for
3329 expand_builtin_strcpy. */
3330
3331 static rtx
3332 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3333 {
3334 return expand_movstr (dest, src, target, /*endp=*/0);
3335 }
3336
3337 /* Expand a call EXP to the stpcpy builtin.
3338 Return NULL_RTX if we failed; the caller should emit a normal call.
3339 Otherwise try to get the result in TARGET, if convenient (and in
3340 mode MODE if that's convenient). */
3341
3342 static rtx
3343 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3344 {
3345 tree dst, src;
3346 location_t loc = EXPR_LOCATION (exp);
3347
3348 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3349 return NULL_RTX;
3350
3351 dst = CALL_EXPR_ARG (exp, 0);
3352 src = CALL_EXPR_ARG (exp, 1);
3353
3354 /* If return value is ignored, transform stpcpy into strcpy. */
3355 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3356 {
3357 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3358 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3359 return expand_expr (result, target, mode, EXPAND_NORMAL);
3360 }
3361 else
3362 {
3363 tree len, lenp1;
3364 rtx ret;
3365
3366 /* Ensure we get an actual string whose length can be evaluated at
3367 compile-time, not an expression containing a string. This is
3368 because the latter can produce pessimized code when used to
3369 compute the return value. */
3370 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3371 return expand_movstr (dst, src, target, /*endp=*/2);
3372
3373 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3374 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3375 target, mode, /*endp=*/2);
3376
3377 if (ret)
3378 return ret;
3379
3380 if (TREE_CODE (len) == INTEGER_CST)
3381 {
3382 rtx len_rtx = expand_normal (len);
3383
3384 if (CONST_INT_P (len_rtx))
3385 {
3386 ret = expand_builtin_strcpy_args (dst, src, target);
3387
3388 if (ret)
3389 {
3390 if (! target)
3391 {
3392 if (mode != VOIDmode)
3393 target = gen_reg_rtx (mode);
3394 else
3395 target = gen_reg_rtx (GET_MODE (ret));
3396 }
3397 if (GET_MODE (target) != GET_MODE (ret))
3398 ret = gen_lowpart (GET_MODE (target), ret);
3399
3400 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3401 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3402 gcc_assert (ret);
3403
3404 return target;
3405 }
3406 }
3407 }
3408
3409 return expand_movstr (dst, src, target, /*endp=*/2);
3410 }
3411 }
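
/* Worked example of the fallback above: for stpcpy (d, "abc"), LEN is
   the constant 3, so if the mempcpy expansion fails we expand
   strcpy (d, "abc") instead and rebuild the stpcpy result by hand as
   d + 3 via the plus_constant adjustment.  */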
3412
3413 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3414 bytes from constant string DATA + OFFSET and return it as target
3415 constant. */
3416
3417 rtx
3418 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3419 enum machine_mode mode)
3420 {
3421 const char *str = (const char *) data;
3422
3423 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3424 return const0_rtx;
3425
3426 return c_readstr (str + offset, mode);
3427 }
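
/* E.g. for DATA = "hi" and a 4-byte MODE, offset 0 reads 'h' 'i' and
   two NULs (c_readstr zero-fills past the terminating NUL), while any
   OFFSET greater than strlen ("hi") yields const0_rtx; that is how
   strncpy's trailing zero padding falls out of store_by_pieces.  */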
3428
3429 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3430 NULL_RTX if we failed; the caller should emit a normal call. */
3431
3432 static rtx
3433 expand_builtin_strncpy (tree exp, rtx target)
3434 {
3435 location_t loc = EXPR_LOCATION (exp);
3436
3437 if (validate_arglist (exp,
3438 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3439 {
3440 tree dest = CALL_EXPR_ARG (exp, 0);
3441 tree src = CALL_EXPR_ARG (exp, 1);
3442 tree len = CALL_EXPR_ARG (exp, 2);
3443 tree slen = c_strlen (src, 1);
3444
3445 /* We must be passed a constant LEN and a SRC of known constant length. */
3446 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3447 return NULL_RTX;
3448
3449 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3450
3451 /* We're required to pad with trailing zeros if the requested
3452 len is greater than strlen(s2)+1. In that case try to
3453 use store_by_pieces; if that fails, punt. */
3454 if (tree_int_cst_lt (slen, len))
3455 {
3456 unsigned int dest_align = get_pointer_alignment (dest);
3457 const char *p = c_getstr (src);
3458 rtx dest_mem;
3459
3460 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3461 || !can_store_by_pieces (tree_to_uhwi (len),
3462 builtin_strncpy_read_str,
3463 CONST_CAST (char *, p),
3464 dest_align, false))
3465 return NULL_RTX;
3466
3467 dest_mem = get_memory_rtx (dest, len);
3468 store_by_pieces (dest_mem, tree_to_uhwi (len),
3469 builtin_strncpy_read_str,
3470 CONST_CAST (char *, p), dest_align, false, 0);
3471 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3472 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3473 return dest_mem;
3474 }
3475 }
3476 return NULL_RTX;
3477 }
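
/* E.g. strncpy (dst, "ab", 8): SLEN + 1 == 3 is less than 8, so the
   inline path writes all eight bytes via the callback above: 'a' 'b'
   followed by six NULs, exactly the padding strncpy requires.  */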
3478
3479 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3480 bytes from constant string DATA + OFFSET and return it as target
3481 constant. */
3482
3483 rtx
3484 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3485 enum machine_mode mode)
3486 {
3487 const char *c = (const char *) data;
3488 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3489
3490 memset (p, *c, GET_MODE_SIZE (mode));
3491
3492 return c_readstr (p, mode);
3493 }
3494
3495 /* Callback routine for store_by_pieces. Return the RTL of a register
3496 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3497 char value given in the RTL register DATA. For example, if MODE is
3498 4 bytes wide, return the RTL for 0x01010101*DATA. */
3499
3500 static rtx
3501 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3502 enum machine_mode mode)
3503 {
3504 rtx target, coeff;
3505 size_t size;
3506 char *p;
3507
3508 size = GET_MODE_SIZE (mode);
3509 if (size == 1)
3510 return (rtx) data;
3511
3512 p = XALLOCAVEC (char, size);
3513 memset (p, 1, size);
3514 coeff = c_readstr (p, mode);
3515
3516 target = convert_to_mode (mode, (rtx) data, 1);
3517 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3518 return force_reg (mode, target);
3519 }
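
/* Worked instance of the comment above: for a 4-byte mode and DATA
   holding 0xab, the coefficient read back from "\1\1\1\1" is
   0x01010101, and the multiply yields 0xab * 0x01010101 == 0xabababab,
   four replicated copies of the byte ready for word-sized stores.  */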
3520
3521 /* Expand expression EXP, which is a call to the memset builtin. Return
3522 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3523 try to get the result in TARGET, if convenient (and in mode MODE if that's
3524 convenient). */
3525
3526 static rtx
3527 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3528 {
3529 if (!validate_arglist (exp,
3530 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3531 return NULL_RTX;
3532 else
3533 {
3534 tree dest = CALL_EXPR_ARG (exp, 0);
3535 tree val = CALL_EXPR_ARG (exp, 1);
3536 tree len = CALL_EXPR_ARG (exp, 2);
3537 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3538 }
3539 }
3540
3541 /* Helper function to do the actual work for expand_builtin_memset. The
3542 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3543 so that this can also be called without constructing an actual CALL_EXPR.
3544 The other arguments and return value are the same as for
3545 expand_builtin_memset. */
3546
3547 static rtx
3548 expand_builtin_memset_args (tree dest, tree val, tree len,
3549 rtx target, enum machine_mode mode, tree orig_exp)
3550 {
3551 tree fndecl, fn;
3552 enum built_in_function fcode;
3553 enum machine_mode val_mode;
3554 char c;
3555 unsigned int dest_align;
3556 rtx dest_mem, dest_addr, len_rtx;
3557 HOST_WIDE_INT expected_size = -1;
3558 unsigned int expected_align = 0;
3559
3560 dest_align = get_pointer_alignment (dest);
3561
3562 /* If DEST is not a pointer type, don't do this operation in-line. */
3563 if (dest_align == 0)
3564 return NULL_RTX;
3565
3566 if (currently_expanding_gimple_stmt)
3567 stringop_block_profile (currently_expanding_gimple_stmt,
3568 &expected_align, &expected_size);
3569
3570 if (expected_align < dest_align)
3571 expected_align = dest_align;
3572
3573 /* If the LEN parameter is zero, return DEST. */
3574 if (integer_zerop (len))
3575 {
3576 /* Evaluate and ignore VAL in case it has side-effects. */
3577 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3578 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3579 }
3580
3581 /* Stabilize the arguments in case we fail. */
3582 dest = builtin_save_expr (dest);
3583 val = builtin_save_expr (val);
3584 len = builtin_save_expr (len);
3585
3586 len_rtx = expand_normal (len);
3587 dest_mem = get_memory_rtx (dest, len);
3588 val_mode = TYPE_MODE (unsigned_char_type_node);
3589
3590 if (TREE_CODE (val) != INTEGER_CST)
3591 {
3592 rtx val_rtx;
3593
3594 val_rtx = expand_normal (val);
3595 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3596
3597 /* Assume that we can memset by pieces if we can store
3598 the coefficients by pieces (in the required modes).
3599 We can't pass builtin_memset_gen_str as that emits RTL. */
3600 c = 1;
3601 if (tree_fits_uhwi_p (len)
3602 && can_store_by_pieces (tree_to_uhwi (len),
3603 builtin_memset_read_str, &c, dest_align,
3604 true))
3605 {
3606 val_rtx = force_reg (val_mode, val_rtx);
3607 store_by_pieces (dest_mem, tree_to_uhwi (len),
3608 builtin_memset_gen_str, val_rtx, dest_align,
3609 true, 0);
3610 }
3611 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3612 dest_align, expected_align,
3613 expected_size))
3614 goto do_libcall;
3615
3616 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3617 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3618 return dest_mem;
3619 }
3620
3621 if (target_char_cast (val, &c))
3622 goto do_libcall;
3623
3624 if (c)
3625 {
3626 if (tree_fits_uhwi_p (len)
3627 && can_store_by_pieces (tree_to_uhwi (len),
3628 builtin_memset_read_str, &c, dest_align,
3629 true))
3630 store_by_pieces (dest_mem, tree_to_uhwi (len),
3631 builtin_memset_read_str, &c, dest_align, true, 0);
3632 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3633 gen_int_mode (c, val_mode),
3634 dest_align, expected_align,
3635 expected_size))
3636 goto do_libcall;
3637
3638 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3639 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3640 return dest_mem;
3641 }
3642
3643 set_mem_align (dest_mem, dest_align);
3644 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3645 CALL_EXPR_TAILCALL (orig_exp)
3646 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3647 expected_align, expected_size);
3648
3649 if (dest_addr == 0)
3650 {
3651 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3652 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3653 }
3654
3655 return dest_addr;
3656
3657 do_libcall:
3658 fndecl = get_callee_fndecl (orig_exp);
3659 fcode = DECL_FUNCTION_CODE (fndecl);
3660 if (fcode == BUILT_IN_MEMSET)
3661 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3662 dest, val, len);
3663 else if (fcode == BUILT_IN_BZERO)
3664 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3665 dest, len);
3666 else
3667 gcc_unreachable ();
3668 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3669 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3670 return expand_call (fn, target, target == const0_rtx);
3671 }
3672
3673 /* Expand expression EXP, which is a call to the bzero builtin. Return
3674 NULL_RTX if we failed; the caller should emit a normal call. */
3675
3676 static rtx
3677 expand_builtin_bzero (tree exp)
3678 {
3679 tree dest, size;
3680 location_t loc = EXPR_LOCATION (exp);
3681
3682 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3683 return NULL_RTX;
3684
3685 dest = CALL_EXPR_ARG (exp, 0);
3686 size = CALL_EXPR_ARG (exp, 1);
3687
3688 /* New argument list transforming bzero(ptr x, int y) to
3689 memset(ptr x, int 0, size_t y). This is done this way
3690 so that if it isn't expanded inline, we fall back to
3691 calling bzero instead of memset. */
3692
3693 return expand_builtin_memset_args (dest, integer_zero_node,
3694 fold_convert_loc (loc,
3695 size_type_node, size),
3696 const0_rtx, VOIDmode, exp);
3697 }
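
/* Sketch of that transformation: a call such as

     bzero (buf, n);

   is expanded as if it were

     memset (buf, 0, (size_t) n);

   while ORIG_EXP remains the bzero CALL_EXPR, so the do_libcall path
   in expand_builtin_memset_args still emits a call to bzero.  */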
3698
3699 /* Expand expression EXP, which is a call to the memcmp built-in function.
3700 Return NULL_RTX if we failed and the caller should emit a normal call,
3701 otherwise try to get the result in TARGET, if convenient (and in mode
3702 MODE, if that's convenient). */
3703
3704 static rtx
3705 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3706 ATTRIBUTE_UNUSED enum machine_mode mode)
3707 {
3708 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3709
3710 if (!validate_arglist (exp,
3711 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3712 return NULL_RTX;
3713
3714 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3715 implementing memcmp because it will stop if it encounters two
3716 zero bytes. */
3717 #if defined HAVE_cmpmemsi
3718 {
3719 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3720 rtx result;
3721 rtx insn;
3722 tree arg1 = CALL_EXPR_ARG (exp, 0);
3723 tree arg2 = CALL_EXPR_ARG (exp, 1);
3724 tree len = CALL_EXPR_ARG (exp, 2);
3725
3726 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3727 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3728 enum machine_mode insn_mode;
3729
3730 if (HAVE_cmpmemsi)
3731 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3732 else
3733 return NULL_RTX;
3734
3735 /* If we don't have POINTER_TYPE, call the function. */
3736 if (arg1_align == 0 || arg2_align == 0)
3737 return NULL_RTX;
3738
3739 /* Make a place to write the result of the instruction. */
3740 result = target;
3741 if (! (result != 0
3742 && REG_P (result) && GET_MODE (result) == insn_mode
3743 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3744 result = gen_reg_rtx (insn_mode);
3745
3746 arg1_rtx = get_memory_rtx (arg1, len);
3747 arg2_rtx = get_memory_rtx (arg2, len);
3748 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3749
3750 /* Set MEM_SIZE as appropriate. */
3751 if (CONST_INT_P (arg3_rtx))
3752 {
3753 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3754 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3755 }
3756
3757 if (HAVE_cmpmemsi)
3758 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3759 GEN_INT (MIN (arg1_align, arg2_align)));
3760 else
3761 gcc_unreachable ();
3762
3763 if (insn)
3764 emit_insn (insn);
3765 else
3766 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3767 TYPE_MODE (integer_type_node), 3,
3768 XEXP (arg1_rtx, 0), Pmode,
3769 XEXP (arg2_rtx, 0), Pmode,
3770 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3771 TYPE_UNSIGNED (sizetype)),
3772 TYPE_MODE (sizetype));
3773
3774 /* Return the value in the proper mode for this function. */
3775 mode = TYPE_MODE (TREE_TYPE (exp));
3776 if (GET_MODE (result) == mode)
3777 return result;
3778 else if (target != 0)
3779 {
3780 convert_move (target, result, 0);
3781 return target;
3782 }
3783 else
3784 return convert_to_mode (mode, result, 0);
3785 }
3786 #endif /* HAVE_cmpmemsi. */
3787
3788 return NULL_RTX;
3789 }
3790
3791 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3792 if we failed; the caller should emit a normal call. Otherwise try to get
3793 the result in TARGET, if convenient. */
3794
3795 static rtx
3796 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3797 {
3798 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3799 return NULL_RTX;
3800
3801 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3802 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3803 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3804 {
3805 rtx arg1_rtx, arg2_rtx;
3806 rtx result, insn = NULL_RTX;
3807 tree fndecl, fn;
3808 tree arg1 = CALL_EXPR_ARG (exp, 0);
3809 tree arg2 = CALL_EXPR_ARG (exp, 1);
3810
3811 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3812 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3813
3814 /* If we don't have POINTER_TYPE, call the function. */
3815 if (arg1_align == 0 || arg2_align == 0)
3816 return NULL_RTX;
3817
3818 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3819 arg1 = builtin_save_expr (arg1);
3820 arg2 = builtin_save_expr (arg2);
3821
3822 arg1_rtx = get_memory_rtx (arg1, NULL);
3823 arg2_rtx = get_memory_rtx (arg2, NULL);
3824
3825 #ifdef HAVE_cmpstrsi
3826 /* Try to call cmpstrsi. */
3827 if (HAVE_cmpstrsi)
3828 {
3829 enum machine_mode insn_mode
3830 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3831
3832 /* Make a place to write the result of the instruction. */
3833 result = target;
3834 if (! (result != 0
3835 && REG_P (result) && GET_MODE (result) == insn_mode
3836 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3837 result = gen_reg_rtx (insn_mode);
3838
3839 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3840 GEN_INT (MIN (arg1_align, arg2_align)));
3841 }
3842 #endif
3843 #ifdef HAVE_cmpstrnsi
3844 /* Try to determine at least one length and call cmpstrnsi. */
3845 if (!insn && HAVE_cmpstrnsi)
3846 {
3847 tree len;
3848 rtx arg3_rtx;
3849
3850 enum machine_mode insn_mode
3851 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3852 tree len1 = c_strlen (arg1, 1);
3853 tree len2 = c_strlen (arg2, 1);
3854
3855 if (len1)
3856 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3857 if (len2)
3858 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3859
3860 /* If we don't have a constant length for the first, use the length
3861 of the second, if we know it. We don't require a constant for
3862 this case; some cost analysis could be done if both are available
3863 but neither is constant. For now, assume they're equally cheap,
3864 unless one has side effects. If both strings have constant lengths,
3865 use the smaller. */
3866
3867 if (!len1)
3868 len = len2;
3869 else if (!len2)
3870 len = len1;
3871 else if (TREE_SIDE_EFFECTS (len1))
3872 len = len2;
3873 else if (TREE_SIDE_EFFECTS (len2))
3874 len = len1;
3875 else if (TREE_CODE (len1) != INTEGER_CST)
3876 len = len2;
3877 else if (TREE_CODE (len2) != INTEGER_CST)
3878 len = len1;
3879 else if (tree_int_cst_lt (len1, len2))
3880 len = len1;
3881 else
3882 len = len2;
3883
3884 /* If both arguments have side effects, we cannot optimize. */
3885 if (!len || TREE_SIDE_EFFECTS (len))
3886 goto do_libcall;
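
/* E.g. for strcmp ("foo", s2) the chain above picks
   len = strlen ("foo") + 1 == 4; when neither length is known, LEN
   stays NULL_TREE and we take the do_libcall path instead.  */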
3887
3888 arg3_rtx = expand_normal (len);
3889
3890 /* Make a place to write the result of the instruction. */
3891 result = target;
3892 if (! (result != 0
3893 && REG_P (result) && GET_MODE (result) == insn_mode
3894 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3895 result = gen_reg_rtx (insn_mode);
3896
3897 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3898 GEN_INT (MIN (arg1_align, arg2_align)));
3899 }
3900 #endif
3901
3902 if (insn)
3903 {
3904 enum machine_mode mode;
3905 emit_insn (insn);
3906
3907 /* Return the value in the proper mode for this function. */
3908 mode = TYPE_MODE (TREE_TYPE (exp));
3909 if (GET_MODE (result) == mode)
3910 return result;
3911 if (target == 0)
3912 return convert_to_mode (mode, result, 0);
3913 convert_move (target, result, 0);
3914 return target;
3915 }
3916
3917 /* Expand the library call ourselves using a stabilized argument
3918 list to avoid re-evaluating the function's arguments twice. */
3919 #ifdef HAVE_cmpstrnsi
3920 do_libcall:
3921 #endif
3922 fndecl = get_callee_fndecl (exp);
3923 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3924 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3925 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3926 return expand_call (fn, target, target == const0_rtx);
3927 }
3928 #endif
3929 return NULL_RTX;
3930 }
3931
3932 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3933 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3934 try to get the result in TARGET, if convenient. */
3935
3936 static rtx
3937 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3938 ATTRIBUTE_UNUSED enum machine_mode mode)
3939 {
3940 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3941
3942 if (!validate_arglist (exp,
3943 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3944 return NULL_RTX;
3945
3946 /* If c_strlen can determine an expression for one of the string
3947 lengths, and it doesn't have side effects, then emit cmpstrnsi
3948 using length MIN(strlen(string)+1, arg3). */
3949 #ifdef HAVE_cmpstrnsi
3950 if (HAVE_cmpstrnsi)
3951 {
3952 tree len, len1, len2;
3953 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3954 rtx result, insn;
3955 tree fndecl, fn;
3956 tree arg1 = CALL_EXPR_ARG (exp, 0);
3957 tree arg2 = CALL_EXPR_ARG (exp, 1);
3958 tree arg3 = CALL_EXPR_ARG (exp, 2);
3959
3960 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3961 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3962 enum machine_mode insn_mode
3963 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3964
3965 len1 = c_strlen (arg1, 1);
3966 len2 = c_strlen (arg2, 1);
3967
3968 if (len1)
3969 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3970 if (len2)
3971 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3972
3973 /* If we don't have a constant length for the first, use the length
3974 of the second, if we know it. We don't require a constant for
3975 this case; some cost analysis could be done if both are available
3976 but neither is constant. For now, assume they're equally cheap,
3977 unless one has side effects. If both strings have constant lengths,
3978 use the smaller. */
3979
3980 if (!len1)
3981 len = len2;
3982 else if (!len2)
3983 len = len1;
3984 else if (TREE_SIDE_EFFECTS (len1))
3985 len = len2;
3986 else if (TREE_SIDE_EFFECTS (len2))
3987 len = len1;
3988 else if (TREE_CODE (len1) != INTEGER_CST)
3989 len = len2;
3990 else if (TREE_CODE (len2) != INTEGER_CST)
3991 len = len1;
3992 else if (tree_int_cst_lt (len1, len2))
3993 len = len1;
3994 else
3995 len = len2;
3996
3997 /* If both arguments have side effects, we cannot optimize. */
3998 if (!len || TREE_SIDE_EFFECTS (len))
3999 return NULL_RTX;
4000
4001 /* The actual new length parameter is MIN(len,arg3). */
4002 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4003 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4004
4005 /* If we don't have POINTER_TYPE, call the function. */
4006 if (arg1_align == 0 || arg2_align == 0)
4007 return NULL_RTX;
4008
4009 /* Make a place to write the result of the instruction. */
4010 result = target;
4011 if (! (result != 0
4012 && REG_P (result) && GET_MODE (result) == insn_mode
4013 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4014 result = gen_reg_rtx (insn_mode);
4015
4016 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4017 arg1 = builtin_save_expr (arg1);
4018 arg2 = builtin_save_expr (arg2);
4019 len = builtin_save_expr (len);
4020
4021 arg1_rtx = get_memory_rtx (arg1, len);
4022 arg2_rtx = get_memory_rtx (arg2, len);
4023 arg3_rtx = expand_normal (len);
4024 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4025 GEN_INT (MIN (arg1_align, arg2_align)));
4026 if (insn)
4027 {
4028 emit_insn (insn);
4029
4030 /* Return the value in the proper mode for this function. */
4031 mode = TYPE_MODE (TREE_TYPE (exp));
4032 if (GET_MODE (result) == mode)
4033 return result;
4034 if (target == 0)
4035 return convert_to_mode (mode, result, 0);
4036 convert_move (target, result, 0);
4037 return target;
4038 }
4039
4040 /* Expand the library call ourselves using a stabilized argument
4041 list to avoid re-evaluating the function's arguments twice. */
4042 fndecl = get_callee_fndecl (exp);
4043 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4044 arg1, arg2, len);
4045 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4046 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4047 return expand_call (fn, target, target == const0_rtx);
4048 }
4049 #endif
4050 return NULL_RTX;
4051 }
4052
4053 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4054 if that's convenient. */
4055
4056 rtx
4057 expand_builtin_saveregs (void)
4058 {
4059 rtx val, seq;
4060
4061 /* Don't do __builtin_saveregs more than once in a function.
4062 Save the result of the first call and reuse it. */
4063 if (saveregs_value != 0)
4064 return saveregs_value;
4065
4066 /* When this function is called, registers must be saved on entry
4067 to the function being compiled, so we migrate the call to its
4068 first insn. */
4069
4070 start_sequence ();
4071
4072 /* Do whatever the machine needs done in this case. */
4073 val = targetm.calls.expand_builtin_saveregs ();
4074
4075 seq = get_insns ();
4076 end_sequence ();
4077
4078 saveregs_value = val;
4079
4080 /* Put the insns after the NOTE that starts the function. If this
4081 is inside a start_sequence, make the outer-level insn chain current, so
4082 the code is placed at the start of the function. */
4083 push_topmost_sequence ();
4084 emit_insn_after (seq, entry_of_function ());
4085 pop_topmost_sequence ();
4086
4087 return val;
4088 }
4089
4090 /* Expand a call to __builtin_next_arg. */
4091
4092 static rtx
4093 expand_builtin_next_arg (void)
4094 {
4095 /* Checking arguments is already done in fold_builtin_next_arg,
4096 which must be called before this function. */
4097 return expand_binop (ptr_mode, add_optab,
4098 crtl->args.internal_arg_pointer,
4099 crtl->args.arg_offset_rtx,
4100 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4101 }
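
/* A sketch of the computed value, assuming the usual argument-pointer
   layout:

     next_arg = (char *) internal_arg_pointer + arg_offset

   i.e. the address just past the last named parameter, which is where
   va_start points a va_list.  */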
4102
4103 /* Make it easier for the backends by protecting the valist argument
4104 from multiple evaluations. */
4105
4106 static tree
4107 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4108 {
4109 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4110
4111 /* The current way of determining the type of valist is completely
4112 bogus. We should have the information on the va builtin instead. */
4113 if (!vatype)
4114 vatype = targetm.fn_abi_va_list (cfun->decl);
4115
4116 if (TREE_CODE (vatype) == ARRAY_TYPE)
4117 {
4118 if (TREE_SIDE_EFFECTS (valist))
4119 valist = save_expr (valist);
4120
4121 /* For this case, the backends will be expecting a pointer to
4122 vatype, but it's possible we've actually been given an array
4123 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4124 So fix it. */
4125 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4126 {
4127 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4128 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4129 }
4130 }
4131 else
4132 {
4133 tree pt = build_pointer_type (vatype);
4134
4135 if (! needs_lvalue)
4136 {
4137 if (! TREE_SIDE_EFFECTS (valist))
4138 return valist;
4139
4140 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4141 TREE_SIDE_EFFECTS (valist) = 1;
4142 }
4143
4144 if (TREE_SIDE_EFFECTS (valist))
4145 valist = save_expr (valist);
4146 valist = fold_build2_loc (loc, MEM_REF,
4147 vatype, valist, build_int_cst (pt, 0));
4148 }
4149
4150 return valist;
4151 }
4152
4153 /* The "standard" definition of va_list is void*. */
4154
4155 tree
4156 std_build_builtin_va_list (void)
4157 {
4158 return ptr_type_node;
4159 }
4160
4161 /* The "standard" abi va_list is va_list_type_node. */
4162
4163 tree
4164 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4165 {
4166 return va_list_type_node;
4167 }
4168
4169 /* The "standard" type of va_list is va_list_type_node. */
4170
4171 tree
4172 std_canonical_va_list_type (tree type)
4173 {
4174 tree wtype, htype;
4175
4176 if (INDIRECT_REF_P (type))
4177 type = TREE_TYPE (type);
4178 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4179 type = TREE_TYPE (type);
4180 wtype = va_list_type_node;
4181 htype = type;
4182 /* Treat structure va_list types. */
4183 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4184 htype = TREE_TYPE (htype);
4185 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4186 {
4187 /* If va_list is an array type, the argument may have decayed
4188 to a pointer type, e.g. by being passed to another function.
4189 In that case, unwrap both types so that we can compare the
4190 underlying records. */
4191 if (TREE_CODE (htype) == ARRAY_TYPE
4192 || POINTER_TYPE_P (htype))
4193 {
4194 wtype = TREE_TYPE (wtype);
4195 htype = TREE_TYPE (htype);
4196 }
4197 }
4198 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4199 return va_list_type_node;
4200
4201 return NULL_TREE;
4202 }
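
/* Example of the decay handled above: on a target whose va_list is an
   array type such as "struct __va_tag[1]" (an illustrative name), a
   va_list parameter arrives decayed to struct __va_tag *, so both
   WTYPE and HTYPE are unwrapped one level before the main variants of
   the underlying record types are compared.  */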
4203
4204 /* The "standard" implementation of va_start: just assign `nextarg' to
4205 the variable. */
4206
4207 void
4208 std_expand_builtin_va_start (tree valist, rtx nextarg)
4209 {
4210 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4211 convert_move (va_r, nextarg, 0);
4212 }
4213
4214 /* Expand EXP, a call to __builtin_va_start. */
4215
4216 static rtx
4217 expand_builtin_va_start (tree exp)
4218 {
4219 rtx nextarg;
4220 tree valist;
4221 location_t loc = EXPR_LOCATION (exp);
4222
4223 if (call_expr_nargs (exp) < 2)
4224 {
4225 error_at (loc, "too few arguments to function %<va_start%>");
4226 return const0_rtx;
4227 }
4228
4229 if (fold_builtin_next_arg (exp, true))
4230 return const0_rtx;
4231
4232 nextarg = expand_builtin_next_arg ();
4233 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4234
4235 if (targetm.expand_builtin_va_start)
4236 targetm.expand_builtin_va_start (valist, nextarg);
4237 else
4238 std_expand_builtin_va_start (valist, nextarg);
4239
4240 return const0_rtx;
4241 }
4242
4243
4244 /* Return a dummy expression of type TYPE in order to keep going after an
4245 error. */
4246
4247 static tree
4248 dummy_object (tree type)
4249 {
4250 tree t = build_int_cst (build_pointer_type (type), 0);
4251 return build2 (MEM_REF, type, t, t);
4252 }
4253
4254 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4255 builtin function, but a very special sort of operator. */
4256
4257 enum gimplify_status
4258 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4259 {
4260 tree promoted_type, have_va_type;
4261 tree valist = TREE_OPERAND (*expr_p, 0);
4262 tree type = TREE_TYPE (*expr_p);
4263 tree t;
4264 location_t loc = EXPR_LOCATION (*expr_p);
4265
4266 /* Verify that valist is of the proper type. */
4267 have_va_type = TREE_TYPE (valist);
4268 if (have_va_type == error_mark_node)
4269 return GS_ERROR;
4270 have_va_type = targetm.canonical_va_list_type (have_va_type);
4271
4272 if (have_va_type == NULL_TREE)
4273 {
4274 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4275 return GS_ERROR;
4276 }
4277
4278 /* Generate a diagnostic for requesting data of a type that cannot
4279 be passed through `...' due to type promotion at the call site. */
4280 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4281 != type)
4282 {
4283 static bool gave_help;
4284 bool warned;
4285
4286 /* Unfortunately, this is merely undefined, rather than a constraint
4287 violation, so we cannot make this an error. If this call is never
4288 executed, the program is still strictly conforming. */
4289 warned = warning_at (loc, 0,
4290 "%qT is promoted to %qT when passed through %<...%>",
4291 type, promoted_type);
4292 if (!gave_help && warned)
4293 {
4294 gave_help = true;
4295 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4296 promoted_type, type);
4297 }
4298
4299 /* We can, however, treat "undefined" any way we please.
4300 Call abort to encourage the user to fix the program. */
4301 if (warned)
4302 inform (loc, "if this code is reached, the program will abort");
4303 /* Before the abort, allow the evaluation of the va_list
4304 expression to exit or longjmp. */
4305 gimplify_and_add (valist, pre_p);
4306 t = build_call_expr_loc (loc,
4307 builtin_decl_implicit (BUILT_IN_TRAP), 0);
4308 gimplify_and_add (t, pre_p);
4309
4310 /* This is dead code, but go ahead and finish so that the
4311 mode of the result comes out right. */
4312 *expr_p = dummy_object (type);
4313 return GS_ALL_DONE;
4314 }
4315 else
4316 {
4317 /* Make it easier for the backends by protecting the valist argument
4318 from multiple evaluations. */
4319 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4320 {
4321 /* For this case, the backends will be expecting a pointer to
4322 TREE_TYPE (abi), but it's possible we've
4323 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4324 So fix it. */
4325 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4326 {
4327 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4328 valist = fold_convert_loc (loc, p1,
4329 build_fold_addr_expr_loc (loc, valist));
4330 }
4331
4332 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4333 }
4334 else
4335 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4336
4337 if (!targetm.gimplify_va_arg_expr)
4338 /* FIXME: Once most targets are converted we should merely
4339 assert this is non-null. */
4340 return GS_ALL_DONE;
4341
4342 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4343 return GS_OK;
4344 }
4345 }
4346
4347 /* Expand EXP, a call to __builtin_va_end. */
4348
4349 static rtx
4350 expand_builtin_va_end (tree exp)
4351 {
4352 tree valist = CALL_EXPR_ARG (exp, 0);
4353
4354 /* Evaluate for side effects, if needed. I hate macros that don't
4355 do that. */
4356 if (TREE_SIDE_EFFECTS (valist))
4357 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4358
4359 return const0_rtx;
4360 }
4361
4362 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4363 builtin rather than just as an assignment in stdarg.h because of the
4364 nastiness of array-type va_list types. */
4365
4366 static rtx
4367 expand_builtin_va_copy (tree exp)
4368 {
4369 tree dst, src, t;
4370 location_t loc = EXPR_LOCATION (exp);
4371
4372 dst = CALL_EXPR_ARG (exp, 0);
4373 src = CALL_EXPR_ARG (exp, 1);
4374
4375 dst = stabilize_va_list_loc (loc, dst, 1);
4376 src = stabilize_va_list_loc (loc, src, 0);
4377
4378 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4379
4380 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4381 {
4382 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4383 TREE_SIDE_EFFECTS (t) = 1;
4384 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4385 }
4386 else
4387 {
4388 rtx dstb, srcb, size;
4389
4390 /* Evaluate to pointers. */
4391 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4392 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4393 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4394 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4395
4396 dstb = convert_memory_address (Pmode, dstb);
4397 srcb = convert_memory_address (Pmode, srcb);
4398
4399 /* "Dereference" to BLKmode memories. */
4400 dstb = gen_rtx_MEM (BLKmode, dstb);
4401 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4402 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4403 srcb = gen_rtx_MEM (BLKmode, srcb);
4404 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4405 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4406
4407 /* Copy. */
4408 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4409 }
4410
4411 return const0_rtx;
4412 }
4413
4414 /* Expand a call to one of the builtin functions __builtin_frame_address or
4415 __builtin_return_address. */
4416
4417 static rtx
4418 expand_builtin_frame_address (tree fndecl, tree exp)
4419 {
4420 /* The argument must be a nonnegative integer constant.
4421 It counts the number of frames to scan up the stack.
4422 The value is the frame address or return address of that frame. */
4423 if (call_expr_nargs (exp) == 0)
4424 /* Warning about missing arg was already issued. */
4425 return const0_rtx;
4426 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4427 {
4428 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4429 error ("invalid argument to %<__builtin_frame_address%>");
4430 else
4431 error ("invalid argument to %<__builtin_return_address%>");
4432 return const0_rtx;
4433 }
4434 else
4435 {
4436 rtx tem
4437 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4438 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4439
4440 /* Some ports cannot access arbitrary stack frames. */
4441 if (tem == NULL)
4442 {
4443 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4444 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4445 else
4446 warning (0, "unsupported argument to %<__builtin_return_address%>");
4447 return const0_rtx;
4448 }
4449
4450 /* For __builtin_frame_address, return what we've got. */
4451 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4452 return tem;
4453
4454 if (!REG_P (tem)
4455 && ! CONSTANT_P (tem))
4456 tem = copy_addr_to_reg (tem);
4457 return tem;
4458 }
4459 }
4460
4461 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4462 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4463 is the same as for allocate_dynamic_stack_space. */
4464
4465 static rtx
4466 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4467 {
4468 rtx op0;
4469 rtx result;
4470 bool valid_arglist;
4471 unsigned int align;
4472 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4473 == BUILT_IN_ALLOCA_WITH_ALIGN);
4474
4475 /* Emit normal call if we use mudflap. */
4476 if (flag_mudflap)
4477 return NULL_RTX;
4478
4479 valid_arglist
4480 = (alloca_with_align
4481 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4482 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4483
4484 if (!valid_arglist)
4485 return NULL_RTX;
4486
4487 /* Compute the argument. */
4488 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4489
4490 /* Compute the alignment. */
4491 align = (alloca_with_align
4492 ? tree_to_hwi (CALL_EXPR_ARG (exp, 1))
4493 : BIGGEST_ALIGNMENT);
4494
4495 /* Allocate the desired space. */
4496 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4497 result = convert_memory_address (ptr_mode, result);
4498
4499 return result;
4500 }
4501
4502 /* Expand a call to bswap builtin in EXP.
4503 Return NULL_RTX if a normal call should be emitted rather than expanding the
4504 function in-line. If convenient, the result should be placed in TARGET.
4505 SUBTARGET may be used as the target for computing one of EXP's operands. */
4506
4507 static rtx
4508 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4509 rtx subtarget)
4510 {
4511 tree arg;
4512 rtx op0;
4513
4514 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4515 return NULL_RTX;
4516
4517 arg = CALL_EXPR_ARG (exp, 0);
4518 op0 = expand_expr (arg,
4519 subtarget && GET_MODE (subtarget) == target_mode
4520 ? subtarget : NULL_RTX,
4521 target_mode, EXPAND_NORMAL);
4522 if (GET_MODE (op0) != target_mode)
4523 op0 = convert_to_mode (target_mode, op0, 1);
4524
4525 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4526
4527 gcc_assert (target);
4528
4529 return convert_to_mode (target_mode, target, 1);
4530 }
4531
4532 /* Expand a call to a unary builtin in EXP.
4533 Return NULL_RTX if a normal call should be emitted rather than expanding the
4534 function in-line. If convenient, the result should be placed in TARGET.
4535 SUBTARGET may be used as the target for computing one of EXP's operands. */
4536
4537 static rtx
4538 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4539 rtx subtarget, optab op_optab)
4540 {
4541 rtx op0;
4542
4543 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4544 return NULL_RTX;
4545
4546 /* Compute the argument. */
4547 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4548 (subtarget
4549 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4550 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4551 VOIDmode, EXPAND_NORMAL);
4552 /* Compute op, into TARGET if possible.
4553 Set TARGET to wherever the result comes back. */
4554 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4555 op_optab, op0, target, op_optab != clrsb_optab);
4556 gcc_assert (target);
4557
4558 return convert_to_mode (target_mode, target, 0);
4559 }
4560
4561 /* Expand a call to __builtin_expect. We just return our argument
4562 as the builtin_expect semantics should already have been handled
4563 by the tree branch prediction pass. */
4564
4565 static rtx
4566 expand_builtin_expect (tree exp, rtx target)
4567 {
4568 tree arg;
4569
4570 if (call_expr_nargs (exp) < 2)
4571 return const0_rtx;
4572 arg = CALL_EXPR_ARG (exp, 0);
4573
4574 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4575 /* When guessing was done, the hints should already have been stripped away. */
4576 gcc_assert (!flag_guess_branch_prob
4577 || optimize == 0 || seen_error ());
4578 return target;
4579 }
4580
4581 /* Expand a call to __builtin_assume_aligned. We just return our first
4582 argument as the builtin_assume_aligned semantics should already
4583 have been handled by CCP. */
4584
4585 static rtx
4586 expand_builtin_assume_aligned (tree exp, rtx target)
4587 {
4588 if (call_expr_nargs (exp) < 2)
4589 return const0_rtx;
4590 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4591 EXPAND_NORMAL);
4592 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4593 && (call_expr_nargs (exp) < 3
4594 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4595 return target;
4596 }
4597
4598 void
4599 expand_builtin_trap (void)
4600 {
4601 #ifdef HAVE_trap
4602 if (HAVE_trap)
4603 {
4604 rtx insn = emit_insn (gen_trap ());
4605 /* For trap insns, when not accumulating outgoing args, force a
4606 REG_ARGS_SIZE note to prevent crossjumping of calls with
4607 different arg sizes. */
4608 if (!ACCUMULATE_OUTGOING_ARGS)
4609 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4610 }
4611 else
4612 #endif
4613 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4614 emit_barrier ();
4615 }
4616
4617 /* Expand a call to __builtin_unreachable. We do nothing except emit
4618 a barrier saying that control flow will not pass here.
4619
4620 It is the responsibility of the program being compiled to ensure
4621 that control flow never reaches __builtin_unreachable. */
4622 static void
4623 expand_builtin_unreachable (void)
4624 {
4625 emit_barrier ();
4626 }
4627
4628 /* Expand EXP, a call to fabs, fabsf or fabsl.
4629 Return NULL_RTX if a normal call should be emitted rather than expanding
4630 the function inline. If convenient, the result should be placed
4631 in TARGET. SUBTARGET may be used as the target for computing
4632 the operand. */
4633
4634 static rtx
4635 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4636 {
4637 enum machine_mode mode;
4638 tree arg;
4639 rtx op0;
4640
4641 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4642 return NULL_RTX;
4643
4644 arg = CALL_EXPR_ARG (exp, 0);
4645 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4646 mode = TYPE_MODE (TREE_TYPE (arg));
4647 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4648 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4649 }
4650
4651 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4652 Return NULL if a normal call should be emitted rather than expanding the
4653 function inline. If convenient, the result should be placed in TARGET.
4654 SUBTARGET may be used as the target for computing the operand. */
4655
4656 static rtx
4657 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4658 {
4659 rtx op0, op1;
4660 tree arg;
4661
4662 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4663 return NULL_RTX;
4664
4665 arg = CALL_EXPR_ARG (exp, 0);
4666 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4667
4668 arg = CALL_EXPR_ARG (exp, 1);
4669 op1 = expand_normal (arg);
4670
4671 return expand_copysign (op0, op1, target);
4672 }
4673
4674 /* Create a new constant string literal and return a char* pointer to it.
4675 The STRING_CST value is the LEN characters at STR. */
4676 tree
4677 build_string_literal (int len, const char *str)
4678 {
4679 tree t, elem, index, type;
4680
4681 t = build_string (len, str);
4682 elem = build_type_variant (char_type_node, 1, 0);
4683 index = build_index_type (size_int (len - 1));
4684 type = build_array_type (elem, index);
4685 TREE_TYPE (t) = type;
4686 TREE_CONSTANT (t) = 1;
4687 TREE_READONLY (t) = 1;
4688 TREE_STATIC (t) = 1;
4689
4690 type = build_pointer_type (elem);
4691 t = build1 (ADDR_EXPR, type,
4692 build4 (ARRAY_REF, elem,
4693 t, integer_zero_node, NULL_TREE, NULL_TREE));
4694 return t;
4695 }
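
/* E.g. build_string_literal (6, "hello") yields the tree for
   &"hello"[0], a pointer to const char addressing a constant,
   read-only, static array of six characters including the NUL.  */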
4696
4697 /* Expand a call to __builtin___clear_cache. */
4698
4699 static rtx
4700 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4701 {
4702 #ifndef HAVE_clear_cache
4703 #ifdef CLEAR_INSN_CACHE
4704 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4705 does something. Just do the default expansion to a call to
4706 __clear_cache(). */
4707 return NULL_RTX;
4708 #else
4709 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4710 does nothing. There is no need to call it. Do nothing. */
4711 return const0_rtx;
4712 #endif /* CLEAR_INSN_CACHE */
4713 #else
4714 /* We have a "clear_cache" insn, and it will handle everything. */
4715 tree begin, end;
4716 rtx begin_rtx, end_rtx;
4717
4718 /* We must not expand to a library call. If we did, any
4719 fallback library function in libgcc that might contain a call to
4720 __builtin___clear_cache() would recurse infinitely. */
4721 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4722 {
4723 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4724 return const0_rtx;
4725 }
4726
4727 if (HAVE_clear_cache)
4728 {
4729 struct expand_operand ops[2];
4730
4731 begin = CALL_EXPR_ARG (exp, 0);
4732 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4733
4734 end = CALL_EXPR_ARG (exp, 1);
4735 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4736
4737 create_address_operand (&ops[0], begin_rtx);
4738 create_address_operand (&ops[1], end_rtx);
4739 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4740 return const0_rtx;
4741 }
4742 return const0_rtx;
4743 #endif /* HAVE_clear_cache */
4744 }
4745
4746 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4747
4748 static rtx
4749 round_trampoline_addr (rtx tramp)
4750 {
4751 rtx temp, addend, mask;
4752
4753 /* If we don't need too much alignment, we'll have been guaranteed
4754 proper alignment by get_trampoline_type. */
4755 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4756 return tramp;
4757
4758 /* Round address up to desired boundary. */
4759 temp = gen_reg_rtx (Pmode);
4760 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4761 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4762
4763 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4764 temp, 0, OPTAB_LIB_WIDEN);
4765 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4766 temp, 0, OPTAB_LIB_WIDEN);
4767
4768 return tramp;
4769 }
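
/* The rounding above is the usual power-of-two idiom

     rounded = (tramp + align_bytes - 1) & -align_bytes

   where align_bytes = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT; e.g. with
   a 64-bit alignment requirement, align_bytes is 8 and an address of
   0x1003 rounds up to 0x1008.  */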
4770
4771 static rtx
4772 expand_builtin_init_trampoline (tree exp, bool onstack)
4773 {
4774 tree t_tramp, t_func, t_chain;
4775 rtx m_tramp, r_tramp, r_chain, tmp;
4776
4777 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4778 POINTER_TYPE, VOID_TYPE))
4779 return NULL_RTX;
4780
4781 t_tramp = CALL_EXPR_ARG (exp, 0);
4782 t_func = CALL_EXPR_ARG (exp, 1);
4783 t_chain = CALL_EXPR_ARG (exp, 2);
4784
4785 r_tramp = expand_normal (t_tramp);
4786 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4787 MEM_NOTRAP_P (m_tramp) = 1;
4788
4789 /* If ONSTACK, the TRAMP argument should be the address of a field
4790 within the local function's FRAME decl. Either way, let's see if
4791 we can fill in the MEM_ATTRs for this memory. */
4792 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4793 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4794
4795 /* The creator of a heap trampoline is responsible for making sure
4796 the address is aligned to at least STACK_BOUNDARY. Normally malloc
4797 will ensure this anyhow. */
4798 tmp = round_trampoline_addr (r_tramp);
4799 if (tmp != r_tramp)
4800 {
4801 m_tramp = change_address (m_tramp, BLKmode, tmp);
4802 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4803 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4804 }
4805
4806 /* The FUNC argument should be the address of the nested function.
4807 Extract the actual function decl to pass to the hook. */
4808 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4809 t_func = TREE_OPERAND (t_func, 0);
4810 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4811
4812 r_chain = expand_normal (t_chain);
4813
4814 /* Generate insns to initialize the trampoline. */
4815 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4816
4817 if (onstack)
4818 {
4819 trampolines_created = 1;
4820
4821 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4822 "trampoline generated for nested function %qD", t_func);
4823 }
4824
4825 return const0_rtx;
4826 }
4827
4828 static rtx
4829 expand_builtin_adjust_trampoline (tree exp)
4830 {
4831 rtx tramp;
4832
4833 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4834 return NULL_RTX;
4835
4836 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4837 tramp = round_trampoline_addr (tramp);
4838 if (targetm.calls.trampoline_adjust_address)
4839 tramp = targetm.calls.trampoline_adjust_address (tramp);
4840
4841 return tramp;
4842 }
4843
4844 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4845 function. The function first checks whether the back end provides
4846 an insn to implement signbit for the respective mode. If not, it
4847 checks whether the floating point format of the value is such that
4848 the sign bit can be extracted. If that is not the case, the
4849 function returns NULL_RTX to indicate that a normal call should be
4850 emitted rather than expanding the function in-line. EXP is the
4851 expression that is a call to the builtin function; if convenient,
4852 the result should be placed in TARGET. */
4853 static rtx
4854 expand_builtin_signbit (tree exp, rtx target)
4855 {
4856 const struct real_format *fmt;
4857 enum machine_mode fmode, imode, rmode;
4858 tree arg;
4859 int word, bitpos;
4860 enum insn_code icode;
4861 rtx temp;
4862 location_t loc = EXPR_LOCATION (exp);
4863
4864 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4865 return NULL_RTX;
4866
4867 arg = CALL_EXPR_ARG (exp, 0);
4868 fmode = TYPE_MODE (TREE_TYPE (arg));
4869 rmode = TYPE_MODE (TREE_TYPE (exp));
4870 fmt = REAL_MODE_FORMAT (fmode);
4871
4872 arg = builtin_save_expr (arg);
4873
4874 /* Expand the argument yielding a RTX expression. */
4875 temp = expand_normal (arg);
4876
4877 /* Check if the back end provides an insn that handles signbit for the
4878 argument's mode. */
4879 icode = optab_handler (signbit_optab, fmode);
4880 if (icode != CODE_FOR_nothing)
4881 {
4882 rtx last = get_last_insn ();
4883 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4884 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4885 return target;
4886 delete_insns_since (last);
4887 }
4888
4889 /* For floating point formats without a sign bit, implement signbit
4890 as "ARG < 0.0". */
4891 bitpos = fmt->signbit_ro;
4892 if (bitpos < 0)
4893 {
4894 /* But we can't do this if the format supports signed zero. */
4895 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4896 return NULL_RTX;
4897
4898 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4899 build_real (TREE_TYPE (arg), dconst0));
4900 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4901 }
4902
4903 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4904 {
4905 imode = int_mode_for_mode (fmode);
4906 if (imode == BLKmode)
4907 return NULL_RTX;
4908 temp = gen_lowpart (imode, temp);
4909 }
4910 else
4911 {
4912 imode = word_mode;
4913 /* Handle targets with different FP word orders. */
4914 if (FLOAT_WORDS_BIG_ENDIAN)
4915 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4916 else
4917 word = bitpos / BITS_PER_WORD;
4918 temp = operand_subword_force (temp, word, fmode);
4919 bitpos = bitpos % BITS_PER_WORD;
4920 }
4921
4922 /* Force the intermediate word_mode (or narrower) result into a
4923 register. This avoids attempting to create paradoxical SUBREGs
4924 of floating point modes below. */
4925 temp = force_reg (imode, temp);
4926
4927 /* If the bitpos is within the "result mode" lowpart, the operation
4928 can be implemented with a single bitwise AND. Otherwise, we need
4929 a right shift and an AND. */
4930
4931 if (bitpos < GET_MODE_BITSIZE (rmode))
4932 {
4933 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4934
4935 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4936 temp = gen_lowpart (rmode, temp);
4937 temp = expand_binop (rmode, and_optab, temp,
4938 immed_wide_int_const (mask, rmode),
4939 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4940 }
4941 else
4942 {
4943 /* Perform a logical right shift to place the signbit in the least
4944 significant bit, then truncate the result to the desired mode
4945 and mask just this bit. */
4946 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4947 temp = gen_lowpart (rmode, temp);
4948 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4949 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4950 }
4951
4952 return temp;
4953 }
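
/* Worked example, assuming IEEE single precision in a 32-bit word:
   signbit_ro is 31, so when RMODE is at least 32 bits wide a single
   AND with the constant 1 << 31 suffices; for a narrower RMODE the
   sign bit is first shifted down to bit 0 and then masked with 1.  */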
4954
4955 /* Expand fork or exec calls. TARGET is the desired target of the
4956 call. EXP is the call. FN is the
4957 declaration of the actual function. IGNORE is nonzero if the
4958 value is to be ignored. */
4959
4960 static rtx
4961 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4962 {
4963 tree id, decl;
4964 tree call;
4965
4966 /* If we are not profiling, just call the function. */
4967 if (!profile_arc_flag)
4968 return NULL_RTX;
4969
4970 /* Otherwise call the wrapper. This should be equivalent for the rest
4971 of the compiler, so the code does not diverge, and the wrapper may
4972 run the code necessary for keeping the profiling sane. */
4973
4974 switch (DECL_FUNCTION_CODE (fn))
4975 {
4976 case BUILT_IN_FORK:
4977 id = get_identifier ("__gcov_fork");
4978 break;
4979
4980 case BUILT_IN_EXECL:
4981 id = get_identifier ("__gcov_execl");
4982 break;
4983
4984 case BUILT_IN_EXECV:
4985 id = get_identifier ("__gcov_execv");
4986 break;
4987
4988 case BUILT_IN_EXECLP:
4989 id = get_identifier ("__gcov_execlp");
4990 break;
4991
4992 case BUILT_IN_EXECLE:
4993 id = get_identifier ("__gcov_execle");
4994 break;
4995
4996 case BUILT_IN_EXECVP:
4997 id = get_identifier ("__gcov_execvp");
4998 break;
4999
5000 case BUILT_IN_EXECVE:
5001 id = get_identifier ("__gcov_execve");
5002 break;
5003
5004 default:
5005 gcc_unreachable ();
5006 }
5007
5008 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5009 FUNCTION_DECL, id, TREE_TYPE (fn));
5010 DECL_EXTERNAL (decl) = 1;
5011 TREE_PUBLIC (decl) = 1;
5012 DECL_ARTIFICIAL (decl) = 1;
5013 TREE_NOTHROW (decl) = 1;
5014 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5015 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5016 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5017 return expand_call (call, target, ignore);
5018 }
5019
5020
5021 \f
5022 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5023 the pointer in these functions is void*, the tree optimizers may remove
5024 casts. The mode computed in expand_builtin isn't reliable either, due
5025 to __sync_bool_compare_and_swap.
5026
5027 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5028 group of builtins. This gives us log2 of the mode size. */
5029
5030 static inline enum machine_mode
5031 get_builtin_sync_mode (int fcode_diff)
5032 {
5033 /* The size is not negotiable, so ask not to get BLKmode in return
5034 if the target indicates that a smaller size would be better. */
5035 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5036 }
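
/* E.g. for __sync_fetch_and_add_4 the code differs from the FOO_1 code
   by 2, so the mode requested is BITS_PER_UNIT << 2 == 32 bits,
   i.e. SImode on a typical target.  */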
5037
5038 /* Expand the memory expression LOC and return the appropriate memory operand
5039 for the builtin_sync operations. */
5040
5041 static rtx
5042 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5043 {
5044 rtx addr, mem;
5045
5046 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5047 addr = convert_memory_address (Pmode, addr);
5048
5049 /* Note that we explicitly do not want any alias information for this
5050 memory, so that we kill all other live memories. Otherwise we don't
5051 satisfy the full barrier semantics of the intrinsic. */
5052 mem = validize_mem (gen_rtx_MEM (mode, addr));
5053
5054 /* The alignment needs to be at least that of the mode.  */
5055 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5056 get_pointer_alignment (loc)));
5057 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5058 MEM_VOLATILE_P (mem) = 1;
5059
5060 return mem;
5061 }
5062
5063 /* Make sure an argument is in the right mode.
5064 EXP is the tree argument.
5065 MODE is the mode it should be in. */
5066
5067 static rtx
5068 expand_expr_force_mode (tree exp, enum machine_mode mode)
5069 {
5070 rtx val;
5071 enum machine_mode old_mode;
5072
5073 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5074 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5075 of CONST_INTs, where we know the old_mode only from the call argument. */
5076
5077 old_mode = GET_MODE (val);
5078 if (old_mode == VOIDmode)
5079 old_mode = TYPE_MODE (TREE_TYPE (exp));
5080 val = convert_modes (mode, old_mode, val, 1);
5081 return val;
5082 }
5083
5084
5085 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5086 EXP is the CALL_EXPR. CODE is the rtx code
5087 that corresponds to the arithmetic or logical operation from the name;
5088 an exception here is that NOT actually means NAND. TARGET is an optional
5089 place for us to store the results; AFTER is true if this is the
5090 xxx_and_fetch form, i.e. the value after the operation is returned. */
5091
5092 static rtx
5093 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5094 enum rtx_code code, bool after,
5095 rtx target)
5096 {
5097 rtx val, mem;
5098 location_t loc = EXPR_LOCATION (exp);
5099
5100 if (code == NOT && warn_sync_nand)
5101 {
5102 tree fndecl = get_callee_fndecl (exp);
5103 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5104
5105 static bool warned_f_a_n, warned_n_a_f;
5106
5107 switch (fcode)
5108 {
5109 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5110 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5111 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5112 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5113 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5114 if (warned_f_a_n)
5115 break;
5116
5117 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5118 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5119 warned_f_a_n = true;
5120 break;
5121
5122 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5123 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5124 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5125 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5126 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5127 if (warned_n_a_f)
5128 break;
5129
5130 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5131 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5132 warned_n_a_f = true;
5133 break;
5134
5135 default:
5136 gcc_unreachable ();
5137 }
5138 }
5139
5140 /* Expand the operands. */
5141 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5142 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5143
5144 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5145 after);
5146 }
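/* For illustration: the two forms expanded above differ only in which
   value is returned (a sketch, assuming 'int c'):

     int before = __sync_fetch_and_add (&c, 1);   // AFTER == false
     int after  = __sync_add_and_fetch (&c, 1);   // AFTER == true

   The warning above reflects the GCC 4.4 semantic change for NAND:
   __sync_fetch_and_nand now stores ~(old & val), where pre-4.4 releases
   stored ~old & val.  */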
5147
5148 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5149 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5150 true if this is the boolean form. TARGET is a place for us to store the
5151 results; this is NOT optional if IS_BOOL is true. */
5152
5153 static rtx
5154 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5155 bool is_bool, rtx target)
5156 {
5157 rtx old_val, new_val, mem;
5158 rtx *pbool, *poval;
5159
5160 /* Expand the operands. */
5161 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5162 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5163 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5164
5165 pbool = poval = NULL;
5166 if (target != const0_rtx)
5167 {
5168 if (is_bool)
5169 pbool = &target;
5170 else
5171 poval = &target;
5172 }
5173 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5174 false, MEMMODEL_SEQ_CST,
5175 MEMMODEL_SEQ_CST))
5176 return NULL_RTX;
5177
5178 return target;
5179 }
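/* For illustration (a sketch, assuming 'int lock'):

     int ok  = __sync_bool_compare_and_swap (&lock, 0, 1);  // IS_BOOL
     int old = __sync_val_compare_and_swap (&lock, 0, 1);   // !IS_BOOL

   'ok' is nonzero iff the store happened; 'old' is the value that was in
   memory before the operation.  Both act as full barriers, matching the
   MEMMODEL_SEQ_CST arguments above.  */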
5180
5181 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5182 general form is actually an atomic exchange, and some targets only
5183 support a reduced form with the second argument being a constant 1.
5184 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5185 the results. */
5186
5187 static rtx
5188 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5189 rtx target)
5190 {
5191 rtx val, mem;
5192
5193 /* Expand the operands. */
5194 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5195 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5196
5197 return expand_sync_lock_test_and_set (target, mem, val);
5198 }
5199
5200 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5201
5202 static void
5203 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5204 {
5205 rtx mem;
5206
5207 /* Expand the operands. */
5208 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5209
5210 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5211 }
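/* For illustration, a minimal spinlock over the two expanders above
   (a sketch, assuming 'int lock' initialized to zero):

     while (__sync_lock_test_and_set (&lock, 1))
       ;                              // spin; acquire semantics
     // ... critical section ...
     __sync_lock_release (&lock);     // stores 0 with release semantics

   matching the MEMMODEL_RELEASE store of const0_rtx issued above.  */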
5212
5213 /* Given an integer representing an ``enum memmodel'', verify its
5214 correctness and return the memory model enum. */
5215
5216 static enum memmodel
5217 get_memmodel (tree exp)
5218 {
5219 rtx op;
5220 unsigned HOST_WIDE_INT val;
5221
5222 /* If the parameter is not a constant, it's a run time value so we'll just
5223 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5224 if (TREE_CODE (exp) != INTEGER_CST)
5225 return MEMMODEL_SEQ_CST;
5226
5227 op = expand_normal (exp);
5228
5229 val = INTVAL (op);
5230 if (targetm.memmodel_check)
5231 val = targetm.memmodel_check (val);
5232 else if (val & ~MEMMODEL_MASK)
5233 {
5234 warning (OPT_Winvalid_memory_model,
5235 "Unknown architecture specifier in memory model to builtin.");
5236 return MEMMODEL_SEQ_CST;
5237 }
5238
5239 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5240 {
5241 warning (OPT_Winvalid_memory_model,
5242 "invalid memory model argument to builtin");
5243 return MEMMODEL_SEQ_CST;
5244 }
5245
5246 return (enum memmodel) val;
5247 }
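/* For illustration: the constants the front ends hand us here follow the
   C11 ordering, __ATOMIC_RELAXED (0), __ATOMIC_CONSUME (1),
   __ATOMIC_ACQUIRE (2), __ATOMIC_RELEASE (3), __ATOMIC_ACQ_REL (4),
   __ATOMIC_SEQ_CST (5), so e.g.

     __atomic_store_n (&x, 0, __ATOMIC_RELEASE);

   reaches get_memmodel as the INTEGER_CST 3, while a model computed at
   run time is conservatively treated as MEMMODEL_SEQ_CST.  */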
5248
5249 /* Expand the __atomic_exchange intrinsic:
5250 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5251 EXP is the CALL_EXPR.
5252 TARGET is an optional place for us to store the results. */
5253
5254 static rtx
5255 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5256 {
5257 rtx val, mem;
5258 enum memmodel model;
5259
5260 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5261 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5262 {
5263 error ("invalid memory model for %<__atomic_exchange%>");
5264 return NULL_RTX;
5265 }
5266
5267 if (!flag_inline_atomics)
5268 return NULL_RTX;
5269
5270 /* Expand the operands. */
5271 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5272 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5273
5274 return expand_atomic_exchange (target, mem, val, model);
5275 }
5276
5277 /* Expand the __atomic_compare_exchange intrinsic:
5278 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5279 TYPE desired, BOOL weak,
5280 enum memmodel success,
5281 enum memmodel failure)
5282 EXP is the CALL_EXPR.
5283 TARGET is an optional place for us to store the results. */
5284
5285 static rtx
5286 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5287 rtx target)
5288 {
5289 rtx expect, desired, mem, oldval;
5290 enum memmodel success, failure;
5291 tree weak;
5292 bool is_weak;
5293
5294 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5295 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5296
5297 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5298 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5299 {
5300 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5301 return NULL_RTX;
5302 }
5303
5304 if (failure > success)
5305 {
5306 error ("failure memory model cannot be stronger than success "
5307 "memory model for %<__atomic_compare_exchange%>");
5308 return NULL_RTX;
5309 }
5310
5311 if (!flag_inline_atomics)
5312 return NULL_RTX;
5313
5314 /* Expand the operands. */
5315 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5316
5317 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5318 expect = convert_memory_address (Pmode, expect);
5319 expect = gen_rtx_MEM (mode, expect);
5320 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5321
5322 weak = CALL_EXPR_ARG (exp, 3);
5323 is_weak = false;
5324 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5325 is_weak = true;
5326
5327 oldval = expect;
5328 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5329 &oldval, mem, oldval, desired,
5330 is_weak, success, failure))
5331 return NULL_RTX;
5332
5333 if (oldval != expect)
5334 emit_move_insn (expect, oldval);
5335
5336 return target;
5337 }
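/* For illustration, the source-level form this expands (a sketch):

     int expected = guess;
     bool ok = __atomic_compare_exchange_n (&v, &expected, desired,
                                            false,   // weak
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   On failure 'expected' is updated with the value found in memory,
   which is the emit_move_insn back into EXPECT above.  */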
5338
5339 /* Expand the __atomic_load intrinsic:
5340 TYPE __atomic_load (TYPE *object, enum memmodel)
5341 EXP is the CALL_EXPR.
5342 TARGET is an optional place for us to store the results. */
5343
5344 static rtx
5345 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5346 {
5347 rtx mem;
5348 enum memmodel model;
5349
5350 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5351 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5352 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5353 {
5354 error ("invalid memory model for %<__atomic_load%>");
5355 return NULL_RTX;
5356 }
5357
5358 if (!flag_inline_atomics)
5359 return NULL_RTX;
5360
5361 /* Expand the operand. */
5362 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5363
5364 return expand_atomic_load (target, mem, model);
5365 }
5366
5367
5368 /* Expand the __atomic_store intrinsic:
5369 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5370 EXP is the CALL_EXPR.
5371 TARGET is an optional place for us to store the results. */
5372
5373 static rtx
5374 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5375 {
5376 rtx mem, val;
5377 enum memmodel model;
5378
5379 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5380 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5381 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5382 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5383 {
5384 error ("invalid memory model for %<__atomic_store%>");
5385 return NULL_RTX;
5386 }
5387
5388 if (!flag_inline_atomics)
5389 return NULL_RTX;
5390
5391 /* Expand the operands. */
5392 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5393 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5394
5395 return expand_atomic_store (mem, val, model, false);
5396 }
5397
5398 /* Expand the __atomic_fetch_XXX intrinsic:
5399 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5400 EXP is the CALL_EXPR.
5401 TARGET is an optional place for us to store the results.
5402 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
5403 FETCH_AFTER is true if returning the result of the operation,
5404 and false if returning the value before the operation.
5405 IGNORE is true if the result is not used.
5406 EXT_CALL is the correct builtin for an external call if this cannot be
5407 resolved to an instruction sequence. */
5408
5409 static rtx
5410 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5411 enum rtx_code code, bool fetch_after,
5412 bool ignore, enum built_in_function ext_call)
5413 {
5414 rtx val, mem, ret;
5415 enum memmodel model;
5416 tree fndecl;
5417 tree addr;
5418
5419 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5420
5421 /* Expand the operands. */
5422 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5423 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5424
5425 /* Only try generating instructions if inlining is turned on. */
5426 if (flag_inline_atomics)
5427 {
5428 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5429 if (ret)
5430 return ret;
5431 }
5432
5433 /* Return NULL if no different routine is needed for the library call. */
5434 if (ext_call == BUILT_IN_NONE)
5435 return NULL_RTX;
5436
5437 /* Change the call to the specified function. */
5438 fndecl = get_callee_fndecl (exp);
5439 addr = CALL_EXPR_FN (exp);
5440 STRIP_NOPS (addr);
5441
5442 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5443 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5444
5445 /* Expand the call here so we can emit trailing code. */
5446 ret = expand_call (exp, target, ignore);
5447
5448 /* Replace the original function just in case it matters. */
5449 TREE_OPERAND (addr, 0) = fndecl;
5450
5451 /* Then issue the arithmetic correction to return the right result. */
5452 if (!ignore)
5453 {
5454 if (code == NOT)
5455 {
5456 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5457 OPTAB_LIB_WIDEN);
5458 ret = expand_simple_unop (mode, NOT, ret, target, true);
5459 }
5460 else
5461 ret = expand_simple_binop (mode, code, ret, val, target, true,
5462 OPTAB_LIB_WIDEN);
5463 }
5464 return ret;
5465 }
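/* For illustration of the trailing correction above: the external
   __atomic_fetch_OP library routines return the value that was in memory
   before the operation, so a fetch-after result is rebuilt from the
   library's return value, e.g. for __atomic_add_fetch

     ret = ret + val;

   and for the NAND case (CODE == NOT)

     ret = ~(ret & val);

   which is exactly what the AND/NOT pair emitted above computes.  */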
5466
5467
5468 #ifndef HAVE_atomic_clear
5469 # define HAVE_atomic_clear 0
5470 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5471 #endif
5472
5473 /* Expand an atomic clear operation.
5474 void __atomic_clear (BOOL *obj, enum memmodel)
5475 EXP is the call expression. */
5476
5477 static rtx
5478 expand_builtin_atomic_clear (tree exp)
5479 {
5480 enum machine_mode mode;
5481 rtx mem, ret;
5482 enum memmodel model;
5483
5484 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5485 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5486 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5487
5488 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5489 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5490 {
5491 error ("invalid memory model for %<__atomic_store%>");
5492 return const0_rtx;
5493 }
5494
5495 if (HAVE_atomic_clear)
5496 {
5497 emit_insn (gen_atomic_clear (mem, model));
5498 return const0_rtx;
5499 }
5500
5501 /* Try issuing an __atomic_store, and allow fallback to a
5502    __sync_lock_release libcall.  Failing that, emit a plain store.  The
5503    only way this can fail is if the bool type is larger than a word
5504    size.  Unlikely, but handle it anyway for completeness.  Assume a
5505    single threaded model then, since there is no atomic support in that case and no barriers are required.  */
5506 ret = expand_atomic_store (mem, const0_rtx, model, true);
5507 if (!ret)
5508 emit_move_insn (mem, const0_rtx);
5509 return const0_rtx;
5510 }
5511
5512 /* Expand an atomic test_and_set operation.
5513 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5514 EXP is the call expression. */
5515
5516 static rtx
5517 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5518 {
5519 rtx mem;
5520 enum memmodel model;
5521 enum machine_mode mode;
5522
5523 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5524 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5525 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5526
5527 return expand_atomic_test_and_set (target, mem, model);
5528 }
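/* For illustration, the C11-style flag idiom built on the two expanders
   above (a sketch, assuming 'unsigned char flag' initialized to zero):

     while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
       ;   // spin while the previous value was already set
     // ... critical section ...
     __atomic_clear (&flag, __ATOMIC_RELEASE);
*/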
5529
5530
5531 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5532 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5533
5534 static tree
5535 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5536 {
5537 int size;
5538 enum machine_mode mode;
5539 unsigned int mode_align, type_align;
5540
5541 if (TREE_CODE (arg0) != INTEGER_CST)
5542 return NULL_TREE;
5543
5544 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5545 mode = mode_for_size (size, MODE_INT, 0);
5546 mode_align = GET_MODE_ALIGNMENT (mode);
5547
5548 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5549 type_align = mode_align;
5550 else
5551 {
5552 tree ttype = TREE_TYPE (arg1);
5553
5554 /* This function is usually invoked and folded immediately by the front
5555 end before anything else has a chance to look at it. The pointer
5556 parameter at this point is usually cast to a void *, so check for that
5557 and look past the cast. */
5558 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5559 && VOID_TYPE_P (TREE_TYPE (ttype)))
5560 arg1 = TREE_OPERAND (arg1, 0);
5561
5562 ttype = TREE_TYPE (arg1);
5563 gcc_assert (POINTER_TYPE_P (ttype));
5564
5565 /* Get the underlying type of the object. */
5566 ttype = TREE_TYPE (ttype);
5567 type_align = TYPE_ALIGN (ttype);
5568 }
5569
5570 /* If the object has smaller alignment, the lock free routines cannot
5571    be used.  */
5572 if (type_align < mode_align)
5573 return boolean_false_node;
5574
5575 /* Check if a compare_and_swap pattern exists for the mode which represents
5576 the required size. The pattern is not allowed to fail, so the existence
5577 of the pattern indicates support is present. */
5578 if (can_compare_and_swap_p (mode, true))
5579 return boolean_true_node;
5580 else
5581 return boolean_false_node;
5582 }
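/* For illustration: with a null object pointer the question reduces to
   "does a never-failing compare_and_swap pattern exist for the integer
   mode of that size", so on a typical 64-bit target

     __atomic_always_lock_free (sizeof (int), 0)    // folds to true

   while a size with no integer mode or no CAS pattern folds to false.
   (A sketch; the results are target dependent.)  */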
5583
5584 /* Return true if the parameters to call EXP represent an object which will
5585 always generate lock free instructions. The first argument represents the
5586 size of the object, and the second parameter is a pointer to the object
5587 itself. If NULL is passed for the object, then the result is based on
5588 typical alignment for an object of the specified size. Otherwise return
5589 false. */
5590
5591 static rtx
5592 expand_builtin_atomic_always_lock_free (tree exp)
5593 {
5594 tree size;
5595 tree arg0 = CALL_EXPR_ARG (exp, 0);
5596 tree arg1 = CALL_EXPR_ARG (exp, 1);
5597
5598 if (TREE_CODE (arg0) != INTEGER_CST)
5599 {
5600 error ("non-constant argument 1 to __atomic_always_lock_free");
5601 return const0_rtx;
5602 }
5603
5604 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5605 if (size == boolean_true_node)
5606 return const1_rtx;
5607 return const0_rtx;
5608 }
5609
5610 /* Return one or zero if it can be determined that object ARG1 of size
5611    ARG0 is lock free on this architecture.  */
5612
5613 static tree
5614 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5615 {
5616 if (!flag_inline_atomics)
5617 return NULL_TREE;
5618
5619 /* If it isn't always lock free, don't generate a result. */
5620 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5621 return boolean_true_node;
5622
5623 return NULL_TREE;
5624 }
5625
5626 /* Return one if the parameters to call EXP represent an object which
5627    is known to be lock free on this architecture.  The first argument
5628    represents the size of the object, and the second parameter is a
5629    pointer to the object itself.  If NULL is passed for the object, the
5630    result is based on typical alignment for an object of the specified
5631    size.  Otherwise return NULL.  */
5632
5633 static rtx
5634 expand_builtin_atomic_is_lock_free (tree exp)
5635 {
5636 tree size;
5637 tree arg0 = CALL_EXPR_ARG (exp, 0);
5638 tree arg1 = CALL_EXPR_ARG (exp, 1);
5639
5640 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5641 {
5642 error ("non-integer argument 1 to __atomic_is_lock_free");
5643 return NULL_RTX;
5644 }
5645
5646 if (!flag_inline_atomics)
5647 return NULL_RTX;
5648
5649 /* If the value is known at compile time, return the RTX for it. */
5650 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5651 if (size == boolean_true_node)
5652 return const1_rtx;
5653
5654 return NULL_RTX;
5655 }
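/* For illustration (a sketch): unlike __atomic_always_lock_free, this
   query may be unresolvable at compile time.  When the folder above
   cannot prove "always lock free", NULL_RTX is returned and the call is
   left for the runtime library to answer, e.g.

     if (__atomic_is_lock_free (sizeof (long long), &x))
       use_inline_path ();   // use_inline_path is a hypothetical helper

   where 'x' is a suitably declared long long.  */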
5656
5657 /* Expand the __atomic_thread_fence intrinsic:
5658 void __atomic_thread_fence (enum memmodel)
5659 EXP is the CALL_EXPR. */
5660
5661 static void
5662 expand_builtin_atomic_thread_fence (tree exp)
5663 {
5664 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5665 expand_mem_thread_fence (model);
5666 }
5667
5668 /* Expand the __atomic_signal_fence intrinsic:
5669 void __atomic_signal_fence (enum memmodel)
5670 EXP is the CALL_EXPR. */
5671
5672 static void
5673 expand_builtin_atomic_signal_fence (tree exp)
5674 {
5675 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5676 expand_mem_signal_fence (model);
5677 }
5678
5679 /* Expand the __sync_synchronize intrinsic. */
5680
5681 static void
5682 expand_builtin_sync_synchronize (void)
5683 {
5684 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5685 }
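/* For illustration: __sync_synchronize () is thus exactly a sequentially
   consistent fence, equivalent to

     __atomic_thread_fence (__ATOMIC_SEQ_CST);
*/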
5686
5687 static rtx
5688 expand_builtin_thread_pointer (tree exp, rtx target)
5689 {
5690 enum insn_code icode;
5691 if (!validate_arglist (exp, VOID_TYPE))
5692 return const0_rtx;
5693 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5694 if (icode != CODE_FOR_nothing)
5695 {
5696 struct expand_operand op;
5697 if (!REG_P (target) || GET_MODE (target) != Pmode)
5698 target = gen_reg_rtx (Pmode);
5699 create_output_operand (&op, target, Pmode);
5700 expand_insn (icode, 1, &op);
5701 return target;
5702 }
5703 error ("__builtin_thread_pointer is not supported on this target");
5704 return const0_rtx;
5705 }
5706
5707 static void
5708 expand_builtin_set_thread_pointer (tree exp)
5709 {
5710 enum insn_code icode;
5711 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5712 return;
5713 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5714 if (icode != CODE_FOR_nothing)
5715 {
5716 struct expand_operand op;
5717 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5718 Pmode, EXPAND_NORMAL);
5719 create_input_operand (&op, val, Pmode);
5720 expand_insn (icode, 1, &op);
5721 return;
5722 }
5723 error ("__builtin_set_thread_pointer is not supported on this target");
5724 }
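/* For illustration (a sketch): the two expanders above implement

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   on targets whose optabs provide the thread-pointer patterns (typically
   a dedicated TLS register); elsewhere the errors above are emitted.  */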
5725
5726 \f
5727 /* Expand an expression EXP that calls a built-in function,
5728 with result going to TARGET if that's convenient
5729 (and in mode MODE if that's convenient).
5730 SUBTARGET may be used as the target for computing one of EXP's operands.
5731 IGNORE is nonzero if the value is to be ignored. */
5732
5733 rtx
5734 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5735 int ignore)
5736 {
5737 tree fndecl = get_callee_fndecl (exp);
5738 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5739 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5740 int flags;
5741
5742 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5743 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5744
5745 /* When not optimizing, generate calls to library functions for a certain
5746 set of builtins. */
5747 if (!optimize
5748 && !called_as_built_in (fndecl)
5749 && fcode != BUILT_IN_FORK
5750 && fcode != BUILT_IN_EXECL
5751 && fcode != BUILT_IN_EXECV
5752 && fcode != BUILT_IN_EXECLP
5753 && fcode != BUILT_IN_EXECLE
5754 && fcode != BUILT_IN_EXECVP
5755 && fcode != BUILT_IN_EXECVE
5756 && fcode != BUILT_IN_ALLOCA
5757 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5758 && fcode != BUILT_IN_FREE)
5759 return expand_call (exp, target, ignore);
5760
5761 /* The built-in function expanders test for target == const0_rtx
5762 to determine whether the function's result will be ignored. */
5763 if (ignore)
5764 target = const0_rtx;
5765
5766 /* If the result of a pure or const built-in function is ignored, and
5767 none of its arguments are volatile, we can avoid expanding the
5768 built-in call and just evaluate the arguments for side-effects. */
5769 if (target == const0_rtx
5770 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5771 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5772 {
5773 bool volatilep = false;
5774 tree arg;
5775 call_expr_arg_iterator iter;
5776
5777 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5778 if (TREE_THIS_VOLATILE (arg))
5779 {
5780 volatilep = true;
5781 break;
5782 }
5783
5784 if (! volatilep)
5785 {
5786 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5787 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5788 return const0_rtx;
5789 }
5790 }
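/* For illustration: given a const builtin whose result is unused, e.g.

     (void) __builtin_popcount (f ());

   the code above skips expanding the builtin and only evaluates f ()
   for its side effects.  */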
5791
5792 switch (fcode)
5793 {
5794 CASE_FLT_FN (BUILT_IN_FABS):
5795 case BUILT_IN_FABSD32:
5796 case BUILT_IN_FABSD64:
5797 case BUILT_IN_FABSD128:
5798 target = expand_builtin_fabs (exp, target, subtarget);
5799 if (target)
5800 return target;
5801 break;
5802
5803 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5804 target = expand_builtin_copysign (exp, target, subtarget);
5805 if (target)
5806 return target;
5807 break;
5808
5809 /* Just do a normal library call if we were unable to fold
5810 the values. */
5811 CASE_FLT_FN (BUILT_IN_CABS):
5812 break;
5813
5814 CASE_FLT_FN (BUILT_IN_EXP):
5815 CASE_FLT_FN (BUILT_IN_EXP10):
5816 CASE_FLT_FN (BUILT_IN_POW10):
5817 CASE_FLT_FN (BUILT_IN_EXP2):
5818 CASE_FLT_FN (BUILT_IN_EXPM1):
5819 CASE_FLT_FN (BUILT_IN_LOGB):
5820 CASE_FLT_FN (BUILT_IN_LOG):
5821 CASE_FLT_FN (BUILT_IN_LOG10):
5822 CASE_FLT_FN (BUILT_IN_LOG2):
5823 CASE_FLT_FN (BUILT_IN_LOG1P):
5824 CASE_FLT_FN (BUILT_IN_TAN):
5825 CASE_FLT_FN (BUILT_IN_ASIN):
5826 CASE_FLT_FN (BUILT_IN_ACOS):
5827 CASE_FLT_FN (BUILT_IN_ATAN):
5828 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5829 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5830 because of possible accuracy problems. */
5831 if (! flag_unsafe_math_optimizations)
5832 break;
5833 CASE_FLT_FN (BUILT_IN_SQRT):
5834 CASE_FLT_FN (BUILT_IN_FLOOR):
5835 CASE_FLT_FN (BUILT_IN_CEIL):
5836 CASE_FLT_FN (BUILT_IN_TRUNC):
5837 CASE_FLT_FN (BUILT_IN_ROUND):
5838 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5839 CASE_FLT_FN (BUILT_IN_RINT):
5840 target = expand_builtin_mathfn (exp, target, subtarget);
5841 if (target)
5842 return target;
5843 break;
5844
5845 CASE_FLT_FN (BUILT_IN_FMA):
5846 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5847 if (target)
5848 return target;
5849 break;
5850
5851 CASE_FLT_FN (BUILT_IN_ILOGB):
5852 if (! flag_unsafe_math_optimizations)
5853 break;
5854 CASE_FLT_FN (BUILT_IN_ISINF):
5855 CASE_FLT_FN (BUILT_IN_FINITE):
5856 case BUILT_IN_ISFINITE:
5857 case BUILT_IN_ISNORMAL:
5858 target = expand_builtin_interclass_mathfn (exp, target);
5859 if (target)
5860 return target;
5861 break;
5862
5863 CASE_FLT_FN (BUILT_IN_ICEIL):
5864 CASE_FLT_FN (BUILT_IN_LCEIL):
5865 CASE_FLT_FN (BUILT_IN_LLCEIL):
5866 CASE_FLT_FN (BUILT_IN_LFLOOR):
5867 CASE_FLT_FN (BUILT_IN_IFLOOR):
5868 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5869 target = expand_builtin_int_roundingfn (exp, target);
5870 if (target)
5871 return target;
5872 break;
5873
5874 CASE_FLT_FN (BUILT_IN_IRINT):
5875 CASE_FLT_FN (BUILT_IN_LRINT):
5876 CASE_FLT_FN (BUILT_IN_LLRINT):
5877 CASE_FLT_FN (BUILT_IN_IROUND):
5878 CASE_FLT_FN (BUILT_IN_LROUND):
5879 CASE_FLT_FN (BUILT_IN_LLROUND):
5880 target = expand_builtin_int_roundingfn_2 (exp, target);
5881 if (target)
5882 return target;
5883 break;
5884
5885 CASE_FLT_FN (BUILT_IN_POWI):
5886 target = expand_builtin_powi (exp, target);
5887 if (target)
5888 return target;
5889 break;
5890
5891 CASE_FLT_FN (BUILT_IN_ATAN2):
5892 CASE_FLT_FN (BUILT_IN_LDEXP):
5893 CASE_FLT_FN (BUILT_IN_SCALB):
5894 CASE_FLT_FN (BUILT_IN_SCALBN):
5895 CASE_FLT_FN (BUILT_IN_SCALBLN):
5896 if (! flag_unsafe_math_optimizations)
5897 break;
5898
5899 CASE_FLT_FN (BUILT_IN_FMOD):
5900 CASE_FLT_FN (BUILT_IN_REMAINDER):
5901 CASE_FLT_FN (BUILT_IN_DREM):
5902 CASE_FLT_FN (BUILT_IN_POW):
5903 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5904 if (target)
5905 return target;
5906 break;
5907
5908 CASE_FLT_FN (BUILT_IN_CEXPI):
5909 target = expand_builtin_cexpi (exp, target);
5910 gcc_assert (target);
5911 return target;
5912
5913 CASE_FLT_FN (BUILT_IN_SIN):
5914 CASE_FLT_FN (BUILT_IN_COS):
5915 if (! flag_unsafe_math_optimizations)
5916 break;
5917 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5918 if (target)
5919 return target;
5920 break;
5921
5922 CASE_FLT_FN (BUILT_IN_SINCOS):
5923 if (! flag_unsafe_math_optimizations)
5924 break;
5925 target = expand_builtin_sincos (exp);
5926 if (target)
5927 return target;
5928 break;
5929
5930 case BUILT_IN_APPLY_ARGS:
5931 return expand_builtin_apply_args ();
5932
5933 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5934 FUNCTION with a copy of the parameters described by
5935 ARGUMENTS, and ARGSIZE. It returns a block of memory
5936 allocated on the stack into which is stored all the registers
5937 that might possibly be used for returning the result of a
5938 function. ARGUMENTS is the value returned by
5939 __builtin_apply_args. ARGSIZE is the number of bytes of
5940 arguments that must be copied. ??? How should this value be
5941 computed? We'll also need a safe worst case value for varargs
5942 functions. */
5943 case BUILT_IN_APPLY:
5944 if (!validate_arglist (exp, POINTER_TYPE,
5945 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5946 && !validate_arglist (exp, REFERENCE_TYPE,
5947 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5948 return const0_rtx;
5949 else
5950 {
5951 rtx ops[3];
5952
5953 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5954 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5955 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5956
5957 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5958 }
5959
5960 /* __builtin_return (RESULT) causes the function to return the
5961 value described by RESULT. RESULT is address of the block of
5962 memory returned by __builtin_apply. */
5963 case BUILT_IN_RETURN:
5964 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5965 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5966 return const0_rtx;
5967
5968 case BUILT_IN_SAVEREGS:
5969 return expand_builtin_saveregs ();
5970
5971 case BUILT_IN_VA_ARG_PACK:
5972 /* All valid uses of __builtin_va_arg_pack () are removed during
5973 inlining. */
5974 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5975 return const0_rtx;
5976
5977 case BUILT_IN_VA_ARG_PACK_LEN:
5978 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5979 inlining. */
5980 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5981 return const0_rtx;
5982
5983 /* Return the address of the first anonymous stack arg. */
5984 case BUILT_IN_NEXT_ARG:
5985 if (fold_builtin_next_arg (exp, false))
5986 return const0_rtx;
5987 return expand_builtin_next_arg ();
5988
5989 case BUILT_IN_CLEAR_CACHE:
5990 target = expand_builtin___clear_cache (exp);
5991 if (target)
5992 return target;
5993 break;
5994
5995 case BUILT_IN_CLASSIFY_TYPE:
5996 return expand_builtin_classify_type (exp);
5997
5998 case BUILT_IN_CONSTANT_P:
5999 return const0_rtx;
6000
6001 case BUILT_IN_FRAME_ADDRESS:
6002 case BUILT_IN_RETURN_ADDRESS:
6003 return expand_builtin_frame_address (fndecl, exp);
6004
6005 /* Returns the address of the area where the structure is returned.
6006 0 otherwise. */
6007 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6008 if (call_expr_nargs (exp) != 0
6009 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6010 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6011 return const0_rtx;
6012 else
6013 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6014
6015 case BUILT_IN_ALLOCA:
6016 case BUILT_IN_ALLOCA_WITH_ALIGN:
6017 /* If the allocation stems from the declaration of a variable-sized
6018 object, it cannot accumulate. */
6019 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6020 if (target)
6021 return target;
6022 break;
6023
6024 case BUILT_IN_STACK_SAVE:
6025 return expand_stack_save ();
6026
6027 case BUILT_IN_STACK_RESTORE:
6028 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6029 return const0_rtx;
6030
6031 case BUILT_IN_BSWAP16:
6032 case BUILT_IN_BSWAP32:
6033 case BUILT_IN_BSWAP64:
6034 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6035 if (target)
6036 return target;
6037 break;
6038
6039 CASE_INT_FN (BUILT_IN_FFS):
6040 target = expand_builtin_unop (target_mode, exp, target,
6041 subtarget, ffs_optab);
6042 if (target)
6043 return target;
6044 break;
6045
6046 CASE_INT_FN (BUILT_IN_CLZ):
6047 target = expand_builtin_unop (target_mode, exp, target,
6048 subtarget, clz_optab);
6049 if (target)
6050 return target;
6051 break;
6052
6053 CASE_INT_FN (BUILT_IN_CTZ):
6054 target = expand_builtin_unop (target_mode, exp, target,
6055 subtarget, ctz_optab);
6056 if (target)
6057 return target;
6058 break;
6059
6060 CASE_INT_FN (BUILT_IN_CLRSB):
6061 target = expand_builtin_unop (target_mode, exp, target,
6062 subtarget, clrsb_optab);
6063 if (target)
6064 return target;
6065 break;
6066
6067 CASE_INT_FN (BUILT_IN_POPCOUNT):
6068 target = expand_builtin_unop (target_mode, exp, target,
6069 subtarget, popcount_optab);
6070 if (target)
6071 return target;
6072 break;
6073
6074 CASE_INT_FN (BUILT_IN_PARITY):
6075 target = expand_builtin_unop (target_mode, exp, target,
6076 subtarget, parity_optab);
6077 if (target)
6078 return target;
6079 break;
6080
6081 case BUILT_IN_STRLEN:
6082 target = expand_builtin_strlen (exp, target, target_mode);
6083 if (target)
6084 return target;
6085 break;
6086
6087 case BUILT_IN_STRCPY:
6088 target = expand_builtin_strcpy (exp, target);
6089 if (target)
6090 return target;
6091 break;
6092
6093 case BUILT_IN_STRNCPY:
6094 target = expand_builtin_strncpy (exp, target);
6095 if (target)
6096 return target;
6097 break;
6098
6099 case BUILT_IN_STPCPY:
6100 target = expand_builtin_stpcpy (exp, target, mode);
6101 if (target)
6102 return target;
6103 break;
6104
6105 case BUILT_IN_MEMCPY:
6106 target = expand_builtin_memcpy (exp, target);
6107 if (target)
6108 return target;
6109 break;
6110
6111 case BUILT_IN_MEMPCPY:
6112 target = expand_builtin_mempcpy (exp, target, mode);
6113 if (target)
6114 return target;
6115 break;
6116
6117 case BUILT_IN_MEMSET:
6118 target = expand_builtin_memset (exp, target, mode);
6119 if (target)
6120 return target;
6121 break;
6122
6123 case BUILT_IN_BZERO:
6124 target = expand_builtin_bzero (exp);
6125 if (target)
6126 return target;
6127 break;
6128
6129 case BUILT_IN_STRCMP:
6130 target = expand_builtin_strcmp (exp, target);
6131 if (target)
6132 return target;
6133 break;
6134
6135 case BUILT_IN_STRNCMP:
6136 target = expand_builtin_strncmp (exp, target, mode);
6137 if (target)
6138 return target;
6139 break;
6140
6141 case BUILT_IN_BCMP:
6142 case BUILT_IN_MEMCMP:
6143 target = expand_builtin_memcmp (exp, target, mode);
6144 if (target)
6145 return target;
6146 break;
6147
6148 case BUILT_IN_SETJMP:
6149 /* This should have been lowered to the builtins below. */
6150 gcc_unreachable ();
6151
6152 case BUILT_IN_SETJMP_SETUP:
6153 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6154 and the receiver label. */
6155 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6156 {
6157 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6158 VOIDmode, EXPAND_NORMAL);
6159 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6160 rtx label_r = label_rtx (label);
6161
6162 /* This is copied from the handling of non-local gotos. */
6163 expand_builtin_setjmp_setup (buf_addr, label_r);
6164 nonlocal_goto_handler_labels
6165 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6166 nonlocal_goto_handler_labels);
6167 /* ??? Do not let expand_label treat us as such since we would
6168 not want to be both on the list of non-local labels and on
6169 the list of forced labels. */
6170 FORCED_LABEL (label) = 0;
6171 return const0_rtx;
6172 }
6173 break;
6174
6175 case BUILT_IN_SETJMP_DISPATCHER:
6176 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6177 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6178 {
6179 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6180 rtx label_r = label_rtx (label);
6181
6182 /* Remove the dispatcher label from the list of non-local labels
6183 since the receiver labels have been added to it above. */
6184 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6185 return const0_rtx;
6186 }
6187 break;
6188
6189 case BUILT_IN_SETJMP_RECEIVER:
6190 /* __builtin_setjmp_receiver is passed the receiver label. */
6191 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6192 {
6193 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6194 rtx label_r = label_rtx (label);
6195
6196 expand_builtin_setjmp_receiver (label_r);
6197 return const0_rtx;
6198 }
6199 break;
6200
6201 /* __builtin_longjmp is passed a pointer to an array of five words.
6202 It's similar to the C library longjmp function but works with
6203 __builtin_setjmp above. */
6204 case BUILT_IN_LONGJMP:
6205 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6206 {
6207 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6208 VOIDmode, EXPAND_NORMAL);
6209 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6210
6211 if (value != const1_rtx)
6212 {
6213 error ("%<__builtin_longjmp%> second argument must be 1");
6214 return const0_rtx;
6215 }
6216
6217 expand_builtin_longjmp (buf_addr, value);
6218 return const0_rtx;
6219 }
6220 break;
6221
6222 case BUILT_IN_NONLOCAL_GOTO:
6223 target = expand_builtin_nonlocal_goto (exp);
6224 if (target)
6225 return target;
6226 break;
6227
6228 /* This updates the setjmp buffer that is its argument with the value
6229 of the current stack pointer. */
6230 case BUILT_IN_UPDATE_SETJMP_BUF:
6231 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6232 {
6233 rtx buf_addr
6234 = expand_normal (CALL_EXPR_ARG (exp, 0));
6235
6236 expand_builtin_update_setjmp_buf (buf_addr);
6237 return const0_rtx;
6238 }
6239 break;
6240
6241 case BUILT_IN_TRAP:
6242 expand_builtin_trap ();
6243 return const0_rtx;
6244
6245 case BUILT_IN_UNREACHABLE:
6246 expand_builtin_unreachable ();
6247 return const0_rtx;
6248
6249 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6250 case BUILT_IN_SIGNBITD32:
6251 case BUILT_IN_SIGNBITD64:
6252 case BUILT_IN_SIGNBITD128:
6253 target = expand_builtin_signbit (exp, target);
6254 if (target)
6255 return target;
6256 break;
6257
6258 /* Various hooks for the DWARF 2 __throw routine. */
6259 case BUILT_IN_UNWIND_INIT:
6260 expand_builtin_unwind_init ();
6261 return const0_rtx;
6262 case BUILT_IN_DWARF_CFA:
6263 return virtual_cfa_rtx;
6264 #ifdef DWARF2_UNWIND_INFO
6265 case BUILT_IN_DWARF_SP_COLUMN:
6266 return expand_builtin_dwarf_sp_column ();
6267 case BUILT_IN_INIT_DWARF_REG_SIZES:
6268 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6269 return const0_rtx;
6270 #endif
6271 case BUILT_IN_FROB_RETURN_ADDR:
6272 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6273 case BUILT_IN_EXTRACT_RETURN_ADDR:
6274 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6275 case BUILT_IN_EH_RETURN:
6276 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6277 CALL_EXPR_ARG (exp, 1));
6278 return const0_rtx;
6279 #ifdef EH_RETURN_DATA_REGNO
6280 case BUILT_IN_EH_RETURN_DATA_REGNO:
6281 return expand_builtin_eh_return_data_regno (exp);
6282 #endif
6283 case BUILT_IN_EXTEND_POINTER:
6284 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6285 case BUILT_IN_EH_POINTER:
6286 return expand_builtin_eh_pointer (exp);
6287 case BUILT_IN_EH_FILTER:
6288 return expand_builtin_eh_filter (exp);
6289 case BUILT_IN_EH_COPY_VALUES:
6290 return expand_builtin_eh_copy_values (exp);
6291
6292 case BUILT_IN_VA_START:
6293 return expand_builtin_va_start (exp);
6294 case BUILT_IN_VA_END:
6295 return expand_builtin_va_end (exp);
6296 case BUILT_IN_VA_COPY:
6297 return expand_builtin_va_copy (exp);
6298 case BUILT_IN_EXPECT:
6299 return expand_builtin_expect (exp, target);
6300 case BUILT_IN_ASSUME_ALIGNED:
6301 return expand_builtin_assume_aligned (exp, target);
6302 case BUILT_IN_PREFETCH:
6303 expand_builtin_prefetch (exp);
6304 return const0_rtx;
6305
6306 case BUILT_IN_INIT_TRAMPOLINE:
6307 return expand_builtin_init_trampoline (exp, true);
6308 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6309 return expand_builtin_init_trampoline (exp, false);
6310 case BUILT_IN_ADJUST_TRAMPOLINE:
6311 return expand_builtin_adjust_trampoline (exp);
6312
6313 case BUILT_IN_FORK:
6314 case BUILT_IN_EXECL:
6315 case BUILT_IN_EXECV:
6316 case BUILT_IN_EXECLP:
6317 case BUILT_IN_EXECLE:
6318 case BUILT_IN_EXECVP:
6319 case BUILT_IN_EXECVE:
6320 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6321 if (target)
6322 return target;
6323 break;
6324
6325 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6326 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6327 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6328 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6329 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6330 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6331 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6332 if (target)
6333 return target;
6334 break;
6335
6336 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6337 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6338 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6339 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6340 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6341 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6342 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6343 if (target)
6344 return target;
6345 break;
6346
6347 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6348 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6349 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6350 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6351 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6352 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6353 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6354 if (target)
6355 return target;
6356 break;
6357
6358 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6359 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6360 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6361 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6362 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6363 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6364 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6365 if (target)
6366 return target;
6367 break;
6368
6369 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6370 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6371 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6372 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6373 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6374 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6375 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6376 if (target)
6377 return target;
6378 break;
6379
6380 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6381 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6382 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6383 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6384 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6385 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6386 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6387 if (target)
6388 return target;
6389 break;
6390
6391 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6392 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6393 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6394 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6395 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6396 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6397 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6398 if (target)
6399 return target;
6400 break;
6401
6402 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6403 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6404 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6405 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6406 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6407 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6408 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6409 if (target)
6410 return target;
6411 break;
6412
6413 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6414 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6415 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6416 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6417 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6418 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6419 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6420 if (target)
6421 return target;
6422 break;
6423
6424 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6425 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6426 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6427 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6428 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6429 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6430 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6431 if (target)
6432 return target;
6433 break;
6434
6435 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6436 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6437 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6438 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6439 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6440 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6441 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6442 if (target)
6443 return target;
6444 break;
6445
6446 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6447 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6448 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6449 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6450 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6451 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6452 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6453 if (target)
6454 return target;
6455 break;
6456
6457 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6458 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6459 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6460 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6461 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6462 if (mode == VOIDmode)
6463 mode = TYPE_MODE (boolean_type_node);
6464 if (!target || !register_operand (target, mode))
6465 target = gen_reg_rtx (mode);
6466
6467 mode = get_builtin_sync_mode
6468 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6469 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6470 if (target)
6471 return target;
6472 break;
6473
6474 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6475 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6476 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6477 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6478 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6479 mode = get_builtin_sync_mode
6480 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6481 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6482 if (target)
6483 return target;
6484 break;
6485
6486 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6487 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6488 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6489 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6490 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6491 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6492 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6493 if (target)
6494 return target;
6495 break;
6496
6497 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6498 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6499 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6500 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6501 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6502 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6503 expand_builtin_sync_lock_release (mode, exp);
6504 return const0_rtx;
6505
6506 case BUILT_IN_SYNC_SYNCHRONIZE:
6507 expand_builtin_sync_synchronize ();
6508 return const0_rtx;
6509
6510 case BUILT_IN_ATOMIC_EXCHANGE_1:
6511 case BUILT_IN_ATOMIC_EXCHANGE_2:
6512 case BUILT_IN_ATOMIC_EXCHANGE_4:
6513 case BUILT_IN_ATOMIC_EXCHANGE_8:
6514 case BUILT_IN_ATOMIC_EXCHANGE_16:
6515 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6516 target = expand_builtin_atomic_exchange (mode, exp, target);
6517 if (target)
6518 return target;
6519 break;
6520
6521 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6522 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6523 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6524 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6525 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6526 {
6527 unsigned int nargs, z;
6528 vec<tree, va_gc> *vec;
6529
6530 mode =
6531 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6532 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6533 if (target)
6534 return target;
6535
6536 /* If this is turned into an external library call, the weak parameter
6537 must be dropped to match the expected parameter list. */
6538 nargs = call_expr_nargs (exp);
6539 vec_alloc (vec, nargs - 1);
6540 for (z = 0; z < 3; z++)
6541 vec->quick_push (CALL_EXPR_ARG (exp, z));
6542 /* Skip the boolean weak parameter. */
6543 for (z = 4; z < 6; z++)
6544 vec->quick_push (CALL_EXPR_ARG (exp, z));
6545 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6546 break;
6547 }
6548
6549 case BUILT_IN_ATOMIC_LOAD_1:
6550 case BUILT_IN_ATOMIC_LOAD_2:
6551 case BUILT_IN_ATOMIC_LOAD_4:
6552 case BUILT_IN_ATOMIC_LOAD_8:
6553 case BUILT_IN_ATOMIC_LOAD_16:
6554 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6555 target = expand_builtin_atomic_load (mode, exp, target);
6556 if (target)
6557 return target;
6558 break;
6559
6560 case BUILT_IN_ATOMIC_STORE_1:
6561 case BUILT_IN_ATOMIC_STORE_2:
6562 case BUILT_IN_ATOMIC_STORE_4:
6563 case BUILT_IN_ATOMIC_STORE_8:
6564 case BUILT_IN_ATOMIC_STORE_16:
6565 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6566 target = expand_builtin_atomic_store (mode, exp);
6567 if (target)
6568 return const0_rtx;
6569 break;
6570
6571 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6572 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6573 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6574 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6575 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6576 {
6577 enum built_in_function lib;
6578 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6579 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6580 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6581 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6582 ignore, lib);
6583 if (target)
6584 return target;
6585 break;
6586 }
6587 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6588 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6589 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6590 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6591 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6592 {
6593 enum built_in_function lib;
6594 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6595 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6596 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6597 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6598 ignore, lib);
6599 if (target)
6600 return target;
6601 break;
6602 }
6603 case BUILT_IN_ATOMIC_AND_FETCH_1:
6604 case BUILT_IN_ATOMIC_AND_FETCH_2:
6605 case BUILT_IN_ATOMIC_AND_FETCH_4:
6606 case BUILT_IN_ATOMIC_AND_FETCH_8:
6607 case BUILT_IN_ATOMIC_AND_FETCH_16:
6608 {
6609 enum built_in_function lib;
6610 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6611 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6612 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6613 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6614 ignore, lib);
6615 if (target)
6616 return target;
6617 break;
6618 }
6619 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6620 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6621 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6622 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6623 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6624 {
6625 enum built_in_function lib;
6626 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6627 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6628 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6629 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6630 ignore, lib);
6631 if (target)
6632 return target;
6633 break;
6634 }
6635 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6636 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6637 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6638 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6639 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6640 {
6641 enum built_in_function lib;
6642 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6643 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6644 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6645 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6646 ignore, lib);
6647 if (target)
6648 return target;
6649 break;
6650 }
6651 case BUILT_IN_ATOMIC_OR_FETCH_1:
6652 case BUILT_IN_ATOMIC_OR_FETCH_2:
6653 case BUILT_IN_ATOMIC_OR_FETCH_4:
6654 case BUILT_IN_ATOMIC_OR_FETCH_8:
6655 case BUILT_IN_ATOMIC_OR_FETCH_16:
6656 {
6657 enum built_in_function lib;
6658 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6659 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6660 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6661 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6662 ignore, lib);
6663 if (target)
6664 return target;
6665 break;
6666 }
6667 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6668 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6669 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6670 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6671 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6672 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6673 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6674 ignore, BUILT_IN_NONE);
6675 if (target)
6676 return target;
6677 break;
6678
6679 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6680 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6681 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6682 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6683 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6684 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6685 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6686 ignore, BUILT_IN_NONE);
6687 if (target)
6688 return target;
6689 break;
6690
6691 case BUILT_IN_ATOMIC_FETCH_AND_1:
6692 case BUILT_IN_ATOMIC_FETCH_AND_2:
6693 case BUILT_IN_ATOMIC_FETCH_AND_4:
6694 case BUILT_IN_ATOMIC_FETCH_AND_8:
6695 case BUILT_IN_ATOMIC_FETCH_AND_16:
6696 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6697 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6698 ignore, BUILT_IN_NONE);
6699 if (target)
6700 return target;
6701 break;
6702
6703 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6704 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6705 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6706 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6707 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6708 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6709 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6710 ignore, BUILT_IN_NONE);
6711 if (target)
6712 return target;
6713 break;
6714
6715 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6716 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6717 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6718 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6719 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6720 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6721 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6722 ignore, BUILT_IN_NONE);
6723 if (target)
6724 return target;
6725 break;
6726
6727 case BUILT_IN_ATOMIC_FETCH_OR_1:
6728 case BUILT_IN_ATOMIC_FETCH_OR_2:
6729 case BUILT_IN_ATOMIC_FETCH_OR_4:
6730 case BUILT_IN_ATOMIC_FETCH_OR_8:
6731 case BUILT_IN_ATOMIC_FETCH_OR_16:
6732 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6733 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6734 ignore, BUILT_IN_NONE);
6735 if (target)
6736 return target;
6737 break;
6738
6739 case BUILT_IN_ATOMIC_TEST_AND_SET:
6740 return expand_builtin_atomic_test_and_set (exp, target);
6741
6742 case BUILT_IN_ATOMIC_CLEAR:
6743 return expand_builtin_atomic_clear (exp);
6744
6745 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6746 return expand_builtin_atomic_always_lock_free (exp);
6747
6748 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6749 target = expand_builtin_atomic_is_lock_free (exp);
6750 if (target)
6751 return target;
6752 break;
6753
6754 case BUILT_IN_ATOMIC_THREAD_FENCE:
6755 expand_builtin_atomic_thread_fence (exp);
6756 return const0_rtx;
6757
6758 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6759 expand_builtin_atomic_signal_fence (exp);
6760 return const0_rtx;
6761
6762 case BUILT_IN_OBJECT_SIZE:
6763 return expand_builtin_object_size (exp);
6764
6765 case BUILT_IN_MEMCPY_CHK:
6766 case BUILT_IN_MEMPCPY_CHK:
6767 case BUILT_IN_MEMMOVE_CHK:
6768 case BUILT_IN_MEMSET_CHK:
6769 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6770 if (target)
6771 return target;
6772 break;
6773
6774 case BUILT_IN_STRCPY_CHK:
6775 case BUILT_IN_STPCPY_CHK:
6776 case BUILT_IN_STRNCPY_CHK:
6777 case BUILT_IN_STPNCPY_CHK:
6778 case BUILT_IN_STRCAT_CHK:
6779 case BUILT_IN_STRNCAT_CHK:
6780 case BUILT_IN_SNPRINTF_CHK:
6781 case BUILT_IN_VSNPRINTF_CHK:
6782 maybe_emit_chk_warning (exp, fcode);
6783 break;
6784
6785 case BUILT_IN_SPRINTF_CHK:
6786 case BUILT_IN_VSPRINTF_CHK:
6787 maybe_emit_sprintf_chk_warning (exp, fcode);
6788 break;
6789
6790 case BUILT_IN_FREE:
6791 if (warn_free_nonheap_object)
6792 maybe_emit_free_warning (exp);
6793 break;
6794
6795 case BUILT_IN_THREAD_POINTER:
6796 return expand_builtin_thread_pointer (exp, target);
6797
6798 case BUILT_IN_SET_THREAD_POINTER:
6799 expand_builtin_set_thread_pointer (exp);
6800 return const0_rtx;
6801
6802 default: /* Just emit a normal library call for an unknown builtin.  */
6803 break;
6804 }
6805
6806 /* The switch statement above can drop through to cause the function
6807 to be called normally. */
6808 return expand_call (exp, target, ignore);
6809 }
6810
6811 /* Determine whether a tree node represents a call to a built-in
6812 function. If the tree T is a call to a built-in function with
6813 the right number of arguments of the appropriate types, return
6814 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6815 Otherwise the return value is END_BUILTINS. */
6816
6817 enum built_in_function
6818 builtin_mathfn_code (const_tree t)
6819 {
6820 const_tree fndecl, arg, parmlist;
6821 const_tree argtype, parmtype;
6822 const_call_expr_arg_iterator iter;
6823
6824 if (TREE_CODE (t) != CALL_EXPR
6825 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6826 return END_BUILTINS;
6827
6828 fndecl = get_callee_fndecl (t);
6829 if (fndecl == NULL_TREE
6830 || TREE_CODE (fndecl) != FUNCTION_DECL
6831 || ! DECL_BUILT_IN (fndecl)
6832 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6833 return END_BUILTINS;
6834
6835 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6836 init_const_call_expr_arg_iterator (t, &iter);
6837 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6838 {
6839 /* If a function doesn't take a variable number of arguments,
6840 the last element in the list will have type `void'. */
6841 parmtype = TREE_VALUE (parmlist);
6842 if (VOID_TYPE_P (parmtype))
6843 {
6844 if (more_const_call_expr_args_p (&iter))
6845 return END_BUILTINS;
6846 return DECL_FUNCTION_CODE (fndecl);
6847 }
6848
6849 if (! more_const_call_expr_args_p (&iter))
6850 return END_BUILTINS;
6851
6852 arg = next_const_call_expr_arg (&iter);
6853 argtype = TREE_TYPE (arg);
6854
6855 if (SCALAR_FLOAT_TYPE_P (parmtype))
6856 {
6857 if (! SCALAR_FLOAT_TYPE_P (argtype))
6858 return END_BUILTINS;
6859 }
6860 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6861 {
6862 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6863 return END_BUILTINS;
6864 }
6865 else if (POINTER_TYPE_P (parmtype))
6866 {
6867 if (! POINTER_TYPE_P (argtype))
6868 return END_BUILTINS;
6869 }
6870 else if (INTEGRAL_TYPE_P (parmtype))
6871 {
6872 if (! INTEGRAL_TYPE_P (argtype))
6873 return END_BUILTINS;
6874 }
6875 else
6876 return END_BUILTINS;
6877 }
6878
6879 /* Variable-length argument list. */
6880 return DECL_FUNCTION_CODE (fndecl);
6881 }
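
/* Illustrative sketch, not part of the original sources: for a GENERIC
   tree built from the C call
     double r = sqrt (x);
   where x is a double, builtin_mathfn_code returns BUILT_IN_SQRT, while
   any mismatch against the builtin's prototype yields END_BUILTINS.  */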
6882
6883 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6884 evaluate to a constant. */
6885
6886 static tree
6887 fold_builtin_constant_p (tree arg)
6888 {
6889 /* We return 1 for a numeric type that's known to be a constant
6890 value at compile-time or for an aggregate type that's a
6891 literal constant. */
6892 STRIP_NOPS (arg);
6893
6894 /* If we know the argument is a constant, return the constant one.  */
6895 if (CONSTANT_CLASS_P (arg)
6896 || (TREE_CODE (arg) == CONSTRUCTOR
6897 && TREE_CONSTANT (arg)))
6898 return integer_one_node;
6899 if (TREE_CODE (arg) == ADDR_EXPR)
6900 {
6901 tree op = TREE_OPERAND (arg, 0);
6902 if (TREE_CODE (op) == STRING_CST
6903 || (TREE_CODE (op) == ARRAY_REF
6904 && integer_zerop (TREE_OPERAND (op, 1))
6905 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6906 return integer_one_node;
6907 }
6908
6909 /* If this expression has side effects, show we don't know it to be a
6910 constant. Likewise if it's a pointer or aggregate type since in
6911 those cases we only want literals, since those are only optimized
6912 when generating RTL, not later.
6913 And finally, if we are compiling an initializer, not code, we
6914 need to return a definite result now; there's not going to be any
6915 more optimization done. */
6916 if (TREE_SIDE_EFFECTS (arg)
6917 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6918 || POINTER_TYPE_P (TREE_TYPE (arg))
6919 || cfun == 0
6920 || folding_initializer
6921 || force_folding_builtin_constant_p)
6922 return integer_zero_node;
6923
6924 return NULL_TREE;
6925 }
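
/* Illustrative sketch, not part of the original sources: the folding
   above behaves as
     __builtin_constant_p (42)     -> 1  (constant class)
     __builtin_constant_p ("abc")  -> 1  (address of a string literal)
     __builtin_constant_p (ptr)    -> 0  once no further folding can
                                         prove the pointer constant
   and returns NULL_TREE while the answer could still improve.  */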
6926
6927 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6928 return it as a truthvalue. */
6929
6930 static tree
6931 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6932 {
6933 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6934
6935 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6936 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6937 ret_type = TREE_TYPE (TREE_TYPE (fn));
6938 pred_type = TREE_VALUE (arg_types);
6939 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6940
6941 pred = fold_convert_loc (loc, pred_type, pred);
6942 expected = fold_convert_loc (loc, expected_type, expected);
6943 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6944
6945 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6946 build_int_cst (ret_type, 0));
6947 }
6948
6949 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6950 NULL_TREE if no simplification is possible. */
6951
6952 static tree
6953 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6954 {
6955 tree inner, fndecl, inner_arg0;
6956 enum tree_code code;
6957
6958 /* Distribute the expected value over short-circuiting operators.
6959 See through the cast from truthvalue_type_node to long. */
6960 inner_arg0 = arg0;
6961 while (TREE_CODE (inner_arg0) == NOP_EXPR
6962 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6963 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6964 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6965
6966 /* If this is a builtin_expect within a builtin_expect keep the
6967 inner one. See through a comparison against a constant. It
6968 might have been added to create a truthvalue.  */
6969 inner = inner_arg0;
6970
6971 if (COMPARISON_CLASS_P (inner)
6972 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6973 inner = TREE_OPERAND (inner, 0);
6974
6975 if (TREE_CODE (inner) == CALL_EXPR
6976 && (fndecl = get_callee_fndecl (inner))
6977 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6978 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6979 return arg0;
6980
6981 inner = inner_arg0;
6982 code = TREE_CODE (inner);
6983 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6984 {
6985 tree op0 = TREE_OPERAND (inner, 0);
6986 tree op1 = TREE_OPERAND (inner, 1);
6987
6988 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6989 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6990 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6991
6992 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6993 }
6994
6995 /* If the argument isn't invariant then there's nothing else we can do. */
6996 if (!TREE_CONSTANT (inner_arg0))
6997 return NULL_TREE;
6998
6999 /* If we expect that a comparison against the argument will fold to
7000 a constant return the constant. In practice, this means a true
7001 constant or the address of a non-weak symbol. */
7002 inner = inner_arg0;
7003 STRIP_NOPS (inner);
7004 if (TREE_CODE (inner) == ADDR_EXPR)
7005 {
7006 do
7007 {
7008 inner = TREE_OPERAND (inner, 0);
7009 }
7010 while (TREE_CODE (inner) == COMPONENT_REF
7011 || TREE_CODE (inner) == ARRAY_REF);
7012 if ((TREE_CODE (inner) == VAR_DECL
7013 || TREE_CODE (inner) == FUNCTION_DECL)
7014 && DECL_WEAK (inner))
7015 return NULL_TREE;
7016 }
7017
7018 /* Otherwise, ARG0 already has the proper type for the return value. */
7019 return arg0;
7020 }
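
/* Illustrative sketch, not part of the original sources: the
   distribution above rewrites
     __builtin_expect (a && b, 1)
   into the equivalent of
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
   so each arm of the short-circuit carries its own hint.  */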
7021
7022 /* Fold a call to __builtin_classify_type with argument ARG. */
7023
7024 static tree
7025 fold_builtin_classify_type (tree arg)
7026 {
7027 if (arg == 0)
7028 return build_int_cst (integer_type_node, no_type_class);
7029
7030 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7031 }
7032
7033 /* Fold a call to __builtin_strlen with argument ARG. */
7034
7035 static tree
7036 fold_builtin_strlen (location_t loc, tree type, tree arg)
7037 {
7038 if (!validate_arg (arg, POINTER_TYPE))
7039 return NULL_TREE;
7040 else
7041 {
7042 tree len = c_strlen (arg, 0);
7043
7044 if (len)
7045 return fold_convert_loc (loc, type, len);
7046
7047 return NULL_TREE;
7048 }
7049 }
7050
7051 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7052
7053 static tree
7054 fold_builtin_inf (location_t loc, tree type, int warn)
7055 {
7056 REAL_VALUE_TYPE real;
7057
7058 /* __builtin_inff is intended to be usable to define INFINITY on all
7059 targets. If an infinity is not available, INFINITY expands "to a
7060 positive constant of type float that overflows at translation
7061 time", footnote "In this case, using INFINITY will violate the
7062 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7063 Thus we pedwarn to ensure this constraint violation is
7064 diagnosed. */
7065 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7066 pedwarn (loc, 0, "target format does not support infinity");
7067
7068 real_inf (&real);
7069 return build_real (type, real);
7070 }
7071
7072 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7073
7074 static tree
7075 fold_builtin_nan (tree arg, tree type, int quiet)
7076 {
7077 REAL_VALUE_TYPE real;
7078 const char *str;
7079
7080 if (!validate_arg (arg, POINTER_TYPE))
7081 return NULL_TREE;
7082 str = c_getstr (arg);
7083 if (!str)
7084 return NULL_TREE;
7085
7086 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7087 return NULL_TREE;
7088
7089 return build_real (type, real);
7090 }
7091
7092 /* Return true if the floating point expression T has an integer value.
7093 We also allow +Inf, -Inf and NaN to be considered integer values. */
7094
7095 static bool
7096 integer_valued_real_p (tree t)
7097 {
7098 switch (TREE_CODE (t))
7099 {
7100 case FLOAT_EXPR:
7101 return true;
7102
7103 case ABS_EXPR:
7104 case SAVE_EXPR:
7105 return integer_valued_real_p (TREE_OPERAND (t, 0));
7106
7107 case COMPOUND_EXPR:
7108 case MODIFY_EXPR:
7109 case BIND_EXPR:
7110 return integer_valued_real_p (TREE_OPERAND (t, 1));
7111
7112 case PLUS_EXPR:
7113 case MINUS_EXPR:
7114 case MULT_EXPR:
7115 case MIN_EXPR:
7116 case MAX_EXPR:
7117 return integer_valued_real_p (TREE_OPERAND (t, 0))
7118 && integer_valued_real_p (TREE_OPERAND (t, 1));
7119
7120 case COND_EXPR:
7121 return integer_valued_real_p (TREE_OPERAND (t, 1))
7122 && integer_valued_real_p (TREE_OPERAND (t, 2));
7123
7124 case REAL_CST:
7125 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7126
7127 case NOP_EXPR:
7128 {
7129 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7130 if (TREE_CODE (type) == INTEGER_TYPE)
7131 return true;
7132 if (TREE_CODE (type) == REAL_TYPE)
7133 return integer_valued_real_p (TREE_OPERAND (t, 0));
7134 break;
7135 }
7136
7137 case CALL_EXPR:
7138 switch (builtin_mathfn_code (t))
7139 {
7140 CASE_FLT_FN (BUILT_IN_CEIL):
7141 CASE_FLT_FN (BUILT_IN_FLOOR):
7142 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7143 CASE_FLT_FN (BUILT_IN_RINT):
7144 CASE_FLT_FN (BUILT_IN_ROUND):
7145 CASE_FLT_FN (BUILT_IN_TRUNC):
7146 return true;
7147
7148 CASE_FLT_FN (BUILT_IN_FMIN):
7149 CASE_FLT_FN (BUILT_IN_FMAX):
7150 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7151 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7152
7153 default:
7154 break;
7155 }
7156 break;
7157
7158 default:
7159 break;
7160 }
7161 return false;
7162 }
7163
7164 /* FNDECL is assumed to be a builtin where truncation can be propagated
7165 across (for instance floor((double)f) == (double)floorf (f)).
7166 Do the transformation for a call with argument ARG. */
7167
7168 static tree
7169 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7170 {
7171 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7172
7173 if (!validate_arg (arg, REAL_TYPE))
7174 return NULL_TREE;
7175
7176 /* Integer rounding functions are idempotent. */
7177 if (fcode == builtin_mathfn_code (arg))
7178 return arg;
7179
7180 /* If argument is already integer valued, and we don't need to worry
7181 about setting errno, there's no need to perform rounding. */
7182 if (! flag_errno_math && integer_valued_real_p (arg))
7183 return arg;
7184
7185 if (optimize)
7186 {
7187 tree arg0 = strip_float_extensions (arg);
7188 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7189 tree newtype = TREE_TYPE (arg0);
7190 tree decl;
7191
7192 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7193 && (decl = mathfn_built_in (newtype, fcode)))
7194 return fold_convert_loc (loc, ftype,
7195 build_call_expr_loc (loc, decl, 1,
7196 fold_convert_loc (loc,
7197 newtype,
7198 arg0)));
7199 }
7200 return NULL_TREE;
7201 }
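
/* Illustrative sketch, not part of the original sources: with
   optimization enabled and a float F, the narrowing above rewrites
     floor ((double) F)
   as
     (double) floorf (F)
   since flooring the extended value yields the same result as
   flooring F itself.  */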
7202
7203 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7204 the argument, for instance lround((double)f) -> lroundf (f).
7205 Do the transformation for a call with argument ARG. */
7206
7207 static tree
7208 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7209 {
7210 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7211
7212 if (!validate_arg (arg, REAL_TYPE))
7213 return NULL_TREE;
7214
7215 /* If argument is already integer valued, and we don't need to worry
7216 about setting errno, there's no need to perform rounding. */
7217 if (! flag_errno_math && integer_valued_real_p (arg))
7218 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7219 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7220
7221 if (optimize)
7222 {
7223 tree ftype = TREE_TYPE (arg);
7224 tree arg0 = strip_float_extensions (arg);
7225 tree newtype = TREE_TYPE (arg0);
7226 tree decl;
7227
7228 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7229 && (decl = mathfn_built_in (newtype, fcode)))
7230 return build_call_expr_loc (loc, decl, 1,
7231 fold_convert_loc (loc, newtype, arg0));
7232 }
7233
7234 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7235 sizeof (int) == sizeof (long). */
7236 if (TYPE_PRECISION (integer_type_node)
7237 == TYPE_PRECISION (long_integer_type_node))
7238 {
7239 tree newfn = NULL_TREE;
7240 switch (fcode)
7241 {
7242 CASE_FLT_FN (BUILT_IN_ICEIL):
7243 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7244 break;
7245
7246 CASE_FLT_FN (BUILT_IN_IFLOOR):
7247 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7248 break;
7249
7250 CASE_FLT_FN (BUILT_IN_IROUND):
7251 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7252 break;
7253
7254 CASE_FLT_FN (BUILT_IN_IRINT):
7255 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7256 break;
7257
7258 default:
7259 break;
7260 }
7261
7262 if (newfn)
7263 {
7264 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7265 return fold_convert_loc (loc,
7266 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7267 }
7268 }
7269
7270 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7271 sizeof (long long) == sizeof (long). */
7272 if (TYPE_PRECISION (long_long_integer_type_node)
7273 == TYPE_PRECISION (long_integer_type_node))
7274 {
7275 tree newfn = NULL_TREE;
7276 switch (fcode)
7277 {
7278 CASE_FLT_FN (BUILT_IN_LLCEIL):
7279 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7280 break;
7281
7282 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7283 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7284 break;
7285
7286 CASE_FLT_FN (BUILT_IN_LLROUND):
7287 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7288 break;
7289
7290 CASE_FLT_FN (BUILT_IN_LLRINT):
7291 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7292 break;
7293
7294 default:
7295 break;
7296 }
7297
7298 if (newfn)
7299 {
7300 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7301 return fold_convert_loc (loc,
7302 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7303 }
7304 }
7305
7306 return NULL_TREE;
7307 }
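
/* Illustrative sketch, not part of the original sources: on an LP64
   target, where long and long long have the same precision, the
   canonicalization above turns
     llround (x)
   into
     (long long) lround (x)
   and likewise iround becomes lround on ILP32 targets.  */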
7308
7309 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7310 return type. Return NULL_TREE if no simplification can be made. */
7311
7312 static tree
7313 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7314 {
7315 tree res;
7316
7317 if (!validate_arg (arg, COMPLEX_TYPE)
7318 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7319 return NULL_TREE;
7320
7321 /* Calculate the result when the argument is a constant. */
7322 if (TREE_CODE (arg) == COMPLEX_CST
7323 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7324 type, mpfr_hypot)))
7325 return res;
7326
7327 if (TREE_CODE (arg) == COMPLEX_EXPR)
7328 {
7329 tree real = TREE_OPERAND (arg, 0);
7330 tree imag = TREE_OPERAND (arg, 1);
7331
7332 /* If either part is zero, cabs is fabs of the other. */
7333 if (real_zerop (real))
7334 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7335 if (real_zerop (imag))
7336 return fold_build1_loc (loc, ABS_EXPR, type, real);
7337
7338 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7339 if (flag_unsafe_math_optimizations
7340 && operand_equal_p (real, imag, OEP_PURE_SAME))
7341 {
7342 const REAL_VALUE_TYPE sqrt2_trunc
7343 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7344 STRIP_NOPS (real);
7345 return fold_build2_loc (loc, MULT_EXPR, type,
7346 fold_build1_loc (loc, ABS_EXPR, type, real),
7347 build_real (type, sqrt2_trunc));
7348 }
7349 }
7350
7351 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7352 if (TREE_CODE (arg) == NEGATE_EXPR
7353 || TREE_CODE (arg) == CONJ_EXPR)
7354 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7355
7356 /* Don't do this when optimizing for size. */
7357 if (flag_unsafe_math_optimizations
7358 && optimize && optimize_function_for_speed_p (cfun))
7359 {
7360 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7361
7362 if (sqrtfn != NULL_TREE)
7363 {
7364 tree rpart, ipart, result;
7365
7366 arg = builtin_save_expr (arg);
7367
7368 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7369 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7370
7371 rpart = builtin_save_expr (rpart);
7372 ipart = builtin_save_expr (ipart);
7373
7374 result = fold_build2_loc (loc, PLUS_EXPR, type,
7375 fold_build2_loc (loc, MULT_EXPR, type,
7376 rpart, rpart),
7377 fold_build2_loc (loc, MULT_EXPR, type,
7378 ipart, ipart));
7379
7380 return build_call_expr_loc (loc, sqrtfn, 1, result);
7381 }
7382 }
7383
7384 return NULL_TREE;
7385 }
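
/* Illustrative sketch, not part of the original sources: under
   -funsafe-math-optimizations, when optimizing for speed, the expansion
   above turns
     cabs (z)
   into the equivalent of
     sqrt (creal (z) * creal (z) + cimag (z) * cimag (z))
   with the real and imaginary parts saved so z is evaluated once.  */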
7386
7387 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7388 complex tree type of the result. If NEG is true, the imaginary
7389 zero is negative. */
7390
7391 static tree
7392 build_complex_cproj (tree type, bool neg)
7393 {
7394 REAL_VALUE_TYPE rinf, rzero = dconst0;
7395
7396 real_inf (&rinf);
7397 rzero.sign = neg;
7398 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7399 build_real (TREE_TYPE (type), rzero));
7400 }
7401
7402 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7403 return type. Return NULL_TREE if no simplification can be made. */
7404
7405 static tree
7406 fold_builtin_cproj (location_t loc, tree arg, tree type)
7407 {
7408 if (!validate_arg (arg, COMPLEX_TYPE)
7409 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7410 return NULL_TREE;
7411
7412 /* If there are no infinities, return arg. */
7413 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7414 return non_lvalue_loc (loc, arg);
7415
7416 /* Calculate the result when the argument is a constant. */
7417 if (TREE_CODE (arg) == COMPLEX_CST)
7418 {
7419 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7420 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7421
7422 if (real_isinf (real) || real_isinf (imag))
7423 return build_complex_cproj (type, imag->sign);
7424 else
7425 return arg;
7426 }
7427 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7428 {
7429 tree real = TREE_OPERAND (arg, 0);
7430 tree imag = TREE_OPERAND (arg, 1);
7431
7432 STRIP_NOPS (real);
7433 STRIP_NOPS (imag);
7434
7435 /* If the real part is inf and the imag part is known to be
7436 nonnegative, return (inf + 0i). Remember side-effects are
7437 possible in the imag part. */
7438 if (TREE_CODE (real) == REAL_CST
7439 && real_isinf (TREE_REAL_CST_PTR (real))
7440 && tree_expr_nonnegative_p (imag))
7441 return omit_one_operand_loc (loc, type,
7442 build_complex_cproj (type, false),
7443 arg);
7444
7445 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7446 Remember side-effects are possible in the real part. */
7447 if (TREE_CODE (imag) == REAL_CST
7448 && real_isinf (TREE_REAL_CST_PTR (imag)))
7449 return
7450 omit_one_operand_loc (loc, type,
7451 build_complex_cproj (type, TREE_REAL_CST_PTR
7452 (imag)->sign), arg);
7453 }
7454
7455 return NULL_TREE;
7456 }
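
/* Illustrative sketch, not part of the original sources: for constant
   arguments the folding above gives, e.g.,
     cproj (INFINITY + 2.0i)    -> INFINITY + 0.0i
     cproj (1.0 - INFINITY * I) -> INFINITY - 0.0i
     cproj (1.0 + 2.0i)         -> 1.0 + 2.0i
   matching the C99 definition of cproj.  */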
7457
7458 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7459 Return NULL_TREE if no simplification can be made. */
7460
7461 static tree
7462 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7463 {
7465 enum built_in_function fcode;
7466 tree res;
7467
7468 if (!validate_arg (arg, REAL_TYPE))
7469 return NULL_TREE;
7470
7471 /* Calculate the result when the argument is a constant. */
7472 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7473 return res;
7474
7475 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7476 fcode = builtin_mathfn_code (arg);
7477 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7478 {
7479 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7480 arg = fold_build2_loc (loc, MULT_EXPR, type,
7481 CALL_EXPR_ARG (arg, 0),
7482 build_real (type, dconsthalf));
7483 return build_call_expr_loc (loc, expfn, 1, arg);
7484 }
7485
7486 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7487 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7488 {
7489 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7490
7491 if (powfn)
7492 {
7493 tree arg0 = CALL_EXPR_ARG (arg, 0);
7494 tree tree_root;
7495 /* The inner root was either sqrt or cbrt. */
7496 /* This was a conditional expression but it triggered a bug
7497 in Sun C 5.5. */
7498 REAL_VALUE_TYPE dconstroot;
7499 if (BUILTIN_SQRT_P (fcode))
7500 dconstroot = dconsthalf;
7501 else
7502 dconstroot = dconst_third ();
7503
7504 /* Adjust for the outer root. */
7505 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7506 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7507 tree_root = build_real (type, dconstroot);
7508 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7509 }
7510 }
7511
7512 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7513 if (flag_unsafe_math_optimizations
7514 && (fcode == BUILT_IN_POW
7515 || fcode == BUILT_IN_POWF
7516 || fcode == BUILT_IN_POWL))
7517 {
7518 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7519 tree arg0 = CALL_EXPR_ARG (arg, 0);
7520 tree arg1 = CALL_EXPR_ARG (arg, 1);
7521 tree narg1;
7522 if (!tree_expr_nonnegative_p (arg0))
7523 arg0 = build1 (ABS_EXPR, type, arg0);
7524 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7525 build_real (type, dconsthalf));
7526 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7527 }
7528
7529 return NULL_TREE;
7530 }
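
/* Illustrative sketch, not part of the original sources: under
   -funsafe-math-optimizations the rules above give, for example,
     sqrt (exp (x))    -> exp (x * 0.5)
     sqrt (sqrt (x))   -> pow (x, 0.25)
     sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)
   where the fabs is dropped when x is known to be nonnegative.  */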
7531
7532 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7533 Return NULL_TREE if no simplification can be made. */
7534
7535 static tree
7536 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7537 {
7538 const enum built_in_function fcode = builtin_mathfn_code (arg);
7539 tree res;
7540
7541 if (!validate_arg (arg, REAL_TYPE))
7542 return NULL_TREE;
7543
7544 /* Calculate the result when the argument is a constant. */
7545 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7546 return res;
7547
7548 if (flag_unsafe_math_optimizations)
7549 {
7550 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7551 if (BUILTIN_EXPONENT_P (fcode))
7552 {
7553 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7554 const REAL_VALUE_TYPE third_trunc =
7555 real_value_truncate (TYPE_MODE (type), dconst_third ());
7556 arg = fold_build2_loc (loc, MULT_EXPR, type,
7557 CALL_EXPR_ARG (arg, 0),
7558 build_real (type, third_trunc));
7559 return build_call_expr_loc (loc, expfn, 1, arg);
7560 }
7561
7562 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7563 if (BUILTIN_SQRT_P (fcode))
7564 {
7565 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7566
7567 if (powfn)
7568 {
7569 tree arg0 = CALL_EXPR_ARG (arg, 0);
7570 tree tree_root;
7571 REAL_VALUE_TYPE dconstroot = dconst_third ();
7572
7573 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7574 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7575 tree_root = build_real (type, dconstroot);
7576 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7577 }
7578 }
7579
7580 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7581 if (BUILTIN_CBRT_P (fcode))
7582 {
7583 tree arg0 = CALL_EXPR_ARG (arg, 0);
7584 if (tree_expr_nonnegative_p (arg0))
7585 {
7586 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7587
7588 if (powfn)
7589 {
7590 tree tree_root;
7591 REAL_VALUE_TYPE dconstroot;
7592
7593 real_arithmetic (&dconstroot, MULT_EXPR,
7594 dconst_third_ptr (), dconst_third_ptr ());
7595 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7596 tree_root = build_real (type, dconstroot);
7597 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7598 }
7599 }
7600 }
7601
7602 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7603 if (fcode == BUILT_IN_POW
7604 || fcode == BUILT_IN_POWF
7605 || fcode == BUILT_IN_POWL)
7606 {
7607 tree arg00 = CALL_EXPR_ARG (arg, 0);
7608 tree arg01 = CALL_EXPR_ARG (arg, 1);
7609 if (tree_expr_nonnegative_p (arg00))
7610 {
7611 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7612 const REAL_VALUE_TYPE dconstroot
7613 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7614 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7615 build_real (type, dconstroot));
7616 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7617 }
7618 }
7619 }
7620 return NULL_TREE;
7621 }
7622
7623 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7624 TYPE is the type of the return value. Return NULL_TREE if no
7625 simplification can be made. */
7626
7627 static tree
7628 fold_builtin_cos (location_t loc,
7629 tree arg, tree type, tree fndecl)
7630 {
7631 tree res, narg;
7632
7633 if (!validate_arg (arg, REAL_TYPE))
7634 return NULL_TREE;
7635
7636 /* Calculate the result when the argument is a constant. */
7637 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7638 return res;
7639
7640 /* Optimize cos(-x) into cos (x). */
7641 if ((narg = fold_strip_sign_ops (arg)))
7642 return build_call_expr_loc (loc, fndecl, 1, narg);
7643
7644 return NULL_TREE;
7645 }
7646
7647 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7648 Return NULL_TREE if no simplification can be made. */
7649
7650 static tree
7651 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7652 {
7653 if (validate_arg (arg, REAL_TYPE))
7654 {
7655 tree res, narg;
7656
7657 /* Calculate the result when the argument is a constant. */
7658 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7659 return res;
7660
7661 /* Optimize cosh(-x) into cosh (x). */
7662 if ((narg = fold_strip_sign_ops (arg)))
7663 return build_call_expr_loc (loc, fndecl, 1, narg);
7664 }
7665
7666 return NULL_TREE;
7667 }
7668
7669 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7670 argument ARG. TYPE is the type of the return value. Return
7671 NULL_TREE if no simplification can be made. */
7672
7673 static tree
7674 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7675 bool hyper)
7676 {
7677 if (validate_arg (arg, COMPLEX_TYPE)
7678 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7679 {
7680 tree tmp;
7681
7682 /* Calculate the result when the argument is a constant. */
7683 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7684 return tmp;
7685
7686 /* Optimize fn(-x) into fn(x). */
7687 if ((tmp = fold_strip_sign_ops (arg)))
7688 return build_call_expr_loc (loc, fndecl, 1, tmp);
7689 }
7690
7691 return NULL_TREE;
7692 }
7693
7694 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7695 Return NULL_TREE if no simplification can be made. */
7696
7697 static tree
7698 fold_builtin_tan (tree arg, tree type)
7699 {
7700 enum built_in_function fcode;
7701 tree res;
7702
7703 if (!validate_arg (arg, REAL_TYPE))
7704 return NULL_TREE;
7705
7706 /* Calculate the result when the argument is a constant. */
7707 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7708 return res;
7709
7710 /* Optimize tan(atan(x)) = x. */
7711 fcode = builtin_mathfn_code (arg);
7712 if (flag_unsafe_math_optimizations
7713 && (fcode == BUILT_IN_ATAN
7714 || fcode == BUILT_IN_ATANF
7715 || fcode == BUILT_IN_ATANL))
7716 return CALL_EXPR_ARG (arg, 0);
7717
7718 return NULL_TREE;
7719 }
7720
7721 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7722 NULL_TREE if no simplification can be made. */
7723
7724 static tree
7725 fold_builtin_sincos (location_t loc,
7726 tree arg0, tree arg1, tree arg2)
7727 {
7728 tree type;
7729 tree res, fn, call;
7730
7731 if (!validate_arg (arg0, REAL_TYPE)
7732 || !validate_arg (arg1, POINTER_TYPE)
7733 || !validate_arg (arg2, POINTER_TYPE))
7734 return NULL_TREE;
7735
7736 type = TREE_TYPE (arg0);
7737
7738 /* Calculate the result when the argument is a constant. */
7739 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7740 return res;
7741
7742 /* Canonicalize sincos to cexpi. */
7743 if (!targetm.libc_has_function (function_c99_math_complex))
7744 return NULL_TREE;
7745 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7746 if (!fn)
7747 return NULL_TREE;
7748
7749 call = build_call_expr_loc (loc, fn, 1, arg0);
7750 call = builtin_save_expr (call);
7751
7752 return build2 (COMPOUND_EXPR, void_type_node,
7753 build2 (MODIFY_EXPR, void_type_node,
7754 build_fold_indirect_ref_loc (loc, arg1),
7755 build1 (IMAGPART_EXPR, type, call)),
7756 build2 (MODIFY_EXPR, void_type_node,
7757 build_fold_indirect_ref_loc (loc, arg2),
7758 build1 (REALPART_EXPR, type, call)));
7759 }
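
/* Illustrative sketch, not part of the original sources: when the C
   library provides the C99 complex functions, the canonicalization
   above turns
     sincos (x, &s, &c);
   into the equivalent of
     _Complex double t = cexpi (x);  s = __imag__ t;  c = __real__ t;
   with the cexpi call wrapped in a SAVE_EXPR so it runs only once.  */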
7760
7761 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7762 NULL_TREE if no simplification can be made. */
7763
7764 static tree
7765 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7766 {
7767 tree rtype;
7768 tree realp, imagp, ifn;
7769 tree res;
7770
7771 if (!validate_arg (arg0, COMPLEX_TYPE)
7772 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7773 return NULL_TREE;
7774
7775 /* Calculate the result when the argument is a constant. */
7776 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7777 return res;
7778
7779 rtype = TREE_TYPE (TREE_TYPE (arg0));
7780
7781 /* If we can determine that the real part of arg0 is constant zero,
7782 fold to cexpi.  */
7783 if (!targetm.libc_has_function (function_c99_math_complex))
7784 return NULL_TREE;
7785 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7786 if (!ifn)
7787 return NULL_TREE;
7788
7789 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7790 && real_zerop (realp))
7791 {
7792 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7793 return build_call_expr_loc (loc, ifn, 1, narg);
7794 }
7795
7796 /* If we can easily decompose the real and imaginary parts, split cexp
7797 into exp (r) * cexpi (i).  */
7798 if (flag_unsafe_math_optimizations
7799 && realp)
7800 {
7801 tree rfn, rcall, icall;
7802
7803 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7804 if (!rfn)
7805 return NULL_TREE;
7806
7807 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7808 if (!imagp)
7809 return NULL_TREE;
7810
7811 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7812 icall = builtin_save_expr (icall);
7813 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7814 rcall = builtin_save_expr (rcall);
7815 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7816 fold_build2_loc (loc, MULT_EXPR, rtype,
7817 rcall,
7818 fold_build1_loc (loc, REALPART_EXPR,
7819 rtype, icall)),
7820 fold_build2_loc (loc, MULT_EXPR, rtype,
7821 rcall,
7822 fold_build1_loc (loc, IMAGPART_EXPR,
7823 rtype, icall)));
7824 }
7825
7826 return NULL_TREE;
7827 }
7828
7829 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7830 Return NULL_TREE if no simplification can be made. */
7831
7832 static tree
7833 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7834 {
7835 if (!validate_arg (arg, REAL_TYPE))
7836 return NULL_TREE;
7837
7838 /* Optimize trunc of constant value. */
7839 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7840 {
7841 REAL_VALUE_TYPE r, x;
7842 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7843
7844 x = TREE_REAL_CST (arg);
7845 real_trunc (&r, TYPE_MODE (type), &x);
7846 return build_real (type, r);
7847 }
7848
7849 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7850 }
7851
7852 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7853 Return NULL_TREE if no simplification can be made. */
7854
7855 static tree
7856 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7857 {
7858 if (!validate_arg (arg, REAL_TYPE))
7859 return NULL_TREE;
7860
7861 /* Optimize floor of constant value. */
7862 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7863 {
7864 REAL_VALUE_TYPE x;
7865
7866 x = TREE_REAL_CST (arg);
7867 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7868 {
7869 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7870 REAL_VALUE_TYPE r;
7871
7872 real_floor (&r, TYPE_MODE (type), &x);
7873 return build_real (type, r);
7874 }
7875 }
7876
7877 /* Fold floor (x) where x is nonnegative to trunc (x). */
7878 if (tree_expr_nonnegative_p (arg))
7879 {
7880 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7881 if (truncfn)
7882 return build_call_expr_loc (loc, truncfn, 1, arg);
7883 }
7884
7885 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7886 }
7887
7888 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7889 Return NULL_TREE if no simplification can be made. */
7890
7891 static tree
7892 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7893 {
7894 if (!validate_arg (arg, REAL_TYPE))
7895 return NULL_TREE;
7896
7897 /* Optimize ceil of constant value. */
7898 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7899 {
7900 REAL_VALUE_TYPE x;
7901
7902 x = TREE_REAL_CST (arg);
7903 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7904 {
7905 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7906 REAL_VALUE_TYPE r;
7907
7908 real_ceil (&r, TYPE_MODE (type), &x);
7909 return build_real (type, r);
7910 }
7911 }
7912
7913 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7914 }
7915
7916 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7917 Return NULL_TREE if no simplification can be made. */
7918
7919 static tree
7920 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7921 {
7922 if (!validate_arg (arg, REAL_TYPE))
7923 return NULL_TREE;
7924
7925 /* Optimize round of constant value. */
7926 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7927 {
7928 REAL_VALUE_TYPE x;
7929
7930 x = TREE_REAL_CST (arg);
7931 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7932 {
7933 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7934 REAL_VALUE_TYPE r;
7935
7936 real_round (&r, TYPE_MODE (type), &x);
7937 return build_real (type, r);
7938 }
7939 }
7940
7941 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7942 }
7943
7944 /* Fold function call to builtin lround, lroundf or lroundl (or the
7945 corresponding long long versions) and other rounding functions. ARG
7946 is the argument to the call. Return NULL_TREE if no simplification
7947 can be made. */
7948
7949 static tree
7950 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7951 {
7952 if (!validate_arg (arg, REAL_TYPE))
7953 return NULL_TREE;
7954
7955 /* Optimize lround of constant value. */
7956 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7957 {
7958 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7959
7960 if (real_isfinite (&x))
7961 {
7962 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7963 tree ftype = TREE_TYPE (arg);
7964 wide_int val;
7965 REAL_VALUE_TYPE r;
7966 bool fail = false;
7967
7968 switch (DECL_FUNCTION_CODE (fndecl))
7969 {
7970 CASE_FLT_FN (BUILT_IN_IFLOOR):
7971 CASE_FLT_FN (BUILT_IN_LFLOOR):
7972 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7973 real_floor (&r, TYPE_MODE (ftype), &x);
7974 break;
7975
7976 CASE_FLT_FN (BUILT_IN_ICEIL):
7977 CASE_FLT_FN (BUILT_IN_LCEIL):
7978 CASE_FLT_FN (BUILT_IN_LLCEIL):
7979 real_ceil (&r, TYPE_MODE (ftype), &x);
7980 break;
7981
7982 CASE_FLT_FN (BUILT_IN_IROUND):
7983 CASE_FLT_FN (BUILT_IN_LROUND):
7984 CASE_FLT_FN (BUILT_IN_LLROUND):
7985 real_round (&r, TYPE_MODE (ftype), &x);
7986 break;
7987
7988 default:
7989 gcc_unreachable ();
7990 }
7991
7992 val = real_to_integer (&r, &fail,
7993 TYPE_PRECISION (itype));
7994 if (!fail)
7995 return wide_int_to_tree (itype, val);
7996 }
7997 }
7998
7999 switch (DECL_FUNCTION_CODE (fndecl))
8000 {
8001 CASE_FLT_FN (BUILT_IN_LFLOOR):
8002 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8003 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8004 if (tree_expr_nonnegative_p (arg))
8005 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8006 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8007 break;
8008 default:;
8009 }
8010
8011 return fold_fixed_mathfn (loc, fndecl, arg);
8012 }
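
/* Illustrative sketch, not part of the original sources: for constant
   arguments the folding above computes results at compile time, e.g.
     lround (2.5)            -> 3   (halfway cases round away from zero)
     __builtin_lfloor (-1.5) -> -2
   and it refuses to fold when the value does not fit the integer
   return type.  */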
8013
8014 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8015 and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8016 the argument to the call. Return NULL_TREE if no simplification can
8017 be made. */
8018
8019 static tree
8020 fold_builtin_bitop (tree fndecl, tree arg)
8021 {
8022 if (!validate_arg (arg, INTEGER_TYPE))
8023 return NULL_TREE;
8024
8025 /* Optimize for constant argument. */
8026 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8027 {
8028 wide_int warg = arg;
8029 int result;
8030
8031 switch (DECL_FUNCTION_CODE (fndecl))
8032 {
8033 CASE_INT_FN (BUILT_IN_FFS):
8034 result = wi::ffs (warg);
8035 break;
8036
8037 CASE_INT_FN (BUILT_IN_CLZ):
8038 result = wi::clz (warg);
8039 break;
8040
8041 CASE_INT_FN (BUILT_IN_CTZ):
8042 result = wi::ctz (warg);
8043 break;
8044
8045 CASE_INT_FN (BUILT_IN_CLRSB):
8046 result = wi::clrsb (warg);
8047 break;
8048
8049 CASE_INT_FN (BUILT_IN_POPCOUNT):
8050 result = wi::popcount (warg);
8051 break;
8052
8053 CASE_INT_FN (BUILT_IN_PARITY):
8054 result = wi::parity (warg);
8055 break;
8056
8057 default:
8058 gcc_unreachable ();
8059 }
8060
8061 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8062 }
8063
8064 return NULL_TREE;
8065 }
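
/* Illustrative sketch, not part of the original sources: with constant
   arguments the folding above yields, e.g.,
     __builtin_popcount (0xff) -> 8
     __builtin_parity (7)      -> 1
     __builtin_ffs (0)         -> 0
   all evaluated through the wide-int helpers at compile time.  */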
8066
8067 /* Fold function call to builtin_bswap and the short, long and long long
8068 variants. Return NULL_TREE if no simplification can be made. */
8069 static tree
8070 fold_builtin_bswap (tree fndecl, tree arg)
8071 {
8072 if (! validate_arg (arg, INTEGER_TYPE))
8073 return NULL_TREE;
8074
8075 /* Optimize constant value. */
8076 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8077 {
8078 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8079
8080 switch (DECL_FUNCTION_CODE (fndecl))
8081 {
8082 case BUILT_IN_BSWAP16:
8083 case BUILT_IN_BSWAP32:
8084 case BUILT_IN_BSWAP64:
8085 {
8086 signop sgn = TYPE_SIGN (type);
8087 tree result =
8088 wide_int_to_tree (type,
8089 wide_int::from (arg, TYPE_PRECISION (type),
8090 sgn).bswap ());
8091 return result;
8092 }
8093 default:
8094 gcc_unreachable ();
8095 }
8096 }
8097
8098 return NULL_TREE;
8099 }
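
/* Illustrative sketch, not part of the original sources: the constant
   folding above gives, e.g.,
     __builtin_bswap32 (0x12345678) -> 0x78563412
     __builtin_bswap16 (0xabcd)     -> 0xcdab
   by byte-reversing the wide-int representation of the argument.  */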
8100
8101 /* A subroutine of fold_builtin to fold the various logarithmic
8102 functions.  Return NULL_TREE if no simplification can be made.
8103 FUNC is the corresponding MPFR logarithm function. */
8104
8105 static tree
8106 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8107 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8108 {
8109 if (validate_arg (arg, REAL_TYPE))
8110 {
8111 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8112 tree res;
8113 const enum built_in_function fcode = builtin_mathfn_code (arg);
8114
8115 /* Calculate the result when the argument is a constant. */
8116 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8117 return res;
8118
8119 /* Special case, optimize logN(expN(x)) = x. */
8120 if (flag_unsafe_math_optimizations
8121 && ((func == mpfr_log
8122 && (fcode == BUILT_IN_EXP
8123 || fcode == BUILT_IN_EXPF
8124 || fcode == BUILT_IN_EXPL))
8125 || (func == mpfr_log2
8126 && (fcode == BUILT_IN_EXP2
8127 || fcode == BUILT_IN_EXP2F
8128 || fcode == BUILT_IN_EXP2L))
8129 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8130 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8131
8132 /* Optimize logN(func()) for various exponential functions. We
8133 want to determine the value "x" and the power "exponent" in
8134 order to transform logN(x**exponent) into exponent*logN(x). */
8135 if (flag_unsafe_math_optimizations)
8136 {
8137 tree exponent = 0, x = 0;
8138
8139 switch (fcode)
8140 {
8141 CASE_FLT_FN (BUILT_IN_EXP):
8142 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
8143 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8144 dconst_e ()));
8145 exponent = CALL_EXPR_ARG (arg, 0);
8146 break;
8147 CASE_FLT_FN (BUILT_IN_EXP2):
8148 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
8149 x = build_real (type, dconst2);
8150 exponent = CALL_EXPR_ARG (arg, 0);
8151 break;
8152 CASE_FLT_FN (BUILT_IN_EXP10):
8153 CASE_FLT_FN (BUILT_IN_POW10):
8154 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
8155 {
8156 REAL_VALUE_TYPE dconst10;
8157 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8158 x = build_real (type, dconst10);
8159 }
8160 exponent = CALL_EXPR_ARG (arg, 0);
8161 break;
8162 CASE_FLT_FN (BUILT_IN_SQRT):
8163 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
8164 x = CALL_EXPR_ARG (arg, 0);
8165 exponent = build_real (type, dconsthalf);
8166 break;
8167 CASE_FLT_FN (BUILT_IN_CBRT):
8168 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
8169 x = CALL_EXPR_ARG (arg, 0);
8170 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8171 dconst_third ()));
8172 break;
8173 CASE_FLT_FN (BUILT_IN_POW):
8174 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
8175 x = CALL_EXPR_ARG (arg, 0);
8176 exponent = CALL_EXPR_ARG (arg, 1);
8177 break;
8178 default:
8179 break;
8180 }
8181
8182 /* Now perform the optimization. */
8183 if (x && exponent)
8184 {
8185 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8186 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8187 }
8188 }
8189 }
8190
8191 return NULL_TREE;
8192 }
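
/* Illustrative sketch, not part of the original sources: under
   -funsafe-math-optimizations the rules above give, for example,
     log (exp (x))     -> x
     log (sqrt (x))    -> 0.5 * log (x)
     log2 (pow (x, y)) -> y * log2 (x)
   and constant arguments are evaluated directly through MPFR.  */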
8193
8194 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8195 NULL_TREE if no simplification can be made. */
8196
8197 static tree
8198 fold_builtin_hypot (location_t loc, tree fndecl,
8199 tree arg0, tree arg1, tree type)
8200 {
8201 tree res, narg0, narg1;
8202
8203 if (!validate_arg (arg0, REAL_TYPE)
8204 || !validate_arg (arg1, REAL_TYPE))
8205 return NULL_TREE;
8206
8207 /* Calculate the result when the argument is a constant. */
8208 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8209 return res;
8210
8211 /* If either argument to hypot has a negate or abs, strip that off.
8212 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8213 narg0 = fold_strip_sign_ops (arg0);
8214 narg1 = fold_strip_sign_ops (arg1);
8215 if (narg0 || narg1)
8216 {
8217 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8218 narg1 ? narg1 : arg1);
8219 }
8220
8221 /* If either argument is zero, hypot is fabs of the other. */
8222 if (real_zerop (arg0))
8223 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8224 else if (real_zerop (arg1))
8225 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8226
8227 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8228 if (flag_unsafe_math_optimizations
8229 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8230 {
8231 const REAL_VALUE_TYPE sqrt2_trunc
8232 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8233 return fold_build2_loc (loc, MULT_EXPR, type,
8234 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8235 build_real (type, sqrt2_trunc));
8236 }
8237
8238 return NULL_TREE;
8239 }
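
/* Illustrative sketch, not part of the original sources: the rules
   above give, for example,
     hypot (-x, fabs (y)) -> hypot (x, y)
     hypot (x, 0.0)       -> fabs (x)
   and, under -funsafe-math-optimizations,
     hypot (x, x)         -> fabs (x) * sqrt (2).  */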
8240
8241
8242 /* Fold a builtin function call to pow, powf, or powl. Return
8243 NULL_TREE if no simplification can be made. */
8244 static tree
8245 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8246 {
8247 tree res;
8248
8249 if (!validate_arg (arg0, REAL_TYPE)
8250 || !validate_arg (arg1, REAL_TYPE))
8251 return NULL_TREE;
8252
8253 /* Calculate the result when the argument is a constant. */
8254 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8255 return res;
8256
8257 /* Optimize pow(1.0,y) = 1.0. */
8258 if (real_onep (arg0))
8259 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8260
8261 if (TREE_CODE (arg1) == REAL_CST
8262 && !TREE_OVERFLOW (arg1))
8263 {
8264 REAL_VALUE_TYPE cint;
8265 REAL_VALUE_TYPE c;
8266 HOST_WIDE_INT n;
8267
8268 c = TREE_REAL_CST (arg1);
8269
8270 /* Optimize pow(x,0.0) = 1.0. */
8271 if (REAL_VALUES_EQUAL (c, dconst0))
8272 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8273 arg0);
8274
8275 /* Optimize pow(x,1.0) = x. */
8276 if (REAL_VALUES_EQUAL (c, dconst1))
8277 return arg0;
8278
8279 /* Optimize pow(x,-1.0) = 1.0/x. */
8280 if (REAL_VALUES_EQUAL (c, dconstm1))
8281 return fold_build2_loc (loc, RDIV_EXPR, type,
8282 build_real (type, dconst1), arg0);
8283
8284 /* Optimize pow(x,0.5) = sqrt(x). */
8285 if (flag_unsafe_math_optimizations
8286 && REAL_VALUES_EQUAL (c, dconsthalf))
8287 {
8288 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8289
8290 if (sqrtfn != NULL_TREE)
8291 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8292 }
8293
8294 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8295 if (flag_unsafe_math_optimizations)
8296 {
8297 const REAL_VALUE_TYPE dconstroot
8298 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8299
8300 if (REAL_VALUES_EQUAL (c, dconstroot))
8301 {
8302 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8303 if (cbrtfn != NULL_TREE)
8304 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8305 }
8306 }
8307
8308 /* Check for an integer exponent. */
8309 n = real_to_integer (&c);
8310 real_from_integer (&cint, VOIDmode, n, SIGNED);
8311 if (real_identical (&c, &cint))
8312 {
8313 /* Attempt to evaluate pow at compile-time, unless this should
8314 raise an exception. */
8315 if (TREE_CODE (arg0) == REAL_CST
8316 && !TREE_OVERFLOW (arg0)
8317 && (n > 0
8318 || (!flag_trapping_math && !flag_errno_math)
8319 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8320 {
8321 REAL_VALUE_TYPE x;
8322 bool inexact;
8323
8324 x = TREE_REAL_CST (arg0);
8325 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8326 if (flag_unsafe_math_optimizations || !inexact)
8327 return build_real (type, x);
8328 }
8329
8330 /* Strip sign ops from even integer powers. */
8331 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8332 {
8333 tree narg0 = fold_strip_sign_ops (arg0);
8334 if (narg0)
8335 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8336 }
8337 }
8338 }
8339
8340 if (flag_unsafe_math_optimizations)
8341 {
8342 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8343
8344 /* Optimize pow(expN(x),y) = expN(x*y). */
8345 if (BUILTIN_EXPONENT_P (fcode))
8346 {
8347 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8348 tree arg = CALL_EXPR_ARG (arg0, 0);
8349 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8350 return build_call_expr_loc (loc, expfn, 1, arg);
8351 }
8352
8353 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8354 if (BUILTIN_SQRT_P (fcode))
8355 {
8356 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8357 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8358 build_real (type, dconsthalf));
8359 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8360 }
8361
8362 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8363 if (BUILTIN_CBRT_P (fcode))
8364 {
8365 tree arg = CALL_EXPR_ARG (arg0, 0);
8366 if (tree_expr_nonnegative_p (arg))
8367 {
8368 const REAL_VALUE_TYPE dconstroot
8369 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8370 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8371 build_real (type, dconstroot));
8372 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8373 }
8374 }
8375
8376 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8377 if (fcode == BUILT_IN_POW
8378 || fcode == BUILT_IN_POWF
8379 || fcode == BUILT_IN_POWL)
8380 {
8381 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8382 if (tree_expr_nonnegative_p (arg00))
8383 {
8384 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8385 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8386 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8387 }
8388 }
8389 }
8390
8391 return NULL_TREE;
8392 }
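
/* Illustrative sketch, not part of the original sources: the rules
   above give, for example,
     pow (x, 1.0)      -> x
     pow (x, -1.0)     -> 1.0 / x
     pow (x, 0.5)      -> sqrt (x)           (-funsafe-math-optimizations)
     pow (sqrt (x), y) -> pow (x, y * 0.5)   (likewise)
   and pow of two constants is evaluated at compile time via MPFR.  */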
8393
8394 /* Fold a builtin function call to powi, powif, or powil with arguments
8395 ARG0 and ARG1.  Return NULL_TREE if no simplification can be made.  */
8396 static tree
8397 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8398 tree arg0, tree arg1, tree type)
8399 {
8400 if (!validate_arg (arg0, REAL_TYPE)
8401 || !validate_arg (arg1, INTEGER_TYPE))
8402 return NULL_TREE;
8403
8404 /* Optimize powi(1.0,y) = 1.0.  */
8405 if (real_onep (arg0))
8406 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8407
8408 if (tree_fits_shwi_p (arg1))
8409 {
8410 HOST_WIDE_INT c = tree_to_shwi (arg1);
8411
8412 /* Evaluate powi at compile-time. */
8413 if (TREE_CODE (arg0) == REAL_CST
8414 && !TREE_OVERFLOW (arg0))
8415 {
8416 REAL_VALUE_TYPE x;
8417 x = TREE_REAL_CST (arg0);
8418 real_powi (&x, TYPE_MODE (type), &x, c);
8419 return build_real (type, x);
8420 }
8421
8422 /* Optimize powi(x,0) = 1.0.  */
8423 if (c == 0)
8424 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8425 arg0);
8426
8427 /* Optimize powi(x,1) = x.  */
8428 if (c == 1)
8429 return arg0;
8430
8431 /* Optimize powi(x,-1) = 1.0/x.  */
8432 if (c == -1)
8433 return fold_build2_loc (loc, RDIV_EXPR, type,
8434 build_real (type, dconst1), arg0);
8435 }
8436
8437 return NULL_TREE;
8438 }
8439
8440 /* A subroutine of fold_builtin to fold the various exponent
8441 functions. Return NULL_TREE if no simplification can be made.
8442 FUNC is the corresponding MPFR exponent function. */
8443
8444 static tree
8445 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8446 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8447 {
8448 if (validate_arg (arg, REAL_TYPE))
8449 {
8450 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8451 tree res;
8452
8453 /* Calculate the result when the argument is a constant. */
8454 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8455 return res;
8456
8457 /* Optimize expN(logN(x)) = x. */
8458 if (flag_unsafe_math_optimizations)
8459 {
8460 const enum built_in_function fcode = builtin_mathfn_code (arg);
8461
8462 if ((func == mpfr_exp
8463 && (fcode == BUILT_IN_LOG
8464 || fcode == BUILT_IN_LOGF
8465 || fcode == BUILT_IN_LOGL))
8466 || (func == mpfr_exp2
8467 && (fcode == BUILT_IN_LOG2
8468 || fcode == BUILT_IN_LOG2F
8469 || fcode == BUILT_IN_LOG2L))
8470 || (func == mpfr_exp10
8471 && (fcode == BUILT_IN_LOG10
8472 || fcode == BUILT_IN_LOG10F
8473 || fcode == BUILT_IN_LOG10L)))
8474 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8475 }
8476 }
8477
8478 return NULL_TREE;
8479 }
8480
8481 /* Return true if VAR is an SSA variable or a component thereof.  */
8482
8483 static bool
8484 var_decl_component_p (tree var)
8485 {
8486 tree inner = var;
8487 while (handled_component_p (inner))
8488 inner = TREE_OPERAND (inner, 0);
8489 return SSA_VAR_P (inner);
8490 }
8491
8492 /* Fold function call to builtin memset. Return
8493 NULL_TREE if no simplification can be made. */
8494
8495 static tree
8496 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8497 tree type, bool ignore)
8498 {
8499 tree var, ret, etype;
8500 unsigned HOST_WIDE_INT length, cval;
8501
8502 if (! validate_arg (dest, POINTER_TYPE)
8503 || ! validate_arg (c, INTEGER_TYPE)
8504 || ! validate_arg (len, INTEGER_TYPE))
8505 return NULL_TREE;
8506
8507 if (! tree_fits_uhwi_p (len))
8508 return NULL_TREE;
8509
8510 /* If the LEN parameter is zero, return DEST. */
8511 if (integer_zerop (len))
8512 return omit_one_operand_loc (loc, type, dest, c);
8513
8514 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8515 return NULL_TREE;
8516
8517 var = dest;
8518 STRIP_NOPS (var);
8519 if (TREE_CODE (var) != ADDR_EXPR)
8520 return NULL_TREE;
8521
8522 var = TREE_OPERAND (var, 0);
8523 if (TREE_THIS_VOLATILE (var))
8524 return NULL_TREE;
8525
8526 etype = TREE_TYPE (var);
8527 if (TREE_CODE (etype) == ARRAY_TYPE)
8528 etype = TREE_TYPE (etype);
8529
8530 if (!INTEGRAL_TYPE_P (etype)
8531 && !POINTER_TYPE_P (etype))
8532 return NULL_TREE;
8533
8534 if (! var_decl_component_p (var))
8535 return NULL_TREE;
8536
8537 length = tree_to_uhwi (len);
8538 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8539 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8540 return NULL_TREE;
8541
8542 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8543 return NULL_TREE;
8544
8545 if (integer_zerop (c))
8546 cval = 0;
8547 else
8548 {
8549 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8550 return NULL_TREE;
8551
8552 cval = tree_to_hwi (c);
8553 cval &= 0xff;
8554 cval |= cval << 8;
8555 cval |= cval << 16;
8556 cval |= (cval << 31) << 1;
8557 }
8558
8559 ret = build_int_cst_type (etype, cval);
8560 var = build_fold_indirect_ref_loc (loc,
8561 fold_convert_loc (loc,
8562 build_pointer_type (etype),
8563 dest));
8564 ret = build2 (MODIFY_EXPR, etype, var, ret);
8565 if (ignore)
8566 return ret;
8567
8568 return omit_one_operand_loc (loc, type, dest, ret);
8569 }
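
/* A minimal sketch, not part of the original sources, of the byte
   replication above, assuming 8-bit bytes: for
     int v;  memset (&v, 0xab, sizeof v);
   cval grows 0xab -> 0xabab -> 0xabababab, the final (cval << 31) << 1
   step (split in two to avoid an out-of-range shift count) fills the
   upper half of a 64-bit HOST_WIDE_INT, and the call folds to the
   equivalent of v = 0xabababab.  */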
8570
8571 /* Fold function call to builtin bzero.  Return
8572 NULL_TREE if no simplification can be made. */
8573
8574 static tree
8575 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8576 {
8577 if (! validate_arg (dest, POINTER_TYPE)
8578 || ! validate_arg (size, INTEGER_TYPE))
8579 return NULL_TREE;
8580
8581 if (!ignore)
8582 return NULL_TREE;
8583
8584 /* New argument list transforming bzero(ptr x, int y) to
8585 memset(ptr x, int 0, size_t y). This is done this way
8586 so that if it isn't expanded inline, we fall back to
8587 calling bzero instead of memset. */
8588
8589 return fold_builtin_memset (loc, dest, integer_zero_node,
8590 fold_convert_loc (loc, size_type_node, size),
8591 void_type_node, ignore);
8592 }
8593
8594 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8595 NULL_TREE if no simplification can be made.
8596 If ENDP is 0, return DEST (like memcpy).
8597 If ENDP is 1, return DEST+LEN (like mempcpy).
8598 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8599 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8600 (memmove). */
8601
8602 static tree
8603 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8604 tree len, tree type, bool ignore, int endp)
8605 {
8606 tree destvar, srcvar, expr;
8607
8608 if (! validate_arg (dest, POINTER_TYPE)
8609 || ! validate_arg (src, POINTER_TYPE)
8610 || ! validate_arg (len, INTEGER_TYPE))
8611 return NULL_TREE;
8612
8613 /* If the LEN parameter is zero, return DEST. */
8614 if (integer_zerop (len))
8615 return omit_one_operand_loc (loc, type, dest, src);
8616
8617 /* If SRC and DEST are the same (and not volatile), return
8618 DEST{,+LEN,+LEN-1}. */
8619 if (operand_equal_p (src, dest, 0))
8620 expr = len;
8621 else
8622 {
8623 tree srctype, desttype;
8624 unsigned int src_align, dest_align;
8625 tree off0;
8626
8627 if (endp == 3)
8628 {
8629 src_align = get_pointer_alignment (src);
8630 dest_align = get_pointer_alignment (dest);
8631
8632 /* Both DEST and SRC must be pointer types.
8633 ??? This is what old code did. Is the testing for pointer types
8634 really mandatory?
8635
8636 If either SRC is readonly or length is 1, we can use memcpy. */
8637 if (!dest_align || !src_align)
8638 return NULL_TREE;
8639 if (readonly_data_expr (src)
8640 || (tree_fits_uhwi_p (len)
8641 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8642 >= (unsigned HOST_WIDE_INT) tree_to_uhwi (len))))
8643 {
8644 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8645 if (!fn)
8646 return NULL_TREE;
8647 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8648 }
8649
8650 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8651 if (TREE_CODE (src) == ADDR_EXPR
8652 && TREE_CODE (dest) == ADDR_EXPR)
8653 {
8654 tree src_base, dest_base, fn;
8655 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8656 HOST_WIDE_INT size = -1;
8657 HOST_WIDE_INT maxsize = -1;
8658
8659 srcvar = TREE_OPERAND (src, 0);
8660 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8661 &size, &maxsize);
8662 destvar = TREE_OPERAND (dest, 0);
8663 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8664 &size, &maxsize);
8665 if (tree_fits_uhwi_p (len))
8666 maxsize = tree_to_uhwi (len);
8667 else
8668 maxsize = -1;
8669 src_offset /= BITS_PER_UNIT;
8670 dest_offset /= BITS_PER_UNIT;
8671 if (SSA_VAR_P (src_base)
8672 && SSA_VAR_P (dest_base))
8673 {
8674 if (operand_equal_p (src_base, dest_base, 0)
8675 && ranges_overlap_p (src_offset, maxsize,
8676 dest_offset, maxsize))
8677 return NULL_TREE;
8678 }
8679 else if (TREE_CODE (src_base) == MEM_REF
8680 && TREE_CODE (dest_base) == MEM_REF)
8681 {
8682 addr_wide_int off;
8683 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8684 TREE_OPERAND (dest_base, 0), 0))
8685 return NULL_TREE;
8686 off = mem_ref_offset (src_base) + src_offset;
8687 if (!wi::fits_shwi_p (off))
8688 return NULL_TREE;
8689 src_offset = off.to_shwi ();
8690
8691 off = mem_ref_offset (dest_base) + dest_offset;
8692 if (!wi::fits_shwi_p (off))
8693 return NULL_TREE;
8694 dest_offset = off.to_shwi ();
8695 if (ranges_overlap_p (src_offset, maxsize,
8696 dest_offset, maxsize))
8697 return NULL_TREE;
8698 }
8699 else
8700 return NULL_TREE;
8701
8702 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8703 if (!fn)
8704 return NULL_TREE;
8705 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8706 }
8707
8708 /* If the destination and source do not alias optimize into
8709 memcpy as well. */
8710 if ((is_gimple_min_invariant (dest)
8711 || TREE_CODE (dest) == SSA_NAME)
8712 && (is_gimple_min_invariant (src)
8713 || TREE_CODE (src) == SSA_NAME))
8714 {
8715 ao_ref destr, srcr;
8716 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8717 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8718 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8719 {
8720 tree fn;
8721 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8722 if (!fn)
8723 return NULL_TREE;
8724 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8725 }
8726 }
8727
8728 return NULL_TREE;
8729 }
8730
8731 if (!tree_fits_shwi_p (len))
8732 return NULL_TREE;
8733 /* FIXME:
8734 This logic loses for arguments like (type *)malloc (sizeof (type)),
8735 since stripping the casts leaves malloc's VOID return type.
8736 Perhaps we ought to inherit the type from the non-VOID argument here? */
8737 STRIP_NOPS (src);
8738 STRIP_NOPS (dest);
8739 if (!POINTER_TYPE_P (TREE_TYPE (src))
8740 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8741 return NULL_TREE;
8742 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8743 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8744 {
8745 tree tem = TREE_OPERAND (src, 0);
8746 STRIP_NOPS (tem);
8747 if (tem != TREE_OPERAND (src, 0))
8748 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8749 }
8750 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8751 {
8752 tree tem = TREE_OPERAND (dest, 0);
8753 STRIP_NOPS (tem);
8754 if (tem != TREE_OPERAND (dest, 0))
8755 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8756 }
8757 srctype = TREE_TYPE (TREE_TYPE (src));
8758 if (TREE_CODE (srctype) == ARRAY_TYPE
8759 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8760 {
8761 srctype = TREE_TYPE (srctype);
8762 STRIP_NOPS (src);
8763 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8764 }
8765 desttype = TREE_TYPE (TREE_TYPE (dest));
8766 if (TREE_CODE (desttype) == ARRAY_TYPE
8767 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8768 {
8769 desttype = TREE_TYPE (desttype);
8770 STRIP_NOPS (dest);
8771 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8772 }
8773 if (TREE_ADDRESSABLE (srctype)
8774 || TREE_ADDRESSABLE (desttype))
8775 return NULL_TREE;
8776
8777 src_align = get_pointer_alignment (src);
8778 dest_align = get_pointer_alignment (dest);
8779 if (dest_align < TYPE_ALIGN (desttype)
8780 || src_align < TYPE_ALIGN (srctype))
8781 return NULL_TREE;
8782
8783 if (!ignore)
8784 dest = builtin_save_expr (dest);
8785
8786 /* Build accesses at offset zero with a ref-all character type. */
8787 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8788 ptr_mode, true), 0);
8789
8790 destvar = dest;
8791 STRIP_NOPS (destvar);
8792 if (TREE_CODE (destvar) == ADDR_EXPR
8793 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8794 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8795 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8796 else
8797 destvar = NULL_TREE;
8798
8799 srcvar = src;
8800 STRIP_NOPS (srcvar);
8801 if (TREE_CODE (srcvar) == ADDR_EXPR
8802 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8803 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8804 {
8805 if (!destvar
8806 || src_align >= TYPE_ALIGN (desttype))
8807 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8808 srcvar, off0);
8809 else if (!STRICT_ALIGNMENT)
8810 {
8811 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8812 src_align);
8813 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8814 }
8815 else
8816 srcvar = NULL_TREE;
8817 }
8818 else
8819 srcvar = NULL_TREE;
8820
8821 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8822 return NULL_TREE;
8823
8824 if (srcvar == NULL_TREE)
8825 {
8826 STRIP_NOPS (src);
8827 if (src_align >= TYPE_ALIGN (desttype))
8828 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8829 else
8830 {
8831 if (STRICT_ALIGNMENT)
8832 return NULL_TREE;
8833 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8834 src_align);
8835 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8836 }
8837 }
8838 else if (destvar == NULL_TREE)
8839 {
8840 STRIP_NOPS (dest);
8841 if (dest_align >= TYPE_ALIGN (srctype))
8842 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8843 else
8844 {
8845 if (STRICT_ALIGNMENT)
8846 return NULL_TREE;
8847 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8848 dest_align);
8849 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8850 }
8851 }
8852
8853 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8854 }
8855
8856 if (ignore)
8857 return expr;
8858
8859 if (endp == 0 || endp == 3)
8860 return omit_one_operand_loc (loc, type, dest, expr);
8861
8862 if (expr == len)
8863 expr = NULL_TREE;
8864
8865 if (endp == 2)
8866 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8867 ssize_int (1));
8868
8869 dest = fold_build_pointer_plus_loc (loc, dest, len);
8870 dest = fold_convert_loc (loc, type, dest);
8871 if (expr)
8872 dest = omit_one_operand_loc (loc, type, dest, expr);
8873 return dest;
8874 }
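/* Illustrative sketch, not part of GCC: the ENDP convention above in
   terms of the user-visible library calls.  The names are ours, and
   mempcpy/stpcpy assume a GNU environment (_GNU_SOURCE).  For the
   string copy, LEN counts the terminating NUL.  */
#if 0
#define _GNU_SOURCE
#include <string.h>

void
endp_demo (char *dest, const char *src, size_t len)
{
  char *r0 = memcpy (dest, src, len);   /* ENDP 0: r0 == dest */
  char *r1 = mempcpy (dest, src, len);  /* ENDP 1: r1 == dest + len */
  char *r2 = stpcpy (dest, src);        /* ENDP 2: dest + len - 1, the
                                           address of the copied NUL */
  char *r3 = memmove (dest, src, len);  /* ENDP 3: r3 == dest, and the
                                           regions may overlap */
  (void) r0; (void) r1; (void) r2; (void) r3;
}
#endif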
8875
8876 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8877 If LEN is not NULL, it represents the length of the string to be
8878 copied. Return NULL_TREE if no simplification can be made. */
8879
8880 tree
8881 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8882 {
8883 tree fn;
8884
8885 if (!validate_arg (dest, POINTER_TYPE)
8886 || !validate_arg (src, POINTER_TYPE))
8887 return NULL_TREE;
8888
8889 /* If SRC and DEST are the same (and not volatile), return DEST. */
8890 if (operand_equal_p (src, dest, 0))
8891 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8892
8893 if (optimize_function_for_size_p (cfun))
8894 return NULL_TREE;
8895
8896 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8897 if (!fn)
8898 return NULL_TREE;
8899
8900 if (!len)
8901 {
8902 len = c_strlen (src, 1);
8903 if (! len || TREE_SIDE_EFFECTS (len))
8904 return NULL_TREE;
8905 }
8906
8907 len = fold_convert_loc (loc, size_type_node, len);
8908 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8909 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8910 build_call_expr_loc (loc, fn, 3, dest, src, len));
8911 }
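/* Illustrative sketch, not part of GCC: the strcpy fold above when the
   source length is known.  The function name is ours; assumes we are
   not optimizing for size.  */
#if 0
#include <string.h>

void
greet (char *dest)
{
  /* c_strlen ("hello") is 5, so this folds to memcpy (dest, "hello", 6):
     the length is bumped by one to include the terminating NUL.  */
  strcpy (dest, "hello");
}
#endif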
8912
8913 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8914 Return NULL_TREE if no simplification can be made. */
8915
8916 static tree
8917 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8918 {
8919 tree fn, len, lenp1, call, type;
8920
8921 if (!validate_arg (dest, POINTER_TYPE)
8922 || !validate_arg (src, POINTER_TYPE))
8923 return NULL_TREE;
8924
8925 len = c_strlen (src, 1);
8926 if (!len
8927 || TREE_CODE (len) != INTEGER_CST)
8928 return NULL_TREE;
8929
8930 if (optimize_function_for_size_p (cfun)
8931 /* If length is zero it's small enough. */
8932 && !integer_zerop (len))
8933 return NULL_TREE;
8934
8935 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8936 if (!fn)
8937 return NULL_TREE;
8938
8939 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8940 fold_convert_loc (loc, size_type_node, len),
8941 build_int_cst (size_type_node, 1));
8942 /* We use dest twice in building our expression. Save it from
8943 multiple expansions. */
8944 dest = builtin_save_expr (dest);
8945 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8946
8947 type = TREE_TYPE (TREE_TYPE (fndecl));
8948 dest = fold_build_pointer_plus_loc (loc, dest, len);
8949 dest = fold_convert_loc (loc, type, dest);
8950 dest = omit_one_operand_loc (loc, type, dest, call);
8951 return dest;
8952 }
8953
8954 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8955 If SLEN is not NULL, it represents the length of the source string.
8956 Return NULL_TREE if no simplification can be made. */
8957
8958 tree
8959 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8960 tree src, tree len, tree slen)
8961 {
8962 tree fn;
8963
8964 if (!validate_arg (dest, POINTER_TYPE)
8965 || !validate_arg (src, POINTER_TYPE)
8966 || !validate_arg (len, INTEGER_TYPE))
8967 return NULL_TREE;
8968
8969 /* If the LEN parameter is zero, return DEST. */
8970 if (integer_zerop (len))
8971 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8972
8973 /* We can't compare slen with len as constants below if len is not a
8974 constant. */
8975 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8976 return NULL_TREE;
8977
8978 if (!slen)
8979 slen = c_strlen (src, 1);
8980
8981 /* Now, the length of the source string must be a compile-time constant. */
8982 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8983 return NULL_TREE;
8984
8985 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8986
8987 /* We do not support simplification of this case, though we do
8988 support it when expanding trees into RTL. */
8989 /* FIXME: generate a call to __builtin_memset. */
8990 if (tree_int_cst_lt (slen, len))
8991 return NULL_TREE;
8992
8993 /* OK transform into builtin memcpy. */
8994 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8995 if (!fn)
8996 return NULL_TREE;
8997
8998 len = fold_convert_loc (loc, size_type_node, len);
8999 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9000 build_call_expr_loc (loc, fn, 3, dest, src, len));
9001 }
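/* Illustrative sketch, not part of GCC, of the strncpy fold above; the
   function name is ours.  */
#if 0
#include <string.h>

void
copy_tag (char *dest)
{
  /* strlen ("ab") + 1 == 3 >= 3, so no zero padding is needed and the
     call folds to memcpy (dest, "ab", 3).  With a count of 8 the fold
     is skipped, since strncpy would have to pad with zeros.  */
  strncpy (dest, "ab", 3);
}
#endif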
9002
9003 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9004 arguments to the call, and TYPE is its return type.
9005 Return NULL_TREE if no simplification can be made. */
9006
9007 static tree
9008 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9009 {
9010 if (!validate_arg (arg1, POINTER_TYPE)
9011 || !validate_arg (arg2, INTEGER_TYPE)
9012 || !validate_arg (len, INTEGER_TYPE))
9013 return NULL_TREE;
9014 else
9015 {
9016 const char *p1;
9017
9018 if (TREE_CODE (arg2) != INTEGER_CST
9019 || !tree_fits_uhwi_p (len))
9020 return NULL_TREE;
9021
9022 p1 = c_getstr (arg1);
9023 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9024 {
9025 char c;
9026 const char *r;
9027 tree tem;
9028
9029 if (target_char_cast (arg2, &c))
9030 return NULL_TREE;
9031
9032 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
9033
9034 if (r == NULL)
9035 return build_int_cst (TREE_TYPE (arg1), 0);
9036
9037 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9038 return fold_convert_loc (loc, type, tem);
9039 }
9040 return NULL_TREE;
9041 }
9042 }
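/* Illustrative sketch, not part of GCC: the memchr fold above with all
   arguments constant.  The function name is ours.  */
#if 0
#include <string.h>

const char *
find_l (void)
{
  /* Computed at compile time: a pointer to "hello" + 2.  Searching for
     a byte that is absent (say 'z') would fold to a null pointer.  */
  return memchr ("hello", 'l', 6);
}
#endif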
9043
9044 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9045 Return NULL_TREE if no simplification can be made. */
9046
9047 static tree
9048 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9049 {
9050 const char *p1, *p2;
9051
9052 if (!validate_arg (arg1, POINTER_TYPE)
9053 || !validate_arg (arg2, POINTER_TYPE)
9054 || !validate_arg (len, INTEGER_TYPE))
9055 return NULL_TREE;
9056
9057 /* If the LEN parameter is zero, return zero. */
9058 if (integer_zerop (len))
9059 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9060 arg1, arg2);
9061
9062 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9063 if (operand_equal_p (arg1, arg2, 0))
9064 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9065
9066 p1 = c_getstr (arg1);
9067 p2 = c_getstr (arg2);
9068
9069 /* If all arguments are constant, and the value of len is not greater
9070 than the lengths of arg1 and arg2, evaluate at compile-time. */
9071 if (tree_fits_uhwi_p (len) && p1 && p2
9072 && compare_tree_int (len, strlen (p1) + 1) <= 0
9073 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9074 {
9075 const int r = memcmp (p1, p2, tree_to_uhwi (len));
9076
9077 if (r > 0)
9078 return integer_one_node;
9079 else if (r < 0)
9080 return integer_minus_one_node;
9081 else
9082 return integer_zero_node;
9083 }
9084
9085 /* If the LEN parameter is one, return an expression corresponding to
9086 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9087 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9088 {
9089 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9090 tree cst_uchar_ptr_node
9091 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9092
9093 tree ind1
9094 = fold_convert_loc (loc, integer_type_node,
9095 build1 (INDIRECT_REF, cst_uchar_node,
9096 fold_convert_loc (loc,
9097 cst_uchar_ptr_node,
9098 arg1)));
9099 tree ind2
9100 = fold_convert_loc (loc, integer_type_node,
9101 build1 (INDIRECT_REF, cst_uchar_node,
9102 fold_convert_loc (loc,
9103 cst_uchar_ptr_node,
9104 arg2)));
9105 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9106 }
9107
9108 return NULL_TREE;
9109 }
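/* Illustrative sketch, not part of GCC, of the three memcmp folds above;
   the function names are ours.  */
#if 0
#include <string.h>

/* LEN == 0: folds to the constant 0 (a and b kept for side effects).  */
int zero_len (const char *a, const char *b) { return memcmp (a, b, 0); }

/* All arguments constant: evaluated at compile time, here to -1.  */
int all_const (void) { return memcmp ("abc", "abd", 3); }

/* LEN == 1: folds to
   *(const unsigned char *) a - *(const unsigned char *) b.  */
int one_byte (const char *a, const char *b) { return memcmp (a, b, 1); }
#endif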
9110
9111 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9112 Return NULL_TREE if no simplification can be made. */
9113
9114 static tree
9115 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9116 {
9117 const char *p1, *p2;
9118
9119 if (!validate_arg (arg1, POINTER_TYPE)
9120 || !validate_arg (arg2, POINTER_TYPE))
9121 return NULL_TREE;
9122
9123 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9124 if (operand_equal_p (arg1, arg2, 0))
9125 return integer_zero_node;
9126
9127 p1 = c_getstr (arg1);
9128 p2 = c_getstr (arg2);
9129
9130 if (p1 && p2)
9131 {
9132 const int i = strcmp (p1, p2);
9133 if (i < 0)
9134 return integer_minus_one_node;
9135 else if (i > 0)
9136 return integer_one_node;
9137 else
9138 return integer_zero_node;
9139 }
9140
9141 /* If the second arg is "", return *(const unsigned char*)arg1. */
9142 if (p2 && *p2 == '\0')
9143 {
9144 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9145 tree cst_uchar_ptr_node
9146 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9147
9148 return fold_convert_loc (loc, integer_type_node,
9149 build1 (INDIRECT_REF, cst_uchar_node,
9150 fold_convert_loc (loc,
9151 cst_uchar_ptr_node,
9152 arg1)));
9153 }
9154
9155 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9156 if (p1 && *p1 == '\0')
9157 {
9158 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9159 tree cst_uchar_ptr_node
9160 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9161
9162 tree temp
9163 = fold_convert_loc (loc, integer_type_node,
9164 build1 (INDIRECT_REF, cst_uchar_node,
9165 fold_convert_loc (loc,
9166 cst_uchar_ptr_node,
9167 arg2)));
9168 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9169 }
9170
9171 return NULL_TREE;
9172 }
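/* Illustrative sketch, not part of GCC, of the strcmp folds above; the
   function names are ours.  */
#if 0
#include <string.h>

/* Both arguments constant: folded at compile time, here to -1.  */
int both_const (void) { return strcmp ("ab", "ac"); }

/* Second argument "": folds to *(const unsigned char *) s.  */
int vs_empty (const char *s) { return strcmp (s, ""); }

/* First argument "": folds to -*(const unsigned char *) s.  */
int empty_vs (const char *s) { return strcmp ("", s); }
#endif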
9173
9174 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9175 Return NULL_TREE if no simplification can be made. */
9176
9177 static tree
9178 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9179 {
9180 const char *p1, *p2;
9181
9182 if (!validate_arg (arg1, POINTER_TYPE)
9183 || !validate_arg (arg2, POINTER_TYPE)
9184 || !validate_arg (len, INTEGER_TYPE))
9185 return NULL_TREE;
9186
9187 /* If the LEN parameter is zero, return zero. */
9188 if (integer_zerop (len))
9189 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9190 arg1, arg2);
9191
9192 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9193 if (operand_equal_p (arg1, arg2, 0))
9194 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9195
9196 p1 = c_getstr (arg1);
9197 p2 = c_getstr (arg2);
9198
9199 if (tree_fits_uhwi_p (len) && p1 && p2)
9200 {
9201 const int i = strncmp (p1, p2, tree_to_uhwi (len));
9202 if (i > 0)
9203 return integer_one_node;
9204 else if (i < 0)
9205 return integer_minus_one_node;
9206 else
9207 return integer_zero_node;
9208 }
9209
9210 /* If the second arg is "", and the length is greater than zero,
9211 return *(const unsigned char*)arg1. */
9212 if (p2 && *p2 == '\0'
9213 && TREE_CODE (len) == INTEGER_CST
9214 && tree_int_cst_sgn (len) == 1)
9215 {
9216 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9217 tree cst_uchar_ptr_node
9218 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9219
9220 return fold_convert_loc (loc, integer_type_node,
9221 build1 (INDIRECT_REF, cst_uchar_node,
9222 fold_convert_loc (loc,
9223 cst_uchar_ptr_node,
9224 arg1)));
9225 }
9226
9227 /* If the first arg is "", and the length is greater than zero,
9228 return -*(const unsigned char*)arg2. */
9229 if (p1 && *p1 == '\0'
9230 && TREE_CODE (len) == INTEGER_CST
9231 && tree_int_cst_sgn (len) == 1)
9232 {
9233 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9234 tree cst_uchar_ptr_node
9235 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9236
9237 tree temp = fold_convert_loc (loc, integer_type_node,
9238 build1 (INDIRECT_REF, cst_uchar_node,
9239 fold_convert_loc (loc,
9240 cst_uchar_ptr_node,
9241 arg2)));
9242 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9243 }
9244
9245 /* If the LEN parameter is one, return an expression corresponding to
9246 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9247 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9248 {
9249 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9250 tree cst_uchar_ptr_node
9251 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9252
9253 tree ind1 = fold_convert_loc (loc, integer_type_node,
9254 build1 (INDIRECT_REF, cst_uchar_node,
9255 fold_convert_loc (loc,
9256 cst_uchar_ptr_node,
9257 arg1)));
9258 tree ind2 = fold_convert_loc (loc, integer_type_node,
9259 build1 (INDIRECT_REF, cst_uchar_node,
9260 fold_convert_loc (loc,
9261 cst_uchar_ptr_node,
9262 arg2)));
9263 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9264 }
9265
9266 return NULL_TREE;
9267 }
9268
9269 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9270 ARG. Return NULL_TREE if no simplification can be made. */
9271
9272 static tree
9273 fold_builtin_signbit (location_t loc, tree arg, tree type)
9274 {
9275 if (!validate_arg (arg, REAL_TYPE))
9276 return NULL_TREE;
9277
9278 /* If ARG is a compile-time constant, determine the result. */
9279 if (TREE_CODE (arg) == REAL_CST
9280 && !TREE_OVERFLOW (arg))
9281 {
9282 REAL_VALUE_TYPE c;
9283
9284 c = TREE_REAL_CST (arg);
9285 return (REAL_VALUE_NEGATIVE (c)
9286 ? build_one_cst (type)
9287 : build_zero_cst (type));
9288 }
9289
9290 /* If ARG is non-negative, the result is always zero. */
9291 if (tree_expr_nonnegative_p (arg))
9292 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9293
9294 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9295 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9296 return fold_convert (type,
9297 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9298 build_real (TREE_TYPE (arg), dconst0)));
9299
9300 return NULL_TREE;
9301 }
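/* Illustrative sketch, not part of GCC, of the signbit folds above; the
   function names are ours.  */
#if 0
#include <math.h>

/* Constant argument: folds at compile time to 1 here (negative).  */
int neg_const (void) { return signbit (-2.5); }

/* Provably non-negative argument: folds to 0, keeping x's evaluation.  */
int nonneg (double x) { return signbit (fabs (x)); }

/* Only for formats without signed zeros (so not IEEE) does
   signbit (x) fold further to x < 0.0.  */
#endif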
9302
9303 /* Fold function call to builtin copysign, copysignf or copysignl with
9304 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9305 be made. */
9306
9307 static tree
9308 fold_builtin_copysign (location_t loc, tree fndecl,
9309 tree arg1, tree arg2, tree type)
9310 {
9311 tree tem;
9312
9313 if (!validate_arg (arg1, REAL_TYPE)
9314 || !validate_arg (arg2, REAL_TYPE))
9315 return NULL_TREE;
9316
9317 /* copysign(X,X) is X. */
9318 if (operand_equal_p (arg1, arg2, 0))
9319 return fold_convert_loc (loc, type, arg1);
9320
9321 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9322 if (TREE_CODE (arg1) == REAL_CST
9323 && TREE_CODE (arg2) == REAL_CST
9324 && !TREE_OVERFLOW (arg1)
9325 && !TREE_OVERFLOW (arg2))
9326 {
9327 REAL_VALUE_TYPE c1, c2;
9328
9329 c1 = TREE_REAL_CST (arg1);
9330 c2 = TREE_REAL_CST (arg2);
9331 /* c1.sign := c2.sign. */
9332 real_copysign (&c1, &c2);
9333 return build_real (type, c1);
9334 }
9335
9336 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9337 Remember to evaluate Y for side-effects. */
9338 if (tree_expr_nonnegative_p (arg2))
9339 return omit_one_operand_loc (loc, type,
9340 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9341 arg2);
9342
9343 /* Strip sign changing operations for the first argument. */
9344 tem = fold_strip_sign_ops (arg1);
9345 if (tem)
9346 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9347
9348 return NULL_TREE;
9349 }
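/* Illustrative sketch, not part of GCC, of the copysign folds above; the
   function names are ours.  */
#if 0
#include <math.h>

double same (double x) { return copysign (x, x); }     /* folds to x */
double consts (void) { return copysign (3.0, -0.5); }  /* folds to -3.0 */

/* Y provably non-negative: folds to fabs (x), with y still evaluated
   for its side effects.  */
double nonneg_y (double x, double y) { return copysign (x, fabs (y)); }
#endif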
9350
9351 /* Fold a call to builtin isascii with argument ARG. */
9352
9353 static tree
9354 fold_builtin_isascii (location_t loc, tree arg)
9355 {
9356 if (!validate_arg (arg, INTEGER_TYPE))
9357 return NULL_TREE;
9358 else
9359 {
9360 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9361 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9362 build_int_cst (integer_type_node,
9363 ~ (unsigned HOST_WIDE_INT) 0x7f));
9364 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9365 arg, integer_zero_node);
9366 }
9367 }
9368
9369 /* Fold a call to builtin toascii with argument ARG. */
9370
9371 static tree
9372 fold_builtin_toascii (location_t loc, tree arg)
9373 {
9374 if (!validate_arg (arg, INTEGER_TYPE))
9375 return NULL_TREE;
9376
9377 /* Transform toascii(c) -> (c & 0x7f). */
9378 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9379 build_int_cst (integer_type_node, 0x7f));
9380 }
9381
9382 /* Fold a call to builtin isdigit with argument ARG. */
9383
9384 static tree
9385 fold_builtin_isdigit (location_t loc, tree arg)
9386 {
9387 if (!validate_arg (arg, INTEGER_TYPE))
9388 return NULL_TREE;
9389 else
9390 {
9391 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9392 /* According to the C standard, isdigit is unaffected by locale.
9393 However, it definitely is affected by the target character set. */
9394 unsigned HOST_WIDE_INT target_digit0
9395 = lang_hooks.to_target_charset ('0');
9396
9397 if (target_digit0 == 0)
9398 return NULL_TREE;
9399
9400 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9401 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9402 build_int_cst (unsigned_type_node, target_digit0));
9403 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9404 build_int_cst (unsigned_type_node, 9));
9405 }
9406 }
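/* Illustrative sketch, not part of GCC, of the three ctype folds above,
   annotated with the expressions they become.  The function names are
   ours, and isascii/toascii assume an XSI-conforming <ctype.h>.  */
#if 0
#include <ctype.h>

int is_ascii (int c) { return isascii (c); }  /* (c & ~0x7f) == 0 */
int to_ascii (int c) { return toascii (c); }  /* c & 0x7f */

/* Becomes (unsigned) c - '0' <= 9 in the target character set: one
   unsigned comparison in place of a two-sided range check.  */
int is_digit (int c) { return isdigit (c); }
#endif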
9407
9408 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9409
9410 static tree
9411 fold_builtin_fabs (location_t loc, tree arg, tree type)
9412 {
9413 if (!validate_arg (arg, REAL_TYPE))
9414 return NULL_TREE;
9415
9416 arg = fold_convert_loc (loc, type, arg);
9417 if (TREE_CODE (arg) == REAL_CST)
9418 return fold_abs_const (arg, type);
9419 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9420 }
9421
9422 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9423
9424 static tree
9425 fold_builtin_abs (location_t loc, tree arg, tree type)
9426 {
9427 if (!validate_arg (arg, INTEGER_TYPE))
9428 return NULL_TREE;
9429
9430 arg = fold_convert_loc (loc, type, arg);
9431 if (TREE_CODE (arg) == INTEGER_CST)
9432 return fold_abs_const (arg, type);
9433 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9434 }
9435
9436 /* Fold a fma operation with arguments ARG[012]. */
9437
9438 tree
9439 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9440 tree type, tree arg0, tree arg1, tree arg2)
9441 {
9442 if (TREE_CODE (arg0) == REAL_CST
9443 && TREE_CODE (arg1) == REAL_CST
9444 && TREE_CODE (arg2) == REAL_CST)
9445 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9446
9447 return NULL_TREE;
9448 }
9449
9450 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9451
9452 static tree
9453 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9454 {
9455 if (validate_arg (arg0, REAL_TYPE)
9456 && validate_arg (arg1, REAL_TYPE)
9457 && validate_arg (arg2, REAL_TYPE))
9458 {
9459 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9460 if (tem)
9461 return tem;
9462
9463 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9464 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9465 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9466 }
9467 return NULL_TREE;
9468 }
9469
9470 /* Fold a call to builtin fmin or fmax. */
9471
9472 static tree
9473 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9474 tree type, bool max)
9475 {
9476 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9477 {
9478 /* Calculate the result when the argument is a constant. */
9479 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9480
9481 if (res)
9482 return res;
9483
9484 /* If either argument is NaN, return the other one. Avoid the
9485 transformation if we get (and honor) a signalling NaN. Using
9486 omit_one_operand() ensures we create a non-lvalue. */
9487 if (TREE_CODE (arg0) == REAL_CST
9488 && real_isnan (&TREE_REAL_CST (arg0))
9489 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9490 || ! TREE_REAL_CST (arg0).signalling))
9491 return omit_one_operand_loc (loc, type, arg1, arg0);
9492 if (TREE_CODE (arg1) == REAL_CST
9493 && real_isnan (&TREE_REAL_CST (arg1))
9494 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9495 || ! TREE_REAL_CST (arg1).signalling))
9496 return omit_one_operand_loc (loc, type, arg0, arg1);
9497
9498 /* Transform fmin/fmax(x,x) -> x. */
9499 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9500 return omit_one_operand_loc (loc, type, arg0, arg1);
9501
9502 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9503 functions to return the numeric arg if the other one is NaN.
9504 These tree codes don't honor that, so only transform if
9505 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9506 handled, so we don't have to worry about it either. */
9507 if (flag_finite_math_only)
9508 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9509 fold_convert_loc (loc, type, arg0),
9510 fold_convert_loc (loc, type, arg1));
9511 }
9512 return NULL_TREE;
9513 }
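/* Illustrative sketch, not part of GCC, of the fmin/fmax folds above;
   the function names are ours.  */
#if 0
#include <math.h>

double k (void) { return fmax (1.0, 2.0); }    /* constant: folds to 2.0 */
double same (double x) { return fmax (x, x); } /* folds to x */

/* A quiet-NaN constant argument drops out: the result is x, matching
   the C99 rule that fmin/fmax return the numeric operand.  */
double nan_arg (double x) { return fmin (x, NAN); }

/* fmax (a, b) is rewritten to MAX_EXPR (a, b) only under
   -ffinite-math-only, since MAX_EXPR ignores the NaN rule.  */
#endif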
9514
9515 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9516
9517 static tree
9518 fold_builtin_carg (location_t loc, tree arg, tree type)
9519 {
9520 if (validate_arg (arg, COMPLEX_TYPE)
9521 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9522 {
9523 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9524
9525 if (atan2_fn)
9526 {
9527 tree new_arg = builtin_save_expr (arg);
9528 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9529 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9530 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9531 }
9532 }
9533
9534 return NULL_TREE;
9535 }
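/* Illustrative sketch, not part of GCC, of the carg fold above; the
   function name is ours.  */
#if 0
#include <complex.h>

double
phase (double complex z)
{
  /* Folds to atan2 (cimag (z), creal (z)), with z evaluated once.  */
  return carg (z);
}
#endif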
9536
9537 /* Fold a call to builtin logb/ilogb. */
9538
9539 static tree
9540 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9541 {
9542 if (! validate_arg (arg, REAL_TYPE))
9543 return NULL_TREE;
9544
9545 STRIP_NOPS (arg);
9546
9547 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9548 {
9549 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9550
9551 switch (value->cl)
9552 {
9553 case rvc_nan:
9554 case rvc_inf:
9555 /* If arg is Inf or NaN and we're logb, return it. */
9556 if (TREE_CODE (rettype) == REAL_TYPE)
9557 {
9558 /* For logb(-Inf) we have to return +Inf. */
9559 if (real_isinf (value) && real_isneg (value))
9560 {
9561 REAL_VALUE_TYPE tem;
9562 real_inf (&tem);
9563 return build_real (rettype, tem);
9564 }
9565 return fold_convert_loc (loc, rettype, arg);
9566 }
9567 /* Fall through... */
9568 case rvc_zero:
9569 /* Zero may set errno and/or raise an exception for logb, also
9570 for ilogb we don't know FP_ILOGB0. */
9571 return NULL_TREE;
9572 case rvc_normal:
9573 /* For normal numbers, proceed iff radix == 2. In GCC,
9574 normalized significands are in the range [0.5, 1.0). We
9575 want the exponent as if they were [1.0, 2.0) so get the
9576 exponent and subtract 1. */
9577 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9578 return fold_convert_loc (loc, rettype,
9579 build_int_cst (integer_type_node,
9580 REAL_EXP (value)-1));
9581 break;
9582 }
9583 }
9584
9585 return NULL_TREE;
9586 }
9587
9588 /* Fold a call to builtin significand, if radix == 2. */
9589
9590 static tree
9591 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9592 {
9593 if (! validate_arg (arg, REAL_TYPE))
9594 return NULL_TREE;
9595
9596 STRIP_NOPS (arg);
9597
9598 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9599 {
9600 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9601
9602 switch (value->cl)
9603 {
9604 case rvc_zero:
9605 case rvc_nan:
9606 case rvc_inf:
9607 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9608 return fold_convert_loc (loc, rettype, arg);
9609 case rvc_normal:
9610 /* For normal numbers, proceed iff radix == 2. */
9611 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9612 {
9613 REAL_VALUE_TYPE result = *value;
9614 /* In GCC, normalized significands are in the range [0.5,
9615 1.0). We want them to be [1.0, 2.0) so set the
9616 exponent to 1. */
9617 SET_REAL_EXP (&result, 1);
9618 return build_real (rettype, result);
9619 }
9620 break;
9621 }
9622 }
9623
9624 return NULL_TREE;
9625 }
9626
9627 /* Fold a call to builtin frexp, we can assume the base is 2. */
9628
9629 static tree
9630 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9631 {
9632 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9633 return NULL_TREE;
9634
9635 STRIP_NOPS (arg0);
9636
9637 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9638 return NULL_TREE;
9639
9640 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9641
9642 /* Proceed if a valid pointer type was passed in. */
9643 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9644 {
9645 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9646 tree frac, exp;
9647
9648 switch (value->cl)
9649 {
9650 case rvc_zero:
9651 /* For +-0, return (*exp = 0, +-0). */
9652 exp = integer_zero_node;
9653 frac = arg0;
9654 break;
9655 case rvc_nan:
9656 case rvc_inf:
9657 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9658 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9659 case rvc_normal:
9660 {
9661 /* Since the frexp function always expects base 2, and in
9662 GCC normalized significands are already in the range
9663 [0.5, 1.0), we have exactly what frexp wants. */
9664 REAL_VALUE_TYPE frac_rvt = *value;
9665 SET_REAL_EXP (&frac_rvt, 0);
9666 frac = build_real (rettype, frac_rvt);
9667 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9668 }
9669 break;
9670 default:
9671 gcc_unreachable ();
9672 }
9673
9674 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9675 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9676 TREE_SIDE_EFFECTS (arg1) = 1;
9677 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9678 }
9679
9680 return NULL_TREE;
9681 }
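/* Illustrative sketch, not part of GCC, of the frexp constant fold above;
   the function name is ours.  */
#if 0
#include <math.h>

double
split (int *e)
{
  /* 6.0 == 0.75 * 2**3, and normalized significands already lie in
     [0.5, 1.0), so this folds to the pair (*e = 3, 0.75).  */
  return frexp (6.0, e);
}
#endif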
9682
9683 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9684 then we can assume the base is two. If it's false, then we have to
9685 check the mode of the TYPE parameter in certain cases. */
9686
9687 static tree
9688 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9689 tree type, bool ldexp)
9690 {
9691 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9692 {
9693 STRIP_NOPS (arg0);
9694 STRIP_NOPS (arg1);
9695
9696 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9697 if (real_zerop (arg0) || integer_zerop (arg1)
9698 || (TREE_CODE (arg0) == REAL_CST
9699 && !real_isfinite (&TREE_REAL_CST (arg0))))
9700 return omit_one_operand_loc (loc, type, arg0, arg1);
9701
9702 /* If both arguments are constant, then try to evaluate it. */
9703 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9704 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9705 && tree_fits_shwi_p (arg1))
9706 {
9707 /* Bound the maximum adjustment to twice the range of the
9708 mode's valid exponents. Use labs to ensure the range is
9709 positive as a sanity check. */
9710 const long max_exp_adj = 2 *
9711 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9712 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9713
9714 /* Get the user-requested adjustment. */
9715 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9716
9717 /* The requested adjustment must be inside this range. This
9718 is a preliminary cap to avoid things like overflow, we
9719 may still fail to compute the result for other reasons. */
9720 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9721 {
9722 REAL_VALUE_TYPE initial_result;
9723
9724 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9725
9726 /* Ensure we didn't overflow. */
9727 if (! real_isinf (&initial_result))
9728 {
9729 const REAL_VALUE_TYPE trunc_result
9730 = real_value_truncate (TYPE_MODE (type), initial_result);
9731
9732 /* Only proceed if the target mode can hold the
9733 resulting value. */
9734 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9735 return build_real (type, trunc_result);
9736 }
9737 }
9738 }
9739 }
9740
9741 return NULL_TREE;
9742 }
9743
9744 /* Fold a call to builtin modf. */
9745
9746 static tree
9747 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9748 {
9749 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9750 return NULL_TREE;
9751
9752 STRIP_NOPS (arg0);
9753
9754 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9755 return NULL_TREE;
9756
9757 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9758
9759 /* Proceed if a valid pointer type was passed in. */
9760 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9761 {
9762 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9763 REAL_VALUE_TYPE trunc, frac;
9764
9765 switch (value->cl)
9766 {
9767 case rvc_nan:
9768 case rvc_zero:
9769 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9770 trunc = frac = *value;
9771 break;
9772 case rvc_inf:
9773 /* For +-Inf, return (*arg1 = arg0, +-0). */
9774 frac = dconst0;
9775 frac.sign = value->sign;
9776 trunc = *value;
9777 break;
9778 case rvc_normal:
9779 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9780 real_trunc (&trunc, VOIDmode, value);
9781 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9782 /* If the original number was negative and already
9783 integral, then the fractional part is -0.0. */
9784 if (value->sign && frac.cl == rvc_zero)
9785 frac.sign = value->sign;
9786 break;
9787 }
9788
9789 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9790 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9791 build_real (rettype, trunc));
9792 TREE_SIDE_EFFECTS (arg1) = 1;
9793 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9794 build_real (rettype, frac));
9795 }
9796
9797 return NULL_TREE;
9798 }
9799
9800 /* Given a location LOC, an interclass builtin function decl FNDECL
9801 and its single argument ARG, return a folded expression computing
9802 the same, or NULL_TREE if we either couldn't or didn't want to fold
9803 (the latter happens if there's an RTL instruction available). */
9804
9805 static tree
9806 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9807 {
9808 enum machine_mode mode;
9809
9810 if (!validate_arg (arg, REAL_TYPE))
9811 return NULL_TREE;
9812
9813 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9814 return NULL_TREE;
9815
9816 mode = TYPE_MODE (TREE_TYPE (arg));
9817
9818 /* If there is no optab, try generic code. */
9819 switch (DECL_FUNCTION_CODE (fndecl))
9820 {
9821 tree result;
9822
9823 CASE_FLT_FN (BUILT_IN_ISINF):
9824 {
9825 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9826 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9827 tree const type = TREE_TYPE (arg);
9828 REAL_VALUE_TYPE r;
9829 char buf[128];
9830
9831 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9832 real_from_string (&r, buf);
9833 result = build_call_expr (isgr_fn, 2,
9834 fold_build1_loc (loc, ABS_EXPR, type, arg),
9835 build_real (type, r));
9836 return result;
9837 }
9838 CASE_FLT_FN (BUILT_IN_FINITE):
9839 case BUILT_IN_ISFINITE:
9840 {
9841 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9842 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9843 tree const type = TREE_TYPE (arg);
9844 REAL_VALUE_TYPE r;
9845 char buf[128];
9846
9847 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9848 real_from_string (&r, buf);
9849 result = build_call_expr (isle_fn, 2,
9850 fold_build1_loc (loc, ABS_EXPR, type, arg),
9851 build_real (type, r));
9852 /*result = fold_build2_loc (loc, UNGT_EXPR,
9853 TREE_TYPE (TREE_TYPE (fndecl)),
9854 fold_build1_loc (loc, ABS_EXPR, type, arg),
9855 build_real (type, r));
9856 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9857 TREE_TYPE (TREE_TYPE (fndecl)),
9858 result);*/
9859 return result;
9860 }
9861 case BUILT_IN_ISNORMAL:
9862 {
9863 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9864 islessequal(fabs(x),DBL_MAX). */
9865 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9866 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9867 tree const type = TREE_TYPE (arg);
9868 REAL_VALUE_TYPE rmax, rmin;
9869 char buf[128];
9870
9871 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9872 real_from_string (&rmax, buf);
9873 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9874 real_from_string (&rmin, buf);
9875 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9876 result = build_call_expr (isle_fn, 2, arg,
9877 build_real (type, rmax));
9878 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9879 build_call_expr (isge_fn, 2, arg,
9880 build_real (type, rmin)));
9881 return result;
9882 }
9883 default:
9884 break;
9885 }
9886
9887 return NULL_TREE;
9888 }
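/* Illustrative sketch, not part of GCC, of the generic expansions above,
   annotated with the comparisons they produce.  The function names are
   ours, and DBL_MAX / DBL_MIN stand in for the largest finite and
   smallest normal values of the argument's mode.  */
#if 0
#include <math.h>

int inf_p (double x)    { return isinf (x); }
/* isgreater (fabs (x), DBL_MAX) */

int finite_p (double x) { return isfinite (x); }
/* islessequal (fabs (x), DBL_MAX) */

int normal_p (double x) { return isnormal (x); }
/* islessequal (fabs (x), DBL_MAX) & isgreaterequal (fabs (x), DBL_MIN) */
#endif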
9889
9890 /* Fold a call to __builtin_isnan(), __builtin_isinf(), __builtin_isinf_sign() or
9891 __builtin_isfinite(), as selected by BUILTIN_INDEX. ARG is the call's argument. */
9892
9893 static tree
9894 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9895 {
9896 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9897 REAL_VALUE_TYPE r;
9898
9899 if (!validate_arg (arg, REAL_TYPE))
9900 return NULL_TREE;
9901
9902 switch (builtin_index)
9903 {
9904 case BUILT_IN_ISINF:
9905 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9906 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9907
9908 if (TREE_CODE (arg) == REAL_CST)
9909 {
9910 r = TREE_REAL_CST (arg);
9911 if (real_isinf (&r))
9912 return real_compare (GT_EXPR, &r, &dconst0)
9913 ? integer_one_node : integer_minus_one_node;
9914 else
9915 return integer_zero_node;
9916 }
9917
9918 return NULL_TREE;
9919
9920 case BUILT_IN_ISINF_SIGN:
9921 {
9922 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9923 /* In a boolean context, GCC will fold the inner COND_EXPR to
9924 1. So e.g. "if (isinf_sign(x))" would be folded to just
9925 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9926 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9927 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9928 tree tmp = NULL_TREE;
9929
9930 arg = builtin_save_expr (arg);
9931
9932 if (signbit_fn && isinf_fn)
9933 {
9934 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9935 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9936
9937 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9938 signbit_call, integer_zero_node);
9939 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9940 isinf_call, integer_zero_node);
9941
9942 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9943 integer_minus_one_node, integer_one_node);
9944 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9945 isinf_call, tmp,
9946 integer_zero_node);
9947 }
9948
9949 return tmp;
9950 }
9951
9952 case BUILT_IN_ISFINITE:
9953 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9954 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9955 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9956
9957 if (TREE_CODE (arg) == REAL_CST)
9958 {
9959 r = TREE_REAL_CST (arg);
9960 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9961 }
9962
9963 return NULL_TREE;
9964
9965 case BUILT_IN_ISNAN:
9966 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9967 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9968
9969 if (TREE_CODE (arg) == REAL_CST)
9970 {
9971 r = TREE_REAL_CST (arg);
9972 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9973 }
9974
9975 arg = builtin_save_expr (arg);
9976 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9977
9978 default:
9979 gcc_unreachable ();
9980 }
9981 }
9982
9983 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9984 This builtin will generate code to return the appropriate floating
9985 point classification depending on the value of the floating point
9986 number passed in. The possible return values must be supplied as
9987 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9988 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9989 one floating point argument, which is "type generic". */
9990
9991 static tree
9992 fold_builtin_fpclassify (location_t loc, tree exp)
9993 {
9994 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9995 arg, type, res, tmp;
9996 enum machine_mode mode;
9997 REAL_VALUE_TYPE r;
9998 char buf[128];
9999
10000 /* Verify the required arguments in the original call. */
10001 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10002 INTEGER_TYPE, INTEGER_TYPE,
10003 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10004 return NULL_TREE;
10005
10006 fp_nan = CALL_EXPR_ARG (exp, 0);
10007 fp_infinite = CALL_EXPR_ARG (exp, 1);
10008 fp_normal = CALL_EXPR_ARG (exp, 2);
10009 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10010 fp_zero = CALL_EXPR_ARG (exp, 4);
10011 arg = CALL_EXPR_ARG (exp, 5);
10012 type = TREE_TYPE (arg);
10013 mode = TYPE_MODE (type);
10014 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10015
10016 /* fpclassify(x) ->
10017 isnan(x) ? FP_NAN :
10018 (fabs(x) == Inf ? FP_INFINITE :
10019 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10020 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10021
10022 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10023 build_real (type, dconst0));
10024 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10025 tmp, fp_zero, fp_subnormal);
10026
10027 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10028 real_from_string (&r, buf);
10029 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10030 arg, build_real (type, r));
10031 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10032
10033 if (HONOR_INFINITIES (mode))
10034 {
10035 real_inf (&r);
10036 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10037 build_real (type, r));
10038 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10039 fp_infinite, res);
10040 }
10041
10042 if (HONOR_NANS (mode))
10043 {
10044 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10045 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10046 }
10047
10048 return res;
10049 }
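/* Illustrative sketch, not part of GCC: the decision chain built above,
   written out as C.  The function name is ours; FP_* and the limits come
   from <math.h> and <float.h>.  */
#if 0
#include <float.h>
#include <math.h>

int
classify (double x)
{
  double ax = fabs (x);
  return isnan (x)      ? FP_NAN
       : ax == INFINITY ? FP_INFINITE
       : ax >= DBL_MIN  ? FP_NORMAL
       : ax == 0.0      ? FP_ZERO
       :                  FP_SUBNORMAL;
}
#endif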
10050
10051 /* Fold a call to an unordered comparison function such as
10052 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10053 being called and ARG0 and ARG1 are the arguments for the call.
10054 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10055 the opposite of the desired result. UNORDERED_CODE is used
10056 for modes that can hold NaNs and ORDERED_CODE is used for
10057 the rest. */
10058
10059 static tree
10060 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10061 enum tree_code unordered_code,
10062 enum tree_code ordered_code)
10063 {
10064 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10065 enum tree_code code;
10066 tree type0, type1;
10067 enum tree_code code0, code1;
10068 tree cmp_type = NULL_TREE;
10069
10070 type0 = TREE_TYPE (arg0);
10071 type1 = TREE_TYPE (arg1);
10072
10073 code0 = TREE_CODE (type0);
10074 code1 = TREE_CODE (type1);
10075
10076 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10077 /* Choose the wider of two real types. */
10078 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10079 ? type0 : type1;
10080 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10081 cmp_type = type0;
10082 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10083 cmp_type = type1;
10084
10085 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10086 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10087
10088 if (unordered_code == UNORDERED_EXPR)
10089 {
10090 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10091 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10092 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10093 }
10094
10095 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10096 : ordered_code;
10097 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10098 fold_build2_loc (loc, code, type, arg0, arg1));
10099 }
10100
10101 /* Fold a call to built-in function FNDECL with 0 arguments.
10102 IGNORE is true if the result of the function call is ignored. This
10103 function returns NULL_TREE if no simplification was possible. */
10104
10105 static tree
10106 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10107 {
10108 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10109 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10110 switch (fcode)
10111 {
10112 CASE_FLT_FN (BUILT_IN_INF):
10113 case BUILT_IN_INFD32:
10114 case BUILT_IN_INFD64:
10115 case BUILT_IN_INFD128:
10116 return fold_builtin_inf (loc, type, true);
10117
10118 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10119 return fold_builtin_inf (loc, type, false);
10120
10121 case BUILT_IN_CLASSIFY_TYPE:
10122 return fold_builtin_classify_type (NULL_TREE);
10123
10124 case BUILT_IN_UNREACHABLE:
10125 if (flag_sanitize & SANITIZE_UNREACHABLE
10126 && (current_function_decl == NULL
10127 || !lookup_attribute ("no_sanitize_undefined",
10128 DECL_ATTRIBUTES (current_function_decl))))
10129 return ubsan_instrument_unreachable (loc);
10130 break;
10131
10132 default:
10133 break;
10134 }
10135 return NULL_TREE;
10136 }
10137
10138 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10139 IGNORE is true if the result of the function call is ignored. This
10140 function returns NULL_TREE if no simplification was possible. */
10141
10142 static tree
10143 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10144 {
10145 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10146 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10147 switch (fcode)
10148 {
10149 case BUILT_IN_CONSTANT_P:
10150 {
10151 tree val = fold_builtin_constant_p (arg0);
10152
10153 /* Gimplification will pull the CALL_EXPR for the builtin out of
10154 an if condition. When not optimizing, we'll not CSE it back.
10155 To avoid link-error regressions, return false now. */
10156 if (!val && !optimize)
10157 val = integer_zero_node;
10158
10159 return val;
10160 }
10161
10162 case BUILT_IN_CLASSIFY_TYPE:
10163 return fold_builtin_classify_type (arg0);
10164
10165 case BUILT_IN_STRLEN:
10166 return fold_builtin_strlen (loc, type, arg0);
10167
10168 CASE_FLT_FN (BUILT_IN_FABS):
10169 case BUILT_IN_FABSD32:
10170 case BUILT_IN_FABSD64:
10171 case BUILT_IN_FABSD128:
10172 return fold_builtin_fabs (loc, arg0, type);
10173
10174 case BUILT_IN_ABS:
10175 case BUILT_IN_LABS:
10176 case BUILT_IN_LLABS:
10177 case BUILT_IN_IMAXABS:
10178 return fold_builtin_abs (loc, arg0, type);
10179
10180 CASE_FLT_FN (BUILT_IN_CONJ):
10181 if (validate_arg (arg0, COMPLEX_TYPE)
10182 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10183 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10184 break;
10185
10186 CASE_FLT_FN (BUILT_IN_CREAL):
10187 if (validate_arg (arg0, COMPLEX_TYPE)
10188 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10189 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10190 break;
10191
10192 CASE_FLT_FN (BUILT_IN_CIMAG):
10193 if (validate_arg (arg0, COMPLEX_TYPE)
10194 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10195 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10196 break;
10197
10198 CASE_FLT_FN (BUILT_IN_CCOS):
10199 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10200
10201 CASE_FLT_FN (BUILT_IN_CCOSH):
10202 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10203
10204 CASE_FLT_FN (BUILT_IN_CPROJ):
10205 return fold_builtin_cproj (loc, arg0, type);
10206
10207 CASE_FLT_FN (BUILT_IN_CSIN):
10208 if (validate_arg (arg0, COMPLEX_TYPE)
10209 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10210 return do_mpc_arg1 (arg0, type, mpc_sin);
10211 break;
10212
10213 CASE_FLT_FN (BUILT_IN_CSINH):
10214 if (validate_arg (arg0, COMPLEX_TYPE)
10215 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10216 return do_mpc_arg1 (arg0, type, mpc_sinh);
10217 break;
10218
10219 CASE_FLT_FN (BUILT_IN_CTAN):
10220 if (validate_arg (arg0, COMPLEX_TYPE)
10221 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10222 return do_mpc_arg1 (arg0, type, mpc_tan);
10223 break;
10224
10225 CASE_FLT_FN (BUILT_IN_CTANH):
10226 if (validate_arg (arg0, COMPLEX_TYPE)
10227 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10228 return do_mpc_arg1 (arg0, type, mpc_tanh);
10229 break;
10230
10231 CASE_FLT_FN (BUILT_IN_CLOG):
10232 if (validate_arg (arg0, COMPLEX_TYPE)
10233 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10234 return do_mpc_arg1 (arg0, type, mpc_log);
10235 break;
10236
10237 CASE_FLT_FN (BUILT_IN_CSQRT):
10238 if (validate_arg (arg0, COMPLEX_TYPE)
10239 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10240 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10241 break;
10242
10243 CASE_FLT_FN (BUILT_IN_CASIN):
10244 if (validate_arg (arg0, COMPLEX_TYPE)
10245 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10246 return do_mpc_arg1 (arg0, type, mpc_asin);
10247 break;
10248
10249 CASE_FLT_FN (BUILT_IN_CACOS):
10250 if (validate_arg (arg0, COMPLEX_TYPE)
10251 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10252 return do_mpc_arg1 (arg0, type, mpc_acos);
10253 break;
10254
10255 CASE_FLT_FN (BUILT_IN_CATAN):
10256 if (validate_arg (arg0, COMPLEX_TYPE)
10257 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10258 return do_mpc_arg1 (arg0, type, mpc_atan);
10259 break;
10260
10261 CASE_FLT_FN (BUILT_IN_CASINH):
10262 if (validate_arg (arg0, COMPLEX_TYPE)
10263 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10264 return do_mpc_arg1 (arg0, type, mpc_asinh);
10265 break;
10266
10267 CASE_FLT_FN (BUILT_IN_CACOSH):
10268 if (validate_arg (arg0, COMPLEX_TYPE)
10269 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10270 return do_mpc_arg1 (arg0, type, mpc_acosh);
10271 break;
10272
10273 CASE_FLT_FN (BUILT_IN_CATANH):
10274 if (validate_arg (arg0, COMPLEX_TYPE)
10275 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10276 return do_mpc_arg1 (arg0, type, mpc_atanh);
10277 break;
10278
10279 CASE_FLT_FN (BUILT_IN_CABS):
10280 return fold_builtin_cabs (loc, arg0, type, fndecl);
10281
10282 CASE_FLT_FN (BUILT_IN_CARG):
10283 return fold_builtin_carg (loc, arg0, type);
10284
10285 CASE_FLT_FN (BUILT_IN_SQRT):
10286 return fold_builtin_sqrt (loc, arg0, type);
10287
10288 CASE_FLT_FN (BUILT_IN_CBRT):
10289 return fold_builtin_cbrt (loc, arg0, type);
10290
10291 CASE_FLT_FN (BUILT_IN_ASIN):
10292 if (validate_arg (arg0, REAL_TYPE))
10293 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10294 &dconstm1, &dconst1, true);
10295 break;
10296
10297 CASE_FLT_FN (BUILT_IN_ACOS):
10298 if (validate_arg (arg0, REAL_TYPE))
10299 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10300 &dconstm1, &dconst1, true);
10301 break;
10302
10303 CASE_FLT_FN (BUILT_IN_ATAN):
10304 if (validate_arg (arg0, REAL_TYPE))
10305 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10306 break;
10307
10308 CASE_FLT_FN (BUILT_IN_ASINH):
10309 if (validate_arg (arg0, REAL_TYPE))
10310 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10311 break;
10312
10313 CASE_FLT_FN (BUILT_IN_ACOSH):
10314 if (validate_arg (arg0, REAL_TYPE))
10315 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10316 &dconst1, NULL, true);
10317 break;
10318
10319 CASE_FLT_FN (BUILT_IN_ATANH):
10320 if (validate_arg (arg0, REAL_TYPE))
10321 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10322 &dconstm1, &dconst1, false);
10323 break;
10324
10325 CASE_FLT_FN (BUILT_IN_SIN):
10326 if (validate_arg (arg0, REAL_TYPE))
10327 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10328 break;
10329
10330 CASE_FLT_FN (BUILT_IN_COS):
10331 return fold_builtin_cos (loc, arg0, type, fndecl);
10332
10333 CASE_FLT_FN (BUILT_IN_TAN):
10334 return fold_builtin_tan (arg0, type);
10335
10336 CASE_FLT_FN (BUILT_IN_CEXP):
10337 return fold_builtin_cexp (loc, arg0, type);
10338
10339 CASE_FLT_FN (BUILT_IN_CEXPI):
10340 if (validate_arg (arg0, REAL_TYPE))
10341 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10342 break;
10343
10344 CASE_FLT_FN (BUILT_IN_SINH):
10345 if (validate_arg (arg0, REAL_TYPE))
10346 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10347 break;
10348
10349 CASE_FLT_FN (BUILT_IN_COSH):
10350 return fold_builtin_cosh (loc, arg0, type, fndecl);
10351
10352 CASE_FLT_FN (BUILT_IN_TANH):
10353 if (validate_arg (arg0, REAL_TYPE))
10354 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10355 break;
10356
10357 CASE_FLT_FN (BUILT_IN_ERF):
10358 if (validate_arg (arg0, REAL_TYPE))
10359 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10360 break;
10361
10362 CASE_FLT_FN (BUILT_IN_ERFC):
10363 if (validate_arg (arg0, REAL_TYPE))
10364 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10365 break;
10366
10367 CASE_FLT_FN (BUILT_IN_TGAMMA):
10368 if (validate_arg (arg0, REAL_TYPE))
10369 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10370 break;
10371
10372 CASE_FLT_FN (BUILT_IN_EXP):
10373 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10374
10375 CASE_FLT_FN (BUILT_IN_EXP2):
10376 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10377
10378 CASE_FLT_FN (BUILT_IN_EXP10):
10379 CASE_FLT_FN (BUILT_IN_POW10):
10380 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10381
10382 CASE_FLT_FN (BUILT_IN_EXPM1):
10383 if (validate_arg (arg0, REAL_TYPE))
10384 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10385 break;
10386
10387 CASE_FLT_FN (BUILT_IN_LOG):
10388 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10389
10390 CASE_FLT_FN (BUILT_IN_LOG2):
10391 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10392
10393 CASE_FLT_FN (BUILT_IN_LOG10):
10394 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10395
10396 CASE_FLT_FN (BUILT_IN_LOG1P):
10397 if (validate_arg (arg0, REAL_TYPE))
10398 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10399 &dconstm1, NULL, false);
10400 break;
10401
10402 CASE_FLT_FN (BUILT_IN_J0):
10403 if (validate_arg (arg0, REAL_TYPE))
10404 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10405 NULL, NULL, 0);
10406 break;
10407
10408 CASE_FLT_FN (BUILT_IN_J1):
10409 if (validate_arg (arg0, REAL_TYPE))
10410 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10411 NULL, NULL, 0);
10412 break;
10413
10414 CASE_FLT_FN (BUILT_IN_Y0):
10415 if (validate_arg (arg0, REAL_TYPE))
10416 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10417 &dconst0, NULL, false);
10418 break;
10419
10420 CASE_FLT_FN (BUILT_IN_Y1):
10421 if (validate_arg (arg0, REAL_TYPE))
10422 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10423 &dconst0, NULL, false);
10424 break;
10425
10426 CASE_FLT_FN (BUILT_IN_NAN):
10427 case BUILT_IN_NAND32:
10428 case BUILT_IN_NAND64:
10429 case BUILT_IN_NAND128:
10430 return fold_builtin_nan (arg0, type, true);
10431
10432 CASE_FLT_FN (BUILT_IN_NANS):
10433 return fold_builtin_nan (arg0, type, false);
10434
10435 CASE_FLT_FN (BUILT_IN_FLOOR):
10436 return fold_builtin_floor (loc, fndecl, arg0);
10437
10438 CASE_FLT_FN (BUILT_IN_CEIL):
10439 return fold_builtin_ceil (loc, fndecl, arg0);
10440
10441 CASE_FLT_FN (BUILT_IN_TRUNC):
10442 return fold_builtin_trunc (loc, fndecl, arg0);
10443
10444 CASE_FLT_FN (BUILT_IN_ROUND):
10445 return fold_builtin_round (loc, fndecl, arg0);
10446
10447 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10448 CASE_FLT_FN (BUILT_IN_RINT):
10449 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10450
10451 CASE_FLT_FN (BUILT_IN_ICEIL):
10452 CASE_FLT_FN (BUILT_IN_LCEIL):
10453 CASE_FLT_FN (BUILT_IN_LLCEIL):
10454 CASE_FLT_FN (BUILT_IN_LFLOOR):
10455 CASE_FLT_FN (BUILT_IN_IFLOOR):
10456 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10457 CASE_FLT_FN (BUILT_IN_IROUND):
10458 CASE_FLT_FN (BUILT_IN_LROUND):
10459 CASE_FLT_FN (BUILT_IN_LLROUND):
10460 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10461
10462 CASE_FLT_FN (BUILT_IN_IRINT):
10463 CASE_FLT_FN (BUILT_IN_LRINT):
10464 CASE_FLT_FN (BUILT_IN_LLRINT):
10465 return fold_fixed_mathfn (loc, fndecl, arg0);
10466
10467 case BUILT_IN_BSWAP16:
10468 case BUILT_IN_BSWAP32:
10469 case BUILT_IN_BSWAP64:
10470 return fold_builtin_bswap (fndecl, arg0);
10471
10472 CASE_INT_FN (BUILT_IN_FFS):
10473 CASE_INT_FN (BUILT_IN_CLZ):
10474 CASE_INT_FN (BUILT_IN_CTZ):
10475 CASE_INT_FN (BUILT_IN_CLRSB):
10476 CASE_INT_FN (BUILT_IN_POPCOUNT):
10477 CASE_INT_FN (BUILT_IN_PARITY):
10478 return fold_builtin_bitop (fndecl, arg0);
10479
10480 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10481 return fold_builtin_signbit (loc, arg0, type);
10482
10483 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10484 return fold_builtin_significand (loc, arg0, type);
10485
10486 CASE_FLT_FN (BUILT_IN_ILOGB):
10487 CASE_FLT_FN (BUILT_IN_LOGB):
10488 return fold_builtin_logb (loc, arg0, type);
10489
10490 case BUILT_IN_ISASCII:
10491 return fold_builtin_isascii (loc, arg0);
10492
10493 case BUILT_IN_TOASCII:
10494 return fold_builtin_toascii (loc, arg0);
10495
10496 case BUILT_IN_ISDIGIT:
10497 return fold_builtin_isdigit (loc, arg0);
10498
10499 CASE_FLT_FN (BUILT_IN_FINITE):
10500 case BUILT_IN_FINITED32:
10501 case BUILT_IN_FINITED64:
10502 case BUILT_IN_FINITED128:
10503 case BUILT_IN_ISFINITE:
10504 {
10505 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10506 if (ret)
10507 return ret;
10508 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10509 }
10510
10511 CASE_FLT_FN (BUILT_IN_ISINF):
10512 case BUILT_IN_ISINFD32:
10513 case BUILT_IN_ISINFD64:
10514 case BUILT_IN_ISINFD128:
10515 {
10516 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10517 if (ret)
10518 return ret;
10519 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10520 }
10521
10522 case BUILT_IN_ISNORMAL:
10523 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10524
10525 case BUILT_IN_ISINF_SIGN:
10526 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10527
10528 CASE_FLT_FN (BUILT_IN_ISNAN):
10529 case BUILT_IN_ISNAND32:
10530 case BUILT_IN_ISNAND64:
10531 case BUILT_IN_ISNAND128:
10532 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10533
10534 case BUILT_IN_PRINTF:
10535 case BUILT_IN_PRINTF_UNLOCKED:
10536 case BUILT_IN_VPRINTF:
10537 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10538
10539 case BUILT_IN_FREE:
10540 if (integer_zerop (arg0))
10541 return build_empty_stmt (loc);
10542 break;
10543
10544 default:
10545 break;
10546 }
10547
10548 return NULL_TREE;
10549
10550 }
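/* As a concrete example of the one-argument foldings above: the
BUILT_IN_FREE case means a statement such as

free ((void *) 0);

is replaced by an empty statement, since freeing a null pointer
is a no-op. */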
10551
10552 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10553 IGNORE is true if the result of the function call is ignored. This
10554 function returns NULL_TREE if no simplification was possible. */
10555
10556 static tree
10557 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10558 {
10559 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10560 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10561
10562 switch (fcode)
10563 {
10564 CASE_FLT_FN (BUILT_IN_JN):
10565 if (validate_arg (arg0, INTEGER_TYPE)
10566 && validate_arg (arg1, REAL_TYPE))
10567 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10568 break;
10569
10570 CASE_FLT_FN (BUILT_IN_YN):
10571 if (validate_arg (arg0, INTEGER_TYPE)
10572 && validate_arg (arg1, REAL_TYPE))
10573 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10574 &dconst0, false);
10575 break;
10576
10577 CASE_FLT_FN (BUILT_IN_DREM):
10578 CASE_FLT_FN (BUILT_IN_REMAINDER):
10579 if (validate_arg (arg0, REAL_TYPE)
10580 && validate_arg (arg1, REAL_TYPE))
10581 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10582 break;
10583
10584 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10585 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10586 if (validate_arg (arg0, REAL_TYPE)
10587 && validate_arg (arg1, POINTER_TYPE))
10588 return do_mpfr_lgamma_r (arg0, arg1, type);
10589 break;
10590
10591 CASE_FLT_FN (BUILT_IN_ATAN2):
10592 if (validate_arg (arg0, REAL_TYPE)
10593 && validate_arg (arg1, REAL_TYPE))
10594 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10595 break;
10596
10597 CASE_FLT_FN (BUILT_IN_FDIM):
10598 if (validate_arg (arg0, REAL_TYPE)
10599 && validate_arg (arg1, REAL_TYPE))
10600 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10601 break;
10602
10603 CASE_FLT_FN (BUILT_IN_HYPOT):
10604 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10605
10606 CASE_FLT_FN (BUILT_IN_CPOW):
10607 if (validate_arg (arg0, COMPLEX_TYPE)
10608 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10609 && validate_arg (arg1, COMPLEX_TYPE)
10610 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10611 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10612 break;
10613
10614 CASE_FLT_FN (BUILT_IN_LDEXP):
10615 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10616 CASE_FLT_FN (BUILT_IN_SCALBN):
10617 CASE_FLT_FN (BUILT_IN_SCALBLN):
10618 return fold_builtin_load_exponent (loc, arg0, arg1,
10619 type, /*ldexp=*/false);
10620
10621 CASE_FLT_FN (BUILT_IN_FREXP):
10622 return fold_builtin_frexp (loc, arg0, arg1, type);
10623
10624 CASE_FLT_FN (BUILT_IN_MODF):
10625 return fold_builtin_modf (loc, arg0, arg1, type);
10626
10627 case BUILT_IN_BZERO:
10628 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10629
10630 case BUILT_IN_FPUTS:
10631 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10632
10633 case BUILT_IN_FPUTS_UNLOCKED:
10634 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10635
10636 case BUILT_IN_STRSTR:
10637 return fold_builtin_strstr (loc, arg0, arg1, type);
10638
10639 case BUILT_IN_STRCAT:
10640 return fold_builtin_strcat (loc, arg0, arg1);
10641
10642 case BUILT_IN_STRSPN:
10643 return fold_builtin_strspn (loc, arg0, arg1);
10644
10645 case BUILT_IN_STRCSPN:
10646 return fold_builtin_strcspn (loc, arg0, arg1);
10647
10648 case BUILT_IN_STRCHR:
10649 case BUILT_IN_INDEX:
10650 return fold_builtin_strchr (loc, arg0, arg1, type);
10651
10652 case BUILT_IN_STRRCHR:
10653 case BUILT_IN_RINDEX:
10654 return fold_builtin_strrchr (loc, arg0, arg1, type);
10655
10656 case BUILT_IN_STRCPY:
10657 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10658
10659 case BUILT_IN_STPCPY:
10660 if (ignore)
10661 {
10662 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10663 if (!fn)
10664 break;
10665
10666 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10667 }
10668 else
10669 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10670 break;
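/* For example, when its result is unused,

(void) stpcpy (dst, src);

becomes a plain

strcpy (dst, src);

since the pointer past the copied string is not needed. */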
10671
10672 case BUILT_IN_STRCMP:
10673 return fold_builtin_strcmp (loc, arg0, arg1);
10674
10675 case BUILT_IN_STRPBRK:
10676 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10677
10678 case BUILT_IN_EXPECT:
10679 return fold_builtin_expect (loc, arg0, arg1);
10680
10681 CASE_FLT_FN (BUILT_IN_POW):
10682 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10683
10684 CASE_FLT_FN (BUILT_IN_POWI):
10685 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10686
10687 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10688 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10689
10690 CASE_FLT_FN (BUILT_IN_FMIN):
10691 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10692
10693 CASE_FLT_FN (BUILT_IN_FMAX):
10694 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10695
10696 case BUILT_IN_ISGREATER:
10697 return fold_builtin_unordered_cmp (loc, fndecl,
10698 arg0, arg1, UNLE_EXPR, LE_EXPR);
10699 case BUILT_IN_ISGREATEREQUAL:
10700 return fold_builtin_unordered_cmp (loc, fndecl,
10701 arg0, arg1, UNLT_EXPR, LT_EXPR);
10702 case BUILT_IN_ISLESS:
10703 return fold_builtin_unordered_cmp (loc, fndecl,
10704 arg0, arg1, UNGE_EXPR, GE_EXPR);
10705 case BUILT_IN_ISLESSEQUAL:
10706 return fold_builtin_unordered_cmp (loc, fndecl,
10707 arg0, arg1, UNGT_EXPR, GT_EXPR);
10708 case BUILT_IN_ISLESSGREATER:
10709 return fold_builtin_unordered_cmp (loc, fndecl,
10710 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10711 case BUILT_IN_ISUNORDERED:
10712 return fold_builtin_unordered_cmp (loc, fndecl,
10713 arg0, arg1, UNORDERED_EXPR,
10714 NOP_EXPR);
10715
10716 /* We do the folding for va_start in the expander. */
10717 case BUILT_IN_VA_START:
10718 break;
10719
10720 case BUILT_IN_SPRINTF:
10721 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10722
10723 case BUILT_IN_OBJECT_SIZE:
10724 return fold_builtin_object_size (arg0, arg1);
10725
10726 case BUILT_IN_PRINTF:
10727 case BUILT_IN_PRINTF_UNLOCKED:
10728 case BUILT_IN_VPRINTF:
10729 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10730
10731 case BUILT_IN_PRINTF_CHK:
10732 case BUILT_IN_VPRINTF_CHK:
10733 if (!validate_arg (arg0, INTEGER_TYPE)
10734 || TREE_SIDE_EFFECTS (arg0))
10735 return NULL_TREE;
10736 else
10737 return fold_builtin_printf (loc, fndecl,
10738 arg1, NULL_TREE, ignore, fcode);
10739 break;
10740
10741 case BUILT_IN_FPRINTF:
10742 case BUILT_IN_FPRINTF_UNLOCKED:
10743 case BUILT_IN_VFPRINTF:
10744 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10745 ignore, fcode);
10746
10747 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10748 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10749
10750 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10751 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10752
10753 default:
10754 break;
10755 }
10756 return NULL_TREE;
10757 }
10758
10759 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10760 and ARG2. IGNORE is true if the result of the function call is ignored.
10761 This function returns NULL_TREE if no simplification was possible. */
10762
10763 static tree
10764 fold_builtin_3 (location_t loc, tree fndecl,
10765 tree arg0, tree arg1, tree arg2, bool ignore)
10766 {
10767 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10768 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10769 switch (fcode)
10770 {
10771
10772 CASE_FLT_FN (BUILT_IN_SINCOS):
10773 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10774
10775 CASE_FLT_FN (BUILT_IN_FMA):
10776 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10778
10779 CASE_FLT_FN (BUILT_IN_REMQUO):
10780 if (validate_arg (arg0, REAL_TYPE)
10781 && validate_arg (arg1, REAL_TYPE)
10782 && validate_arg (arg2, POINTER_TYPE))
10783 return do_mpfr_remquo (arg0, arg1, arg2);
10784 break;
10785
10786 case BUILT_IN_MEMSET:
10787 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10788
10789 case BUILT_IN_BCOPY:
10790 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10791 void_type_node, true, /*endp=*/3);
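/* Note the swapped operands above: bcopy (src, dst, n) takes its
arguments in the opposite order from memmove (dst, src, n), so

bcopy (s, d, n);

is folded exactly like

memmove (d, s, n);  */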
10792
10793 case BUILT_IN_MEMCPY:
10794 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10795 type, ignore, /*endp=*/0);
10796
10797 case BUILT_IN_MEMPCPY:
10798 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10799 type, ignore, /*endp=*/1);
10800
10801 case BUILT_IN_MEMMOVE:
10802 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10803 type, ignore, /*endp=*/3);
10804
10805 case BUILT_IN_STRNCAT:
10806 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10807
10808 case BUILT_IN_STRNCPY:
10809 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10810
10811 case BUILT_IN_STRNCMP:
10812 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10813
10814 case BUILT_IN_MEMCHR:
10815 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10816
10817 case BUILT_IN_BCMP:
10818 case BUILT_IN_MEMCMP:
10819 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10820
10821 case BUILT_IN_SPRINTF:
10822 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10823
10824 case BUILT_IN_SNPRINTF:
10825 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10826
10827 case BUILT_IN_STRCPY_CHK:
10828 case BUILT_IN_STPCPY_CHK:
10829 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10830 ignore, fcode);
10831
10832 case BUILT_IN_STRCAT_CHK:
10833 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10834
10835 case BUILT_IN_PRINTF_CHK:
10836 case BUILT_IN_VPRINTF_CHK:
10837 if (!validate_arg (arg0, INTEGER_TYPE)
10838 || TREE_SIDE_EFFECTS (arg0))
10839 return NULL_TREE;
10840 else
10841 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10842 break;
10843
10844 case BUILT_IN_FPRINTF:
10845 case BUILT_IN_FPRINTF_UNLOCKED:
10846 case BUILT_IN_VFPRINTF:
10847 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10848 ignore, fcode);
10849
10850 case BUILT_IN_FPRINTF_CHK:
10851 case BUILT_IN_VFPRINTF_CHK:
10852 if (!validate_arg (arg1, INTEGER_TYPE)
10853 || TREE_SIDE_EFFECTS (arg1))
10854 return NULL_TREE;
10855 else
10856 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10857 ignore, fcode);
10858
10859 default:
10860 break;
10861 }
10862 return NULL_TREE;
10863 }
10864
10865 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10866 ARG2, and ARG3. IGNORE is true if the result of the function call is
10867 ignored. This function returns NULL_TREE if no simplification was
10868 possible. */
10869
10870 static tree
10871 fold_builtin_4 (location_t loc, tree fndecl,
10872 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10873 {
10874 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10875
10876 switch (fcode)
10877 {
10878 case BUILT_IN_MEMCPY_CHK:
10879 case BUILT_IN_MEMPCPY_CHK:
10880 case BUILT_IN_MEMMOVE_CHK:
10881 case BUILT_IN_MEMSET_CHK:
10882 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10883 NULL_TREE, ignore,
10884 DECL_FUNCTION_CODE (fndecl));
10885
10886 case BUILT_IN_STRNCPY_CHK:
10887 case BUILT_IN_STPNCPY_CHK:
10888 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10889 ignore, fcode);
10890
10891 case BUILT_IN_STRNCAT_CHK:
10892 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10893
10894 case BUILT_IN_SNPRINTF:
10895 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10896
10897 case BUILT_IN_FPRINTF_CHK:
10898 case BUILT_IN_VFPRINTF_CHK:
10899 if (!validate_arg (arg1, INTEGER_TYPE)
10900 || TREE_SIDE_EFFECTS (arg1))
10901 return NULL_TREE;
10902 else
10903 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10904 ignore, fcode);
10905 break;
10906
10907 default:
10908 break;
10909 }
10910 return NULL_TREE;
10911 }
10912
10913 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10914 arguments, where NARGS <= 4. IGNORE is true if the result of the
10915 function call is ignored. This function returns NULL_TREE if no
10916 simplification was possible. Note that this only folds builtins with
10917 fixed argument patterns. Foldings that do varargs-to-varargs
10918 transformations, or that match calls with more than 4 arguments,
10919 need to be handled with fold_builtin_varargs instead. */
10920
10921 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10922
10923 static tree
10924 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10925 {
10926 tree ret = NULL_TREE;
10927
10928 switch (nargs)
10929 {
10930 case 0:
10931 ret = fold_builtin_0 (loc, fndecl, ignore);
10932 break;
10933 case 1:
10934 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10935 break;
10936 case 2:
10937 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10938 break;
10939 case 3:
10940 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10941 break;
10942 case 4:
10943 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10944 ignore);
10945 break;
10946 default:
10947 break;
10948 }
10949 if (ret)
10950 {
10951 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10952 SET_EXPR_LOCATION (ret, loc);
10953 TREE_NO_WARNING (ret) = 1;
10954 return ret;
10955 }
10956 return NULL_TREE;
10957 }
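/* The NOP_EXPR/TREE_NO_WARNING wrapping above matters in statement
contexts: for instance, a stray

strlen ("abc");

folds to the constant 3, and without the wrapper the middle end
could issue a spurious "statement with no effect" warning for the
folded constant. */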
10958
10959 /* Builtins with folding operations that operate on "..." arguments
10960 need special handling; we need to store the arguments in a convenient
10961 data structure before attempting any folding. Fortunately there are
10962 only a few builtins that fall into this category. FNDECL is the
10963 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10964 result of the function call is ignored. */
10965
10966 static tree
10967 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10968 bool ignore ATTRIBUTE_UNUSED)
10969 {
10970 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10971 tree ret = NULL_TREE;
10972
10973 switch (fcode)
10974 {
10975 case BUILT_IN_SPRINTF_CHK:
10976 case BUILT_IN_VSPRINTF_CHK:
10977 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10978 break;
10979
10980 case BUILT_IN_SNPRINTF_CHK:
10981 case BUILT_IN_VSNPRINTF_CHK:
10982 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10983 break;
10984
10985 case BUILT_IN_FPCLASSIFY:
10986 ret = fold_builtin_fpclassify (loc, exp);
10987 break;
10988
10989 default:
10990 break;
10991 }
10992 if (ret)
10993 {
10994 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10995 SET_EXPR_LOCATION (ret, loc);
10996 TREE_NO_WARNING (ret) = 1;
10997 return ret;
10998 }
10999 return NULL_TREE;
11000 }
11001
11002 /* Return true if FNDECL shouldn't be folded right now.
11003 If a built-in function has an inline attribute always_inline
11004 wrapper, defer folding it after always_inline functions have
11005 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11006 might not be performed. */
11007
11008 bool
11009 avoid_folding_inline_builtin (tree fndecl)
11010 {
11011 return (DECL_DECLARED_INLINE_P (fndecl)
11012 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11013 && cfun
11014 && !cfun->always_inline_functions_inlined
11015 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11016 }
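/* For instance, glibc's fortified headers define always_inline
wrappers along these lines (simplified sketch, not the exact
glibc source):

extern __inline __attribute__ ((__always_inline__)) char *
strcpy (char *dest, const char *src)
{
  return __builtin___strcpy_chk (dest, src,
                                 __builtin_object_size (dest, 0));
}

Folding the strcpy call before the wrapper is inlined would skip
the object-size check that -D_FORTIFY_SOURCE is meant to add. */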
11017
11018 /* A wrapper function for builtin folding that prevents warnings for
11019 "statement without effect" and the like, caused by removing the
11020 call node earlier than the warning is generated. */
11021
11022 tree
11023 fold_call_expr (location_t loc, tree exp, bool ignore)
11024 {
11025 tree ret = NULL_TREE;
11026 tree fndecl = get_callee_fndecl (exp);
11027 if (fndecl
11028 && TREE_CODE (fndecl) == FUNCTION_DECL
11029 && DECL_BUILT_IN (fndecl)
11030 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11031 yet. Defer folding until we see all the arguments
11032 (after inlining). */
11033 && !CALL_EXPR_VA_ARG_PACK (exp))
11034 {
11035 int nargs = call_expr_nargs (exp);
11036
11037 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11038 instead last argument is __builtin_va_arg_pack (). Defer folding
11039 even in that case, until arguments are finalized. */
11040 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11041 {
11042 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11043 if (fndecl2
11044 && TREE_CODE (fndecl2) == FUNCTION_DECL
11045 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11046 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11047 return NULL_TREE;
11048 }
11049
11050 if (avoid_folding_inline_builtin (fndecl))
11051 return NULL_TREE;
11052
11053 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11054 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11055 CALL_EXPR_ARGP (exp), ignore);
11056 else
11057 {
11058 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11059 {
11060 tree *args = CALL_EXPR_ARGP (exp);
11061 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11062 }
11063 if (!ret)
11064 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11065 if (ret)
11066 return ret;
11067 }
11068 }
11069 return NULL_TREE;
11070 }
11071
11072 /* Conveniently construct a function call expression. FNDECL names the
11073 function to be called and N arguments are passed in the array
11074 ARGARRAY. */
11075
11076 tree
11077 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11078 {
11079 tree fntype = TREE_TYPE (fndecl);
11080 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11081
11082 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11083 }
11084
11085 /* Conveniently construct a function call expression. FNDECL names the
11086 function to be called and the arguments are passed in the vector
11087 VEC. */
11088
11089 tree
11090 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11091 {
11092 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11093 vec_safe_address (vec));
11094 }
11095
11096
11097 /* Conveniently construct a function call expression. FNDECL names the
11098 function to be called, N is the number of arguments, and the "..."
11099 parameters are the argument expressions. */
11100
11101 tree
11102 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11103 {
11104 va_list ap;
11105 tree *argarray = XALLOCAVEC (tree, n);
11106 int i;
11107
11108 va_start (ap, n);
11109 for (i = 0; i < n; i++)
11110 argarray[i] = va_arg (ap, tree);
11111 va_end (ap);
11112 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11113 }
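/* Typical use, given a builtin declaration FN (e.g. the implicit
BUILT_IN_STRCPY decl):

call = build_call_expr_loc (loc, fn, 2, dst, src);

as in several of the foldings in this file. */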
11114
11115 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11116 varargs macros aren't supported by all bootstrap compilers. */
11117
11118 tree
11119 build_call_expr (tree fndecl, int n, ...)
11120 {
11121 va_list ap;
11122 tree *argarray = XALLOCAVEC (tree, n);
11123 int i;
11124
11125 va_start (ap, n);
11126 for (i = 0; i < n; i++)
11127 argarray[i] = va_arg (ap, tree);
11128 va_end (ap);
11129 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11130 }
11131
11132 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11133 N arguments are passed in the array ARGARRAY. */
11134
11135 tree
11136 fold_builtin_call_array (location_t loc, tree type,
11137 tree fn,
11138 int n,
11139 tree *argarray)
11140 {
11141 tree ret = NULL_TREE;
11142 tree exp;
11143
11144 if (TREE_CODE (fn) == ADDR_EXPR)
11145 {
11146 tree fndecl = TREE_OPERAND (fn, 0);
11147 if (TREE_CODE (fndecl) == FUNCTION_DECL
11148 && DECL_BUILT_IN (fndecl))
11149 {
11150 /* If last argument is __builtin_va_arg_pack (), arguments to this
11151 function are not finalized yet. Defer folding until they are. */
11152 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11153 {
11154 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11155 if (fndecl2
11156 && TREE_CODE (fndecl2) == FUNCTION_DECL
11157 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11158 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11159 return build_call_array_loc (loc, type, fn, n, argarray);
11160 }
11161 if (avoid_folding_inline_builtin (fndecl))
11162 return build_call_array_loc (loc, type, fn, n, argarray);
11163 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11164 {
11165 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11166 if (ret)
11167 return ret;
11168
11169 return build_call_array_loc (loc, type, fn, n, argarray);
11170 }
11171 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11172 {
11173 /* First try the transformations that don't require consing up
11174 an exp. */
11175 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11176 if (ret)
11177 return ret;
11178 }
11179
11180 /* If we got this far, we need to build an exp. */
11181 exp = build_call_array_loc (loc, type, fn, n, argarray);
11182 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11183 return ret ? ret : exp;
11184 }
11185 }
11186
11187 return build_call_array_loc (loc, type, fn, n, argarray);
11188 }
11189
11190 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11191 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11192 of arguments in ARGS to be omitted. OLDNARGS is the number of
11193 elements in ARGS. */
11194
11195 static tree
11196 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11197 int skip, tree fndecl, int n, va_list newargs)
11198 {
11199 int nargs = oldnargs - skip + n;
11200 tree *buffer;
11201
11202 if (n > 0)
11203 {
11204 int i, j;
11205
11206 buffer = XALLOCAVEC (tree, nargs);
11207 for (i = 0; i < n; i++)
11208 buffer[i] = va_arg (newargs, tree);
11209 for (j = skip; j < oldnargs; j++, i++)
11210 buffer[i] = args[j];
11211 }
11212 else
11213 buffer = args + skip;
11214
11215 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11216 }
11217
11218 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11219 list ARGS along with N new arguments specified as the "..."
11220 parameters. SKIP is the number of arguments in ARGS to be omitted.
11221 OLDNARGS is the number of elements in ARGS. */
11222
11223 static tree
11224 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11225 int skip, tree fndecl, int n, ...)
11226 {
11227 va_list ap;
11228 tree t;
11229
11230 va_start (ap, n);
11231 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11232 va_end (ap);
11233
11234 return t;
11235 }
11236
11237 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11238 along with N new arguments specified as the "..." parameters. SKIP
11239 is the number of arguments in EXP to be omitted. This function is used
11240 to do varargs-to-varargs transformations. */
11241
11242 static tree
11243 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11244 {
11245 va_list ap;
11246 tree t;
11247
11248 va_start (ap, n);
11249 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11250 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11251 va_end (ap);
11252
11253 return t;
11254 }
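/* As an illustration of the argument surgery: given a call
EXP = foo (a, b, x, y, z),

rewrite_call_expr (loc, exp, 2, fndecl, 1, n)

skips the two old leading arguments, prepends the one new argument
N, and builds fndecl (n, x, y, z). This is, e.g., how the *_chk
foldings can drop their checking arguments while preserving any
trailing "..." arguments. */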
11255
11256 /* Validate a single argument ARG against a tree code CODE representing
11257 a type. */
11258
11259 static bool
11260 validate_arg (const_tree arg, enum tree_code code)
11261 {
11262 if (!arg)
11263 return false;
11264 else if (code == POINTER_TYPE)
11265 return POINTER_TYPE_P (TREE_TYPE (arg));
11266 else if (code == INTEGER_TYPE)
11267 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11268 return code == TREE_CODE (TREE_TYPE (arg));
11269 }
11270
11271 /* This function validates the types of a function call argument list
11272 against a specified list of tree_codes. If the last specifier is a 0,
11273 that represents an ellipsis; otherwise the last specifier must be a
11274 VOID_TYPE.
11275
11276 This is the GIMPLE version of validate_arglist. Eventually we want to
11277 completely convert builtins.c to work from GIMPLEs and the tree based
11278 validate_arglist will then be removed. */
11279
11280 bool
11281 validate_gimple_arglist (const_gimple call, ...)
11282 {
11283 enum tree_code code;
11284 bool res = false;
11285 va_list ap;
11286 const_tree arg;
11287 size_t i;
11288
11289 va_start (ap, call);
11290 i = 0;
11291
11292 do
11293 {
11294 code = (enum tree_code) va_arg (ap, int);
11295 switch (code)
11296 {
11297 case 0:
11298 /* This signifies an ellipsis; any further arguments are all OK. */
11299 res = true;
11300 goto end;
11301 case VOID_TYPE:
11302 /* This signifies an endlink; if no arguments remain, return
11303 true, otherwise return false. */
11304 res = (i == gimple_call_num_args (call));
11305 goto end;
11306 default:
11307 /* If no parameters remain or the parameter's code does not
11308 match the specified code, return false. Otherwise continue
11309 checking any remaining arguments. */
11310 arg = gimple_call_arg (call, i++);
11311 if (!validate_arg (arg, code))
11312 goto end;
11313 break;
11314 }
11315 }
11316 while (1);
11317
11318 /* We need gotos here since we can only have one VA_CLOSE in a
11319 function. */
11320 end: ;
11321 va_end (ap);
11322
11323 return res;
11324 }
11325
11326 /* This function validates the types of a function call argument list
11327 against a specified list of tree_codes. If the last specifier is a 0,
11328 that represents an ellipsis; otherwise the last specifier must be a
11329 VOID_TYPE. */
11330
11331 bool
11332 validate_arglist (const_tree callexpr, ...)
11333 {
11334 enum tree_code code;
11335 bool res = false;
11336 va_list ap;
11337 const_call_expr_arg_iterator iter;
11338 const_tree arg;
11339
11340 va_start (ap, callexpr);
11341 init_const_call_expr_arg_iterator (callexpr, &iter);
11342
11343 do
11344 {
11345 code = (enum tree_code) va_arg (ap, int);
11346 switch (code)
11347 {
11348 case 0:
11349 /* This signifies an ellipsis; any further arguments are all OK. */
11350 res = true;
11351 goto end;
11352 case VOID_TYPE:
11353 /* This signifies an endlink; if no arguments remain, return
11354 true, otherwise return false. */
11355 res = !more_const_call_expr_args_p (&iter);
11356 goto end;
11357 default:
11358 /* If no parameters remain or the parameter's code does not
11359 match the specified code, return false. Otherwise continue
11360 checking any remaining arguments. */
11361 arg = next_const_call_expr_arg (&iter);
11362 if (!validate_arg (arg, code))
11363 goto end;
11364 break;
11365 }
11366 }
11367 while (1);
11368
11369 /* We need gotos here since we can only have one VA_CLOSE in a
11370 function. */
11371 end: ;
11372 va_end (ap);
11373
11374 return res;
11375 }
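/* For example,

validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

accepts exactly two pointer arguments, while

validate_arglist (exp, POINTER_TYPE, 0)

accepts one pointer argument followed by any number of further
arguments of any type. */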
11376
11377 /* Default target-specific builtin expander that does nothing. */
11378
11379 rtx
11380 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11381 rtx target ATTRIBUTE_UNUSED,
11382 rtx subtarget ATTRIBUTE_UNUSED,
11383 enum machine_mode mode ATTRIBUTE_UNUSED,
11384 int ignore ATTRIBUTE_UNUSED)
11385 {
11386 return NULL_RTX;
11387 }
11388
11389 /* Returns true if EXP represents data that would potentially reside
11390 in a readonly section. */
11391
11392 static bool
11393 readonly_data_expr (tree exp)
11394 {
11395 STRIP_NOPS (exp);
11396
11397 if (TREE_CODE (exp) != ADDR_EXPR)
11398 return false;
11399
11400 exp = get_base_address (TREE_OPERAND (exp, 0));
11401 if (!exp)
11402 return false;
11403
11404 /* Make sure we call decl_readonly_section only for trees it
11405 can handle (since it returns true for everything it doesn't
11406 understand). */
11407 if (TREE_CODE (exp) == STRING_CST
11408 || TREE_CODE (exp) == CONSTRUCTOR
11409 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11410 return decl_readonly_section (exp, 0);
11411 else
11412 return false;
11413 }
11414
11415 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11416 to the call, and TYPE is its return type.
11417
11418 Return NULL_TREE if no simplification was possible, otherwise return the
11419 simplified form of the call as a tree.
11420
11421 The simplified form may be a constant or other expression which
11422 computes the same value, but in a more efficient manner (including
11423 calls to other builtin functions).
11424
11425 The call may contain arguments which need to be evaluated, but
11426 which are not useful to determine the result of the call. In
11427 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11428 COMPOUND_EXPR will be an argument which must be evaluated.
11429 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11430 COMPOUND_EXPR in the chain will contain the tree for the simplified
11431 form of the builtin function call. */
11432
11433 static tree
11434 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11435 {
11436 if (!validate_arg (s1, POINTER_TYPE)
11437 || !validate_arg (s2, POINTER_TYPE))
11438 return NULL_TREE;
11439 else
11440 {
11441 tree fn;
11442 const char *p1, *p2;
11443
11444 p2 = c_getstr (s2);
11445 if (p2 == NULL)
11446 return NULL_TREE;
11447
11448 p1 = c_getstr (s1);
11449 if (p1 != NULL)
11450 {
11451 const char *r = strstr (p1, p2);
11452 tree tem;
11453
11454 if (r == NULL)
11455 return build_int_cst (TREE_TYPE (s1), 0);
11456
11457 /* Return an offset into the constant string argument. */
11458 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11459 return fold_convert_loc (loc, type, tem);
11460 }
11461
11462 /* The argument is const char *, and the result is char *, so we need
11463 a type conversion here to avoid a warning. */
11464 if (p2[0] == '\0')
11465 return fold_convert_loc (loc, type, s1);
11466
11467 if (p2[1] != '\0')
11468 return NULL_TREE;
11469
11470 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11471 if (!fn)
11472 return NULL_TREE;
11473
11474 /* New argument list transforming strstr(s1, s2) to
11475 strchr(s1, s2[0]). */
11476 return build_call_expr_loc (loc, fn, 2, s1,
11477 build_int_cst (integer_type_node, p2[0]));
11478 }
11479 }
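/* Concretely, the cases above give:

strstr (s, "")         -> s (converted to the return type)
strstr (s, "c")        -> strchr (s, 'c')
strstr ("abcde", "cd") -> "abcde" + 2, folded at compile time.  */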
11480
11481 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11482 the call, and TYPE is its return type.
11483
11484 Return NULL_TREE if no simplification was possible, otherwise return the
11485 simplified form of the call as a tree.
11486
11487 The simplified form may be a constant or other expression which
11488 computes the same value, but in a more efficient manner (including
11489 calls to other builtin functions).
11490
11491 The call may contain arguments which need to be evaluated, but
11492 which are not useful to determine the result of the call. In
11493 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11494 COMPOUND_EXPR will be an argument which must be evaluated.
11495 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11496 COMPOUND_EXPR in the chain will contain the tree for the simplified
11497 form of the builtin function call. */
11498
11499 static tree
11500 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11501 {
11502 if (!validate_arg (s1, POINTER_TYPE)
11503 || !validate_arg (s2, INTEGER_TYPE))
11504 return NULL_TREE;
11505 else
11506 {
11507 const char *p1;
11508
11509 if (TREE_CODE (s2) != INTEGER_CST)
11510 return NULL_TREE;
11511
11512 p1 = c_getstr (s1);
11513 if (p1 != NULL)
11514 {
11515 char c;
11516 const char *r;
11517 tree tem;
11518
11519 if (target_char_cast (s2, &c))
11520 return NULL_TREE;
11521
11522 r = strchr (p1, c);
11523
11524 if (r == NULL)
11525 return build_int_cst (TREE_TYPE (s1), 0);
11526
11527 /* Return an offset into the constant string argument. */
11528 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11529 return fold_convert_loc (loc, type, tem);
11530 }
11531 return NULL_TREE;
11532 }
11533 }
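/* For instance, with a constant string argument:

strchr ("hello", 'l') -> "hello" + 2
strchr ("hello", 'z') -> (char *) 0

both computed at compile time. */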
11534
11535 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11536 the call, and TYPE is its return type.
11537
11538 Return NULL_TREE if no simplification was possible, otherwise return the
11539 simplified form of the call as a tree.
11540
11541 The simplified form may be a constant or other expression which
11542 computes the same value, but in a more efficient manner (including
11543 calls to other builtin functions).
11544
11545 The call may contain arguments which need to be evaluated, but
11546 which are not useful to determine the result of the call. In
11547 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11548 COMPOUND_EXPR will be an argument which must be evaluated.
11549 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11550 COMPOUND_EXPR in the chain will contain the tree for the simplified
11551 form of the builtin function call. */
11552
11553 static tree
11554 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11555 {
11556 if (!validate_arg (s1, POINTER_TYPE)
11557 || !validate_arg (s2, INTEGER_TYPE))
11558 return NULL_TREE;
11559 else
11560 {
11561 tree fn;
11562 const char *p1;
11563
11564 if (TREE_CODE (s2) != INTEGER_CST)
11565 return NULL_TREE;
11566
11567 p1 = c_getstr (s1);
11568 if (p1 != NULL)
11569 {
11570 char c;
11571 const char *r;
11572 tree tem;
11573
11574 if (target_char_cast (s2, &c))
11575 return NULL_TREE;
11576
11577 r = strrchr (p1, c);
11578
11579 if (r == NULL)
11580 return build_int_cst (TREE_TYPE (s1), 0);
11581
11582 /* Return an offset into the constant string argument. */
11583 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11584 return fold_convert_loc (loc, type, tem);
11585 }
11586
11587 if (! integer_zerop (s2))
11588 return NULL_TREE;
11589
11590 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11591 if (!fn)
11592 return NULL_TREE;
11593
11594 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11595 return build_call_expr_loc (loc, fn, 2, s1, s2);
11596 }
11597 }
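/* So, besides full compile-time evaluation for constant strings,

strrchr (s, '\0')

becomes the cheaper

strchr (s, '\0');

both return a pointer to the terminating nul. */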
11598
11599 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11600 to the call, and TYPE is its return type.
11601
11602 Return NULL_TREE if no simplification was possible, otherwise return the
11603 simplified form of the call as a tree.
11604
11605 The simplified form may be a constant or other expression which
11606 computes the same value, but in a more efficient manner (including
11607 calls to other builtin functions).
11608
11609 The call may contain arguments which need to be evaluated, but
11610 which are not useful to determine the result of the call. In
11611 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11612 COMPOUND_EXPR will be an argument which must be evaluated.
11613 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11614 COMPOUND_EXPR in the chain will contain the tree for the simplified
11615 form of the builtin function call. */
11616
11617 static tree
11618 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11619 {
11620 if (!validate_arg (s1, POINTER_TYPE)
11621 || !validate_arg (s2, POINTER_TYPE))
11622 return NULL_TREE;
11623 else
11624 {
11625 tree fn;
11626 const char *p1, *p2;
11627
11628 p2 = c_getstr (s2);
11629 if (p2 == NULL)
11630 return NULL_TREE;
11631
11632 p1 = c_getstr (s1);
11633 if (p1 != NULL)
11634 {
11635 const char *r = strpbrk (p1, p2);
11636 tree tem;
11637
11638 if (r == NULL)
11639 return build_int_cst (TREE_TYPE (s1), 0);
11640
11641 /* Return an offset into the constant string argument. */
11642 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11643 return fold_convert_loc (loc, type, tem);
11644 }
11645
11646 if (p2[0] == '\0')
11647 /* strpbrk(x, "") == NULL.
11648 Evaluate and ignore s1 in case it had side-effects. */
11649 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11650
11651 if (p2[1] != '\0')
11652 return NULL_TREE; /* Really call strpbrk. */
11653
11654 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11655 if (!fn)
11656 return NULL_TREE;
11657
11658 /* New argument list transforming strpbrk(s1, s2) to
11659 strchr(s1, s2[0]). */
11660 return build_call_expr_loc (loc, fn, 2, s1,
11661 build_int_cst (integer_type_node, p2[0]));
11662 }
11663 }
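/* The cases above yield, for example:

strpbrk (s, "")  -> (char *) 0, still evaluating s for side effects
strpbrk (s, "c") -> strchr (s, 'c')  */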
11664
11665 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11666 to the call.
11667
11668 Return NULL_TREE if no simplification was possible, otherwise return the
11669 simplified form of the call as a tree.
11670
11671 The simplified form may be a constant or other expression which
11672 computes the same value, but in a more efficient manner (including
11673 calls to other builtin functions).
11674
11675 The call may contain arguments which need to be evaluated, but
11676 which are not useful to determine the result of the call. In
11677 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11678 COMPOUND_EXPR will be an argument which must be evaluated.
11679 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11680 COMPOUND_EXPR in the chain will contain the tree for the simplified
11681 form of the builtin function call. */
11682
11683 static tree
11684 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11685 {
11686 if (!validate_arg (dst, POINTER_TYPE)
11687 || !validate_arg (src, POINTER_TYPE))
11688 return NULL_TREE;
11689 else
11690 {
11691 const char *p = c_getstr (src);
11692
11693 /* If the string length is zero, return the dst parameter. */
11694 if (p && *p == '\0')
11695 return dst;
11696
11697 if (optimize_insn_for_speed_p ())
11698 {
11699 /* See if we can store by pieces into (dst + strlen(dst)). */
11700 tree newdst, call;
11701 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11702 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11703
11704 if (!strlen_fn || !strcpy_fn)
11705 return NULL_TREE;
11706
11707 /* If we don't have a movstr pattern we don't want to emit a strcpy
11708 call unless the length of the source string is computable; in that
11709 case the call can later be expanded via memcpy into a sequence of
11710 mov instructions. If we have movstr instructions we can emit
11711 strcpy calls unconditionally. */
11712 if (!HAVE_movstr)
11713 {
11714 tree len = c_strlen (src, 1);
11715 if (! len || TREE_SIDE_EFFECTS (len))
11716 return NULL_TREE;
11717 }
11718
11719 /* Stabilize the argument list. */
11720 dst = builtin_save_expr (dst);
11721
11722 /* Create strlen (dst). */
11723 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11724 /* Create (dst p+ strlen (dst)). */
11725
11726 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11727 newdst = builtin_save_expr (newdst);
11728
11729 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11730 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11731 }
11732 return NULL_TREE;
11733 }
11734 }
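/* The speed transformation above effectively rewrites

strcat (dst, src);

as

strcpy (dst + strlen (dst), src), dst;

with DST stabilized so it is evaluated only once. */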
11735
11736 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11737 arguments to the call.
11738
11739 Return NULL_TREE if no simplification was possible, otherwise return the
11740 simplified form of the call as a tree.
11741
11742 The simplified form may be a constant or other expression which
11743 computes the same value, but in a more efficient manner (including
11744 calls to other builtin functions).
11745
11746 The call may contain arguments which need to be evaluated, but
11747 which are not useful to determine the result of the call. In
11748 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11749 COMPOUND_EXPR will be an argument which must be evaluated.
11750 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11751 COMPOUND_EXPR in the chain will contain the tree for the simplified
11752 form of the builtin function call. */
11753
11754 static tree
11755 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11756 {
11757 if (!validate_arg (dst, POINTER_TYPE)
11758 || !validate_arg (src, POINTER_TYPE)
11759 || !validate_arg (len, INTEGER_TYPE))
11760 return NULL_TREE;
11761 else
11762 {
11763 const char *p = c_getstr (src);
11764
11765 /* If the requested length is zero, or the src parameter string
11766 length is zero, return the dst parameter. */
11767 if (integer_zerop (len) || (p && *p == '\0'))
11768 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11769
11770 /* If the requested len is greater than or equal to the string
11771 length, call strcat. */
11772 if (TREE_CODE (len) == INTEGER_CST && p
11773 && compare_tree_int (len, strlen (p)) >= 0)
11774 {
11775 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11776
11777 /* If the replacement _DECL isn't initialized, don't do the
11778 transformation. */
11779 if (!fn)
11780 return NULL_TREE;
11781
11782 return build_call_expr_loc (loc, fn, 2, dst, src);
11783 }
11784 return NULL_TREE;
11785 }
11786 }
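/* For example, since the bound 5 is >= strlen ("ab"),

strncat (dst, "ab", 5)

is folded to the simpler

strcat (dst, "ab");  */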
11787
11788 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11789 to the call.
11790
11791 Return NULL_TREE if no simplification was possible, otherwise return the
11792 simplified form of the call as a tree.
11793
11794 The simplified form may be a constant or other expression which
11795 computes the same value, but in a more efficient manner (including
11796 calls to other builtin functions).
11797
11798 The call may contain arguments which need to be evaluated, but
11799 which are not useful to determine the result of the call. In
11800 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11801 COMPOUND_EXPR will be an argument which must be evaluated.
11802 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11803 COMPOUND_EXPR in the chain will contain the tree for the simplified
11804 form of the builtin function call. */
11805
11806 static tree
11807 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11808 {
11809 if (!validate_arg (s1, POINTER_TYPE)
11810 || !validate_arg (s2, POINTER_TYPE))
11811 return NULL_TREE;
11812 else
11813 {
11814 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11815
11816 /* If both arguments are constants, evaluate at compile-time. */
11817 if (p1 && p2)
11818 {
11819 const size_t r = strspn (p1, p2);
11820 return build_int_cst (size_type_node, r);
11821 }
11822
11823 /* If either argument is "", the result is zero. */
11824 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11825 /* Evaluate and ignore both arguments in case either one has
11826 side-effects. */
11827 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11828 s1, s2);
11829 return NULL_TREE;
11830 }
11831 }
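/* E.g. strspn ("abcba", "ab") folds to the constant 2 (the length
of the "ab" prefix), and strspn (s, "") folds to 0 while still
evaluating both arguments for side effects. */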
11832
11833 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11834 to the call.
11835
11836 Return NULL_TREE if no simplification was possible, otherwise return the
11837 simplified form of the call as a tree.
11838
11839 The simplified form may be a constant or other expression which
11840 computes the same value, but in a more efficient manner (including
11841 calls to other builtin functions).
11842
11843 The call may contain arguments which need to be evaluated, but
11844 which are not useful to determine the result of the call. In
11845 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11846 COMPOUND_EXPR will be an argument which must be evaluated.
11847 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11848 COMPOUND_EXPR in the chain will contain the tree for the simplified
11849 form of the builtin function call. */
11850
11851 static tree
11852 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11853 {
11854 if (!validate_arg (s1, POINTER_TYPE)
11855 || !validate_arg (s2, POINTER_TYPE))
11856 return NULL_TREE;
11857 else
11858 {
11859 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11860
11861 /* If both arguments are constants, evaluate at compile-time. */
11862 if (p1 && p2)
11863 {
11864 const size_t r = strcspn (p1, p2);
11865 return build_int_cst (size_type_node, r);
11866 }
11867
11868 /* If the first argument is "", the result is zero. */
11869 if (p1 && *p1 == '\0')
11870 {
11871 /* Evaluate and ignore argument s2 in case it has
11872 side-effects. */
11873 return omit_one_operand_loc (loc, size_type_node,
11874 size_zero_node, s2);
11875 }
11876
11877 /* If the second argument is "", return __builtin_strlen(s1). */
11878 if (p2 && *p2 == '\0')
11879 {
11880 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11881
11882 /* If the replacement _DECL isn't initialized, don't do the
11883 transformation. */
11884 if (!fn)
11885 return NULL_TREE;
11886
11887 return build_call_expr_loc (loc, fn, 1, s1);
11888 }
11889 return NULL_TREE;
11890 }
11891 }
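/* E.g. strcspn ("abc", "c") folds to the constant 2, and
strcspn (s, "") becomes __builtin_strlen (s), since no character
can match an empty reject set. */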
11892
11893 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11894 to the call. IGNORE is true if the value returned
11895 by the builtin will be ignored. UNLOCKED is true if this is
11896 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11897 the known length of the string. Return NULL_TREE if no simplification
11898 was possible. */
11899
11900 tree
11901 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11902 bool ignore, bool unlocked, tree len)
11903 {
11904 /* If we're using an unlocked function, assume the other unlocked
11905 functions exist explicitly. */
11906 tree const fn_fputc = (unlocked
11907 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11908 : builtin_decl_implicit (BUILT_IN_FPUTC));
11909 tree const fn_fwrite = (unlocked
11910 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11911 : builtin_decl_implicit (BUILT_IN_FWRITE));
11912
11913 /* If the return value is used, don't do the transformation. */
11914 if (!ignore)
11915 return NULL_TREE;
11916
11917 /* Verify the arguments in the original call. */
11918 if (!validate_arg (arg0, POINTER_TYPE)
11919 || !validate_arg (arg1, POINTER_TYPE))
11920 return NULL_TREE;
11921
11922 if (! len)
11923 len = c_strlen (arg0, 0);
11924
11925 /* Get the length of the string passed to fputs. If the length
11926 can't be determined, punt. */
11927 if (!len
11928 || TREE_CODE (len) != INTEGER_CST)
11929 return NULL_TREE;
11930
11931 switch (compare_tree_int (len, 1))
11932 {
11933 case -1: /* length is 0, delete the call entirely. */
11934 return omit_one_operand_loc (loc, integer_type_node,
11935 integer_zero_node, arg1);
11936
11937 case 0: /* length is 1, call fputc. */
11938 {
11939 const char *p = c_getstr (arg0);
11940
11941 if (p != NULL)
11942 {
11943 if (fn_fputc)
11944 return build_call_expr_loc (loc, fn_fputc, 2,
11945 build_int_cst
11946 (integer_type_node, p[0]), arg1);
11947 else
11948 return NULL_TREE;
11949 }
11950 }
11951 /* FALLTHROUGH */
11952 case 1: /* length is greater than 1, call fwrite. */
11953 {
11954 /* If optimizing for size, keep fputs. */
11955 if (optimize_function_for_size_p (cfun))
11956 return NULL_TREE;
11957 /* New argument list transforming fputs(string, stream) to
11958 fwrite(string, 1, len, stream). */
11959 if (fn_fwrite)
11960 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11961 size_one_node, len, arg1);
11962 else
11963 return NULL_TREE;
11964 }
11965 default:
11966 gcc_unreachable ();
11967 }
11968 return NULL_TREE;
11969 }
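/* Summarizing the length-based cases above:

fputs ("", f)     -> 0 (call deleted, F still evaluated)
fputs ("x", f)    -> fputc ('x', f)
fputs ("abcd", f) -> fwrite ("abcd", 1, 4, f)

where the last form is skipped when optimizing for size. */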
11970
11971 /* Fold the next_arg or va_start call EXP. Returns true if an error
11972 was produced, false otherwise. This is done so that we don't output
11973 the error or warning more than once. */
11974
11975 bool
11976 fold_builtin_next_arg (tree exp, bool va_start_p)
11977 {
11978 tree fntype = TREE_TYPE (current_function_decl);
11979 int nargs = call_expr_nargs (exp);
11980 tree arg;
11981 /* There is a good chance the current input_location points inside the
11982 definition of the va_start macro (perhaps on the token for the
11983 builtin) in a system header, so warnings will not be emitted.
11984 Use the location in real source code. */
11985 source_location current_location =
11986 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11987 NULL);
11988
11989 if (!stdarg_p (fntype))
11990 {
11991 error ("%<va_start%> used in function with fixed args");
11992 return true;
11993 }
11994
11995 if (va_start_p)
11996 {
11997 if (nargs != 2)
11998 {
11999 error ("wrong number of arguments to function %<va_start%>");
12000 return true;
12001 }
12002 arg = CALL_EXPR_ARG (exp, 1);
12003 }
12004 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
12005 when we checked the arguments and if needed issued a warning. */
12006 else
12007 {
12008 if (nargs == 0)
12009 {
12010 /* Evidently an out of date version of <stdarg.h>; can't validate
12011 va_start's second argument, but can still work as intended. */
12012 warning_at (current_location,
12013 OPT_Wvarargs,
12014 "%<__builtin_next_arg%> called without an argument");
12015 return true;
12016 }
12017 else if (nargs > 1)
12018 {
12019 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12020 return true;
12021 }
12022 arg = CALL_EXPR_ARG (exp, 0);
12023 }
12024
12025 if (TREE_CODE (arg) == SSA_NAME)
12026 arg = SSA_NAME_VAR (arg);
12027
12028 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12029 or __builtin_next_arg (0) the first time we see it, after checking
12030 the arguments and if needed issuing a warning. */
12031 if (!integer_zerop (arg))
12032 {
12033 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12034
12035 /* Strip off all nops for the sake of the comparison. This
12036 is not quite the same as STRIP_NOPS. It does more.
12037 We must also strip off INDIRECT_EXPR for C++ reference
12038 parameters. */
12039 while (CONVERT_EXPR_P (arg)
12040 || TREE_CODE (arg) == INDIRECT_REF)
12041 arg = TREE_OPERAND (arg, 0);
12042 if (arg != last_parm)
12043 {
12044 /* FIXME: Sometimes the tree optimizers hand us something
12045 other than the last argument even though the user used the
12046 last argument. We just warn and carry on, which can
12047 result in wrong code because of
12048 it. */
12049 warning_at (current_location,
12050 OPT_Wvarargs,
12051 "second parameter of %<va_start%> not last named argument");
12052 }
12053
12054 /* Undefined by C99 7.15.1.4p4 (va_start):
12055 "If the parameter parmN is declared with the register storage
12056 class, with a function or array type, or with a type that is
12057 not compatible with the type that results after application of
12058 the default argument promotions, the behavior is undefined."
12059 */
12060 else if (DECL_REGISTER (arg))
12061 {
12062 warning_at (current_location,
12063 OPT_Wvarargs,
12064 "undefined behaviour when second parameter of "
12065 "%<va_start%> is declared with %<register%> storage");
12066 }
12067
12068 /* We want to verify the second parameter just once before the tree
12069 optimizers are run and then avoid keeping it in the tree,
12070 as otherwise we could warn even for correct code like:
12071 void foo (int i, ...)
12072 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12073 if (va_start_p)
12074 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12075 else
12076 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12077 }
12078 return false;
12079 }
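/* For example, in

void foo (int i, ...)
{ va_list ap; va_start (ap, i); va_end (ap); }

the check passes and the second argument of va_start is then
replaced by 0, whereas passing anything but the last named
parameter draws the -Wvarargs warning above. */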
12080
12081
12082 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12083 ORIG may be null if this is a 2-argument call. We don't attempt to
12084 simplify calls with more than 3 arguments.
12085
12086 Return NULL_TREE if no simplification was possible, otherwise return the
12087 simplified form of the call as a tree. If IGNORED is true, it means that
12088 the caller does not use the returned value of the function. */
12089
12090 static tree
12091 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12092 tree orig, int ignored)
12093 {
12094 tree call, retval;
12095 const char *fmt_str = NULL;
12096
12097 /* Verify the required arguments in the original call. We deal with two
12098 types of sprintf() calls: 'sprintf (str, fmt)' and
12099 'sprintf (dest, "%s", orig)'. */
12100 if (!validate_arg (dest, POINTER_TYPE)
12101 || !validate_arg (fmt, POINTER_TYPE))
12102 return NULL_TREE;
12103 if (orig && !validate_arg (orig, POINTER_TYPE))
12104 return NULL_TREE;
12105
12106 /* Check whether the format is a literal string constant. */
12107 fmt_str = c_getstr (fmt);
12108 if (fmt_str == NULL)
12109 return NULL_TREE;
12110
12111 call = NULL_TREE;
12112 retval = NULL_TREE;
12113
12114 if (!init_target_chars ())
12115 return NULL_TREE;
12116
12117 /* If the format doesn't contain % args or %%, use strcpy. */
12118 if (strchr (fmt_str, target_percent) == NULL)
12119 {
12120 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12121
12122 if (!fn)
12123 return NULL_TREE;
12124
12125 /* Don't optimize sprintf (buf, "abc", ptr++). */
12126 if (orig)
12127 return NULL_TREE;
12128
12129 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12130 'format' is known to contain no % formats. */
12131 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12132 if (!ignored)
12133 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12134 }
12135
12136 /* If the format is "%s", use strcpy; a used result also needs strlen (ORIG) to be a known constant. */
12137 else if (strcmp (fmt_str, target_percent_s) == 0)
12138 {
12139 tree fn;
12140 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12141
12142 if (!fn)
12143 return NULL_TREE;
12144
12145 /* Don't crash on sprintf (str1, "%s"). */
12146 if (!orig)
12147 return NULL_TREE;
12148
12149 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12150 if (!ignored)
12151 {
12152 retval = c_strlen (orig, 1);
12153 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12154 return NULL_TREE;
12155 }
12156 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12157 }
12158
12159 if (call && retval)
12160 {
12161 retval = fold_convert_loc
12162 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12163 retval);
12164 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12165 }
12166 else
12167 return call;
12168 }
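
/* A sketch of what the folding above does at the source level, with
   invented names BUF and S:

     sprintf (buf, "hello");     becomes   strcpy (buf, "hello");
     sprintf (buf, "%s", s);     becomes   strcpy (buf, s);

   When the sprintf return value is used, the first form substitutes
   the constant 5 for it; the second form additionally requires the
   length of S to be a compile-time constant.  */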
12169
12170 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12171 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12172 attempt to simplify calls with more than 4 arguments.
12173
12174 Return NULL_TREE if no simplification was possible, otherwise return the
12175 simplified form of the call as a tree. If IGNORED is true, it means that
12176 the caller does not use the returned value of the function. */
12177
12178 static tree
12179 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12180 tree orig, int ignored)
12181 {
12182 tree call, retval;
12183 const char *fmt_str = NULL;
12184 unsigned HOST_WIDE_INT destlen;
12185
12186 /* Verify the required arguments in the original call. We deal with two
12187 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12188 'snprintf (dest, cst, "%s", orig)'. */
12189 if (!validate_arg (dest, POINTER_TYPE)
12190 || !validate_arg (destsize, INTEGER_TYPE)
12191 || !validate_arg (fmt, POINTER_TYPE))
12192 return NULL_TREE;
12193 if (orig && !validate_arg (orig, POINTER_TYPE))
12194 return NULL_TREE;
12195
12196 if (!tree_fits_uhwi_p (destsize))
12197 return NULL_TREE;
12198
12199 /* Check whether the format is a literal string constant. */
12200 fmt_str = c_getstr (fmt);
12201 if (fmt_str == NULL)
12202 return NULL_TREE;
12203
12204 call = NULL_TREE;
12205 retval = NULL_TREE;
12206
12207 if (!init_target_chars ())
12208 return NULL_TREE;
12209
12210 destlen = tree_to_uhwi (destsize);
12211
12212 /* If the format doesn't contain % args or %%, use strcpy. */
12213 if (strchr (fmt_str, target_percent) == NULL)
12214 {
12215 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12216 size_t len = strlen (fmt_str);
12217
12218 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12219 if (orig)
12220 return NULL_TREE;
12221
12222 /* We could expand this as
12223 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12224 or to
12225 memcpy (str, fmt_with_nul_at_cstm1, cst);
12226 but in the former case that might increase code size
12227 and in the latter case grow .rodata section too much.
12228 So punt for now. */
12229 if (len >= destlen)
12230 return NULL_TREE;
12231
12232 if (!fn)
12233 return NULL_TREE;
12234
12235 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12236 'format' is known to contain no % formats and
12237 strlen (fmt) < cst. */
12238 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12239
12240 if (!ignored)
12241 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12242 }
12243
12244 /* If the format is "%s", use strcpy when strlen (ORIG) is a known constant smaller than DESTLEN. */
12245 else if (strcmp (fmt_str, target_percent_s) == 0)
12246 {
12247 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12248 unsigned HOST_WIDE_INT origlen;
12249
12250 /* Don't crash on snprintf (str1, cst, "%s"). */
12251 if (!orig)
12252 return NULL_TREE;
12253
12254 retval = c_strlen (orig, 1);
12255 if (!retval || !tree_fits_uhwi_p (retval))
12256 return NULL_TREE;
12257
12258 origlen = tree_to_uhwi (retval);
12259 /* We could expand this as
12260 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12261 or to
12262 memcpy (str1, str2_with_nul_at_cstm1, cst);
12263 but in the former case that might increase code size
12264 and in the latter case grow .rodata section too much.
12265 So punt for now. */
12266 if (origlen >= destlen)
12267 return NULL_TREE;
12268
12269 /* Convert snprintf (str1, cst, "%s", str2) into
12270 strcpy (str1, str2) if strlen (str2) < cst. */
12271 if (!fn)
12272 return NULL_TREE;
12273
12274 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12275
12276 if (ignored)
12277 retval = NULL_TREE;
12278 }
12279
12280 if (call && retval)
12281 {
12282 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12283 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12284 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12285 }
12286 else
12287 return call;
12288 }
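
/* Likewise for snprintf, sketched with an invented BUF:

     char buf[8];
     snprintf (buf, sizeof buf, "abc");   becomes   strcpy (buf, "abc");

   because strlen ("abc") == 3 < 8.  With a 3-byte buffer the call
   would have to truncate, so, as the comments above explain, the
   folding punts and the library call is kept.  */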
12289
12290 /* Expand a call EXP to __builtin_object_size. */
12291
12292 rtx
12293 expand_builtin_object_size (tree exp)
12294 {
12295 tree ost;
12296 int object_size_type;
12297 tree fndecl = get_callee_fndecl (exp);
12298
12299 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12300 {
12301 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12302 exp, fndecl);
12303 expand_builtin_trap ();
12304 return const0_rtx;
12305 }
12306
12307 ost = CALL_EXPR_ARG (exp, 1);
12308 STRIP_NOPS (ost);
12309
12310 if (TREE_CODE (ost) != INTEGER_CST
12311 || tree_int_cst_sgn (ost) < 0
12312 || compare_tree_int (ost, 3) > 0)
12313 {
12314 error ("%Klast argument of %D is not integer constant between 0 and 3",
12315 exp, fndecl);
12316 expand_builtin_trap ();
12317 return const0_rtx;
12318 }
12319
12320 object_size_type = tree_to_shwi (ost);
12321
12322 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12323 }
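
/* In other words, when the size could not be determined and folded
   earlier, the expansion above yields the documented "unknown"
   answers for any pointer P:

     __builtin_object_size (p, 0)   expands to   (size_t) -1
     __builtin_object_size (p, 2)   expands to   (size_t) 0  */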
12324
12325 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12326 FCODE is the BUILT_IN_* to use.
12327 Return NULL_RTX if we failed; the caller should emit a normal call,
12328 otherwise try to get the result in TARGET, if convenient (and in
12329 mode MODE if that's convenient). */
12330
12331 static rtx
12332 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12333 enum built_in_function fcode)
12334 {
12335 tree dest, src, len, size;
12336
12337 if (!validate_arglist (exp,
12338 POINTER_TYPE,
12339 fcode == BUILT_IN_MEMSET_CHK
12340 ? INTEGER_TYPE : POINTER_TYPE,
12341 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12342 return NULL_RTX;
12343
12344 dest = CALL_EXPR_ARG (exp, 0);
12345 src = CALL_EXPR_ARG (exp, 1);
12346 len = CALL_EXPR_ARG (exp, 2);
12347 size = CALL_EXPR_ARG (exp, 3);
12348
12349 if (! tree_fits_uhwi_p (size))
12350 return NULL_RTX;
12351
12352 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12353 {
12354 tree fn;
12355
12356 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12357 {
12358 warning_at (tree_nonartificial_location (exp),
12359 0, "%Kcall to %D will always overflow destination buffer",
12360 exp, get_callee_fndecl (exp));
12361 return NULL_RTX;
12362 }
12363
12364 fn = NULL_TREE;
12365 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12366 mem{cpy,pcpy,move,set} is available. */
12367 switch (fcode)
12368 {
12369 case BUILT_IN_MEMCPY_CHK:
12370 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12371 break;
12372 case BUILT_IN_MEMPCPY_CHK:
12373 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12374 break;
12375 case BUILT_IN_MEMMOVE_CHK:
12376 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12377 break;
12378 case BUILT_IN_MEMSET_CHK:
12379 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12380 break;
12381 default:
12382 break;
12383 }
12384
12385 if (! fn)
12386 return NULL_RTX;
12387
12388 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12389 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12390 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12391 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12392 }
12393 else if (fcode == BUILT_IN_MEMSET_CHK)
12394 return NULL_RTX;
12395 else
12396 {
12397 unsigned int dest_align = get_pointer_alignment (dest);
12398
12399 /* If DEST is not a pointer type, call the normal function. */
12400 if (dest_align == 0)
12401 return NULL_RTX;
12402
12403 /* If SRC and DEST are the same (and not volatile), do nothing. */
12404 if (operand_equal_p (src, dest, 0))
12405 {
12406 tree expr;
12407
12408 if (fcode != BUILT_IN_MEMPCPY_CHK)
12409 {
12410 /* Evaluate and ignore LEN in case it has side-effects. */
12411 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12412 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12413 }
12414
12415 expr = fold_build_pointer_plus (dest, len);
12416 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12417 }
12418
12419 /* __memmove_chk special case. */
12420 if (fcode == BUILT_IN_MEMMOVE_CHK)
12421 {
12422 unsigned int src_align = get_pointer_alignment (src);
12423
12424 if (src_align == 0)
12425 return NULL_RTX;
12426
12427 /* If src is categorized for a readonly section we can use
12428 normal __memcpy_chk. */
12429 if (readonly_data_expr (src))
12430 {
12431 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12432 if (!fn)
12433 return NULL_RTX;
12434 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12435 dest, src, len, size);
12436 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12437 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12438 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12439 }
12440 }
12441 return NULL_RTX;
12442 }
12443 }
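
/* A sketch of the effect, with invented names D and S:

     char d[32];
     __builtin___memcpy_chk (d, s, 16, __builtin_object_size (d, 0));

   Here the length 16 is a known constant that fits in the known
   object size 32, so the call is expanded exactly like
   memcpy (d, s, 16).  Had the length been a constant larger than the
   object size, the "will always overflow destination buffer" warning
   above would be emitted and a normal library call kept.  */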
12444
12445 /* Emit warning if a buffer overflow is detected at compile time. */
12446
12447 static void
12448 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12449 {
12450 int is_strlen = 0;
12451 tree len, size;
12452 location_t loc = tree_nonartificial_location (exp);
12453
12454 switch (fcode)
12455 {
12456 case BUILT_IN_STRCPY_CHK:
12457 case BUILT_IN_STPCPY_CHK:
12458 /* For __strcat_chk the warning will be emitted only if overflowing
12459 by at least strlen (dest) + 1 bytes. */
12460 case BUILT_IN_STRCAT_CHK:
12461 len = CALL_EXPR_ARG (exp, 1);
12462 size = CALL_EXPR_ARG (exp, 2);
12463 is_strlen = 1;
12464 break;
12465 case BUILT_IN_STRNCAT_CHK:
12466 case BUILT_IN_STRNCPY_CHK:
12467 case BUILT_IN_STPNCPY_CHK:
12468 len = CALL_EXPR_ARG (exp, 2);
12469 size = CALL_EXPR_ARG (exp, 3);
12470 break;
12471 case BUILT_IN_SNPRINTF_CHK:
12472 case BUILT_IN_VSNPRINTF_CHK:
12473 len = CALL_EXPR_ARG (exp, 1);
12474 size = CALL_EXPR_ARG (exp, 3);
12475 break;
12476 default:
12477 gcc_unreachable ();
12478 }
12479
12480 if (!len || !size)
12481 return;
12482
12483 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12484 return;
12485
12486 if (is_strlen)
12487 {
12488 len = c_strlen (len, 1);
12489 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12490 return;
12491 }
12492 else if (fcode == BUILT_IN_STRNCAT_CHK)
12493 {
12494 tree src = CALL_EXPR_ARG (exp, 1);
12495 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12496 return;
12497 src = c_strlen (src, 1);
12498 if (! src || ! tree_fits_uhwi_p (src))
12499 {
12500 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12501 exp, get_callee_fndecl (exp));
12502 return;
12503 }
12504 else if (tree_int_cst_lt (src, size))
12505 return;
12506 }
12507 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12508 return;
12509
12510 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12511 exp, get_callee_fndecl (exp));
12512 }
12513
12514 /* Emit warning if a buffer overflow is detected at compile time
12515 in __sprintf_chk/__vsprintf_chk calls. */
12516
12517 static void
12518 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12519 {
12520 tree size, len, fmt;
12521 const char *fmt_str;
12522 int nargs = call_expr_nargs (exp);
12523
12524 /* Verify the required arguments in the original call. */
12525
12526 if (nargs < 4)
12527 return;
12528 size = CALL_EXPR_ARG (exp, 2);
12529 fmt = CALL_EXPR_ARG (exp, 3);
12530
12531 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12532 return;
12533
12534 /* Check whether the format is a literal string constant. */
12535 fmt_str = c_getstr (fmt);
12536 if (fmt_str == NULL)
12537 return;
12538
12539 if (!init_target_chars ())
12540 return;
12541
12542 /* If the format doesn't contain % args or %%, we know its size. */
12543 if (strchr (fmt_str, target_percent) == 0)
12544 len = build_int_cstu (size_type_node, strlen (fmt_str));
12545 /* If the format is "%s" and the first variadic argument is a
12546 string literal, we know its size too. */
12547 else if (fcode == BUILT_IN_SPRINTF_CHK
12548 && strcmp (fmt_str, target_percent_s) == 0)
12549 {
12550 tree arg;
12551
12552 if (nargs < 5)
12553 return;
12554 arg = CALL_EXPR_ARG (exp, 4);
12555 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12556 return;
12557
12558 len = c_strlen (arg, 1);
12559 if (!len || ! tree_fits_uhwi_p (len))
12560 return;
12561 }
12562 else
12563 return;
12564
12565 if (! tree_int_cst_lt (len, size))
12566 warning_at (tree_nonartificial_location (exp),
12567 0, "%Kcall to %D will always overflow destination buffer",
12568 exp, get_callee_fndecl (exp));
12569 }
12570
12571 /* Emit a warning if free is called with the address of a variable. */
12572
12573 static void
12574 maybe_emit_free_warning (tree exp)
12575 {
12576 tree arg = CALL_EXPR_ARG (exp, 0);
12577
12578 STRIP_NOPS (arg);
12579 if (TREE_CODE (arg) != ADDR_EXPR)
12580 return;
12581
12582 arg = get_base_address (TREE_OPERAND (arg, 0));
12583 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12584 return;
12585
12586 if (SSA_VAR_P (arg))
12587 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12588 "%Kattempt to free a non-heap object %qD", exp, arg);
12589 else
12590 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12591 "%Kattempt to free a non-heap object", exp);
12592 }
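
/* For example (X is an invented name), the check above catches

     int x;
     free (&x);

   and emits "attempt to free a non-heap object 'x'" under
   -Wfree-nonheap-object, since &x resolves to a declared variable
   rather than to memory obtained from an allocator.  */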
12593
12594 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12595 if possible. */
12596
12597 tree
12598 fold_builtin_object_size (tree ptr, tree ost)
12599 {
12600 unsigned HOST_WIDE_INT bytes;
12601 int object_size_type;
12602 int precision = TYPE_PRECISION (TREE_TYPE (ptr));
12603
12604 if (!validate_arg (ptr, POINTER_TYPE)
12605 || !validate_arg (ost, INTEGER_TYPE))
12606 return NULL_TREE;
12607
12608 STRIP_NOPS (ost);
12609
12610 if (TREE_CODE (ost) != INTEGER_CST
12611 || tree_int_cst_sgn (ost) < 0
12612 || compare_tree_int (ost, 3) > 0)
12613 return NULL_TREE;
12614
12615 object_size_type = tree_to_shwi (ost);
12616
12617 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12618 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12619 and (size_t) 0 for types 2 and 3. */
12620 if (TREE_SIDE_EFFECTS (ptr))
12621 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12622
12623 if (TREE_CODE (ptr) == ADDR_EXPR)
12624 {
12626 wide_int wbytes
12627 = wi::uhwi (compute_builtin_object_size (ptr, object_size_type),
12628 precision);
12629 if (wi::fits_to_tree_p (wbytes, size_type_node))
12630 return wide_int_to_tree (size_type_node, wbytes);
12631 }
12632 else if (TREE_CODE (ptr) == SSA_NAME)
12633 {
12634 /* If object size is not known yet, delay folding until
12635 later. Maybe subsequent passes will help determining
12636 it. */
12637 wide_int wbytes;
12638 bytes = compute_builtin_object_size (ptr, object_size_type);
12639 wbytes = wi::uhwi (bytes, precision);
12640 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12641 && wi::fits_to_tree_p (wbytes, size_type_node))
12642 return wide_int_to_tree (size_type_node, wbytes);
12643 }
12644
12645 return NULL_TREE;
12646 }
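
/* A small worked example with an invented BUF: for

     char buf[64];
     size_t n = __builtin_object_size (&buf[10], 0);

   the address names a complete object of known size, so the call
   folds to the constant 54, the space remaining past the offset.  */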
12647
12648 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12649 DEST, SRC, LEN, and SIZE are the arguments to the call.
12650 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12651 code of the builtin. If MAXLEN is not NULL, it is the maximum length
12652 passed as third argument. */
12653
12654 tree
12655 fold_builtin_memory_chk (location_t loc, tree fndecl,
12656 tree dest, tree src, tree len, tree size,
12657 tree maxlen, bool ignore,
12658 enum built_in_function fcode)
12659 {
12660 tree fn;
12661
12662 if (!validate_arg (dest, POINTER_TYPE)
12663 || !validate_arg (src,
12664 (fcode == BUILT_IN_MEMSET_CHK
12665 ? INTEGER_TYPE : POINTER_TYPE))
12666 || !validate_arg (len, INTEGER_TYPE)
12667 || !validate_arg (size, INTEGER_TYPE))
12668 return NULL_TREE;
12669
12670 /* If SRC and DEST are the same (and not volatile), return DEST
12671 (resp. DEST+LEN for __mempcpy_chk). */
12672 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12673 {
12674 if (fcode != BUILT_IN_MEMPCPY_CHK)
12675 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12676 dest, len);
12677 else
12678 {
12679 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12680 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12681 }
12682 }
12683
12684 if (! tree_fits_uhwi_p (size))
12685 return NULL_TREE;
12686
12687 if (! integer_all_onesp (size))
12688 {
12689 if (! tree_fits_uhwi_p (len))
12690 {
12691 /* If LEN is not constant, try MAXLEN too.
12692 For MAXLEN only allow optimizing into non-_ocs function
12693 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12694 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12695 {
12696 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12697 {
12698 /* (void) __mempcpy_chk () can be optimized into
12699 (void) __memcpy_chk (). */
12700 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12701 if (!fn)
12702 return NULL_TREE;
12703
12704 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12705 }
12706 return NULL_TREE;
12707 }
12708 }
12709 else
12710 maxlen = len;
12711
12712 if (tree_int_cst_lt (size, maxlen))
12713 return NULL_TREE;
12714 }
12715
12716 fn = NULL_TREE;
12717 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12718 mem{cpy,pcpy,move,set} is available. */
12719 switch (fcode)
12720 {
12721 case BUILT_IN_MEMCPY_CHK:
12722 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12723 break;
12724 case BUILT_IN_MEMPCPY_CHK:
12725 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12726 break;
12727 case BUILT_IN_MEMMOVE_CHK:
12728 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12729 break;
12730 case BUILT_IN_MEMSET_CHK:
12731 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12732 break;
12733 default:
12734 break;
12735 }
12736
12737 if (!fn)
12738 return NULL_TREE;
12739
12740 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12741 }
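
/* At the source level the folding above behaves roughly as follows,
   with invented names and BOS standing for the object-size argument:

     __builtin___memcpy_chk (d, s, n, bos)   ->   memcpy (d, s, n)
       when BOS is (size_t) -1, or N is a constant not larger than BOS;
     (void) __builtin___mempcpy_chk (d, s, n, bos)
       ->   (void) __builtin___memcpy_chk (d, s, n, bos)
       when the result is unused and N is not known.  */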
12742
12743 /* Fold a call to the __st[rp]cpy_chk builtin.
12744 DEST, SRC, and SIZE are the arguments to the call.
12745 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12746 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
12747 the string passed as second argument. */
12748
12749 tree
12750 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12751 tree src, tree size,
12752 tree maxlen, bool ignore,
12753 enum built_in_function fcode)
12754 {
12755 tree len, fn;
12756
12757 if (!validate_arg (dest, POINTER_TYPE)
12758 || !validate_arg (src, POINTER_TYPE)
12759 || !validate_arg (size, INTEGER_TYPE))
12760 return NULL_TREE;
12761
12762 /* If SRC and DEST are the same (and not volatile), return DEST. */
12763 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12764 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12765
12766 if (! tree_fits_uhwi_p (size))
12767 return NULL_TREE;
12768
12769 if (! integer_all_onesp (size))
12770 {
12771 len = c_strlen (src, 1);
12772 if (! len || ! tree_fits_uhwi_p (len))
12773 {
12774 /* If LEN is not constant, try MAXLEN too.
12775 For MAXLEN only allow optimizing into non-_ocs function
12776 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12777 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12778 {
12779 if (fcode == BUILT_IN_STPCPY_CHK)
12780 {
12781 if (! ignore)
12782 return NULL_TREE;
12783
12784 /* If return value of __stpcpy_chk is ignored,
12785 optimize into __strcpy_chk. */
12786 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12787 if (!fn)
12788 return NULL_TREE;
12789
12790 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12791 }
12792
12793 if (! len || TREE_SIDE_EFFECTS (len))
12794 return NULL_TREE;
12795
12796 /* If c_strlen returned something, but not a constant,
12797 transform __strcpy_chk into __memcpy_chk. */
12798 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12799 if (!fn)
12800 return NULL_TREE;
12801
12802 len = fold_convert_loc (loc, size_type_node, len);
12803 len = size_binop_loc (loc, PLUS_EXPR, len,
12804 build_int_cst (size_type_node, 1));
12805 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12806 build_call_expr_loc (loc, fn, 4,
12807 dest, src, len, size));
12808 }
12809 }
12810 else
12811 maxlen = len;
12812
12813 if (! tree_int_cst_lt (maxlen, size))
12814 return NULL_TREE;
12815 }
12816
12817 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12818 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12819 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12820 if (!fn)
12821 return NULL_TREE;
12822
12823 return build_call_expr_loc (loc, fn, 2, dest, src);
12824 }
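
/* A sketch with invented names: for

     char d[8];
     __builtin___strcpy_chk (d, "abc", __builtin_object_size (d, 0));

   c_strlen gives the constant 3, smaller than the object size 8, so
   the call folds to strcpy (d, "abc").  When c_strlen returns a
   non-constant expression instead, the call is turned into
   __memcpy_chk with length strlen + 1, as described above.  */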
12825
12826 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12827 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
12828 length passed as third argument. IGNORE is true if the return value can be
12829 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12830
12831 tree
12832 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12833 tree len, tree size, tree maxlen, bool ignore,
12834 enum built_in_function fcode)
12835 {
12836 tree fn;
12837
12838 if (!validate_arg (dest, POINTER_TYPE)
12839 || !validate_arg (src, POINTER_TYPE)
12840 || !validate_arg (len, INTEGER_TYPE)
12841 || !validate_arg (size, INTEGER_TYPE))
12842 return NULL_TREE;
12843
12844 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12845 {
12846 /* If return value of __stpncpy_chk is ignored,
12847 optimize into __strncpy_chk. */
12848 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12849 if (fn)
12850 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12851 }
12852
12853 if (! tree_fits_uhwi_p (size))
12854 return NULL_TREE;
12855
12856 if (! integer_all_onesp (size))
12857 {
12858 if (! tree_fits_uhwi_p (len))
12859 {
12860 /* If LEN is not constant, try MAXLEN too.
12861 For MAXLEN only allow optimizing into non-_ocs function
12862 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12863 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12864 return NULL_TREE;
12865 }
12866 else
12867 maxlen = len;
12868
12869 if (tree_int_cst_lt (size, maxlen))
12870 return NULL_TREE;
12871 }
12872
12873 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12874 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12875 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12876 if (!fn)
12877 return NULL_TREE;
12878
12879 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12880 }
12881
12882 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12883 are the arguments to the call. */
12884
12885 static tree
12886 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12887 tree src, tree size)
12888 {
12889 tree fn;
12890 const char *p;
12891
12892 if (!validate_arg (dest, POINTER_TYPE)
12893 || !validate_arg (src, POINTER_TYPE)
12894 || !validate_arg (size, INTEGER_TYPE))
12895 return NULL_TREE;
12896
12897 p = c_getstr (src);
12898 /* If the SRC parameter is "", return DEST. */
12899 if (p && *p == '\0')
12900 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12901
12902 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12903 return NULL_TREE;
12904
12905 /* If __builtin_strcat_chk is used, assume strcat is available. */
12906 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12907 if (!fn)
12908 return NULL_TREE;
12909
12910 return build_call_expr_loc (loc, fn, 2, dest, src);
12911 }
12912
12913 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12914 LEN, and SIZE. */
12915
12916 static tree
12917 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12918 tree dest, tree src, tree len, tree size)
12919 {
12920 tree fn;
12921 const char *p;
12922
12923 if (!validate_arg (dest, POINTER_TYPE)
12924 || !validate_arg (src, POINTER_TYPE)
12925 || !validate_arg (len, INTEGER_TYPE)
12926 || !validate_arg (size, INTEGER_TYPE))
12927 return NULL_TREE;
12928
12929 p = c_getstr (src);
12930 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12931 if (p && *p == '\0')
12932 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12933 else if (integer_zerop (len))
12934 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12935
12936 if (! tree_fits_uhwi_p (size))
12937 return NULL_TREE;
12938
12939 if (! integer_all_onesp (size))
12940 {
12941 tree src_len = c_strlen (src, 1);
12942 if (src_len
12943 && tree_fits_uhwi_p (src_len)
12944 && tree_fits_uhwi_p (len)
12945 && ! tree_int_cst_lt (len, src_len))
12946 {
12947 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12948 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12949 if (!fn)
12950 return NULL_TREE;
12951
12952 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12953 }
12954 return NULL_TREE;
12955 }
12956
12957 /* If __builtin_strncat_chk is used, assume strncat is available. */
12958 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
12959 if (!fn)
12960 return NULL_TREE;
12961
12962 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12963 }
12964
12965 /* Fold a call to __{,v}sprintf_chk with NARGS arguments passed as ARGS.
12966 Return NULL_TREE if a normal call should be emitted rather than
12967 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12968 or BUILT_IN_VSPRINTF_CHK. */
12969
12970 static tree
12971 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12972 enum built_in_function fcode)
12973 {
12974 tree dest, size, len, fn, fmt, flag;
12975 const char *fmt_str;
12976
12977 /* Verify the required arguments in the original call. */
12978 if (nargs < 4)
12979 return NULL_TREE;
12980 dest = args[0];
12981 if (!validate_arg (dest, POINTER_TYPE))
12982 return NULL_TREE;
12983 flag = args[1];
12984 if (!validate_arg (flag, INTEGER_TYPE))
12985 return NULL_TREE;
12986 size = args[2];
12987 if (!validate_arg (size, INTEGER_TYPE))
12988 return NULL_TREE;
12989 fmt = args[3];
12990 if (!validate_arg (fmt, POINTER_TYPE))
12991 return NULL_TREE;
12992
12993 if (! tree_fits_uhwi_p (size))
12994 return NULL_TREE;
12995
12996 len = NULL_TREE;
12997
12998 if (!init_target_chars ())
12999 return NULL_TREE;
13000
13001 /* Check whether the format is a literal string constant. */
13002 fmt_str = c_getstr (fmt);
13003 if (fmt_str != NULL)
13004 {
13005 /* If the format doesn't contain % args or %%, we know the size. */
13006 if (strchr (fmt_str, target_percent) == 0)
13007 {
13008 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13009 len = build_int_cstu (size_type_node, strlen (fmt_str));
13010 }
13011 /* If the format is "%s" and the first variadic argument is a
13012 string literal, we know the size too. */
13013 else if (fcode == BUILT_IN_SPRINTF_CHK
13014 && strcmp (fmt_str, target_percent_s) == 0)
13015 {
13016 tree arg;
13017
13018 if (nargs == 5)
13019 {
13020 arg = args[4];
13021 if (validate_arg (arg, POINTER_TYPE))
13022 {
13023 len = c_strlen (arg, 1);
13024 if (! len || ! tree_fits_uhwi_p (len))
13025 len = NULL_TREE;
13026 }
13027 }
13028 }
13029 }
13030
13031 if (! integer_all_onesp (size))
13032 {
13033 if (! len || ! tree_int_cst_lt (len, size))
13034 return NULL_TREE;
13035 }
13036
13037 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13038 or if format doesn't contain % chars or is "%s". */
13039 if (! integer_zerop (flag))
13040 {
13041 if (fmt_str == NULL)
13042 return NULL_TREE;
13043 if (strchr (fmt_str, target_percent) != NULL
13044 && strcmp (fmt_str, target_percent_s))
13045 return NULL_TREE;
13046 }
13047
13048 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13049 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13050 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13051 if (!fn)
13052 return NULL_TREE;
13053
13054 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13055 }
13056
13057 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13058 a normal call should be emitted rather than expanding the function
13059 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13060
13061 static tree
13062 fold_builtin_sprintf_chk (location_t loc, tree exp,
13063 enum built_in_function fcode)
13064 {
13065 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13066 CALL_EXPR_ARGP (exp), fcode);
13067 }
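
/* The net effect of the two functions above, sketched with invented
   names: a fortified call such as

     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 1),
                              "%s", "abc");

   becomes plain sprintf (d, "%s", "abc"), provided the literal's
   length 3 is smaller than the known destination size (or that size
   is (size_t) -1, i.e. unknown).  */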
13068
13069 /* Fold a call to __{,v}snprintf_chk with NARGS arguments passed as ARGS.
13070 Return NULL_TREE if a normal call should be emitted rather than expanding
13071 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13072 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13073 passed as second argument. */
13074
13075 static tree
13076 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13077 tree maxlen, enum built_in_function fcode)
13078 {
13079 tree dest, size, len, fn, fmt, flag;
13080 const char *fmt_str;
13081
13082 /* Verify the required arguments in the original call. */
13083 if (nargs < 5)
13084 return NULL_TREE;
13085 dest = args[0];
13086 if (!validate_arg (dest, POINTER_TYPE))
13087 return NULL_TREE;
13088 len = args[1];
13089 if (!validate_arg (len, INTEGER_TYPE))
13090 return NULL_TREE;
13091 flag = args[2];
13092 if (!validate_arg (flag, INTEGER_TYPE))
13093 return NULL_TREE;
13094 size = args[3];
13095 if (!validate_arg (size, INTEGER_TYPE))
13096 return NULL_TREE;
13097 fmt = args[4];
13098 if (!validate_arg (fmt, POINTER_TYPE))
13099 return NULL_TREE;
13100
13101 if (! tree_fits_uhwi_p (size))
13102 return NULL_TREE;
13103
13104 if (! integer_all_onesp (size))
13105 {
13106 if (! tree_fits_uhwi_p (len))
13107 {
13108 /* If LEN is not constant, try MAXLEN too.
13109 For MAXLEN only allow optimizing into non-_ocs function
13110 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13111 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13112 return NULL_TREE;
13113 }
13114 else
13115 maxlen = len;
13116
13117 if (tree_int_cst_lt (size, maxlen))
13118 return NULL_TREE;
13119 }
13120
13121 if (!init_target_chars ())
13122 return NULL_TREE;
13123
13124 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13125 or if format doesn't contain % chars or is "%s". */
13126 if (! integer_zerop (flag))
13127 {
13128 fmt_str = c_getstr (fmt);
13129 if (fmt_str == NULL)
13130 return NULL_TREE;
13131 if (strchr (fmt_str, target_percent) != NULL
13132 && strcmp (fmt_str, target_percent_s))
13133 return NULL_TREE;
13134 }
13135
13136 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13137 available. */
13138 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13139 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13140 if (!fn)
13141 return NULL_TREE;
13142
13143 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13144 }
13145
13146 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13147 a normal call should be emitted rather than expanding the function
13148 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13149 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
13150 length passed as second argument. */
13151
13152 tree
13153 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13154 enum built_in_function fcode)
13155 {
13156 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13157 CALL_EXPR_ARGP (exp), maxlen, fcode);
13158 }
13159
13160 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13161 FMT and ARG are the arguments to the call; we don't fold cases with
13162 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13163
13164 Return NULL_TREE if no simplification was possible, otherwise return the
13165 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13166 code of the function to be simplified. */
13167
13168 static tree
13169 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13170 tree arg, bool ignore,
13171 enum built_in_function fcode)
13172 {
13173 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13174 const char *fmt_str = NULL;
13175
13176 /* If the return value is used, don't do the transformation. */
13177 if (! ignore)
13178 return NULL_TREE;
13179
13180 /* Verify the required arguments in the original call. */
13181 if (!validate_arg (fmt, POINTER_TYPE))
13182 return NULL_TREE;
13183
13184 /* Check whether the format is a literal string constant. */
13185 fmt_str = c_getstr (fmt);
13186 if (fmt_str == NULL)
13187 return NULL_TREE;
13188
13189 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13190 {
13191 /* If we're using an unlocked function, assume the other
13192 unlocked functions exist explicitly. */
13193 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13194 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13195 }
13196 else
13197 {
13198 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13199 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13200 }
13201
13202 if (!init_target_chars ())
13203 return NULL_TREE;
13204
13205 if (strcmp (fmt_str, target_percent_s) == 0
13206 || strchr (fmt_str, target_percent) == NULL)
13207 {
13208 const char *str;
13209
13210 if (strcmp (fmt_str, target_percent_s) == 0)
13211 {
13212 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13213 return NULL_TREE;
13214
13215 if (!arg || !validate_arg (arg, POINTER_TYPE))
13216 return NULL_TREE;
13217
13218 str = c_getstr (arg);
13219 if (str == NULL)
13220 return NULL_TREE;
13221 }
13222 else
13223 {
13224 /* The format specifier doesn't contain any '%' characters. */
13225 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13226 && arg)
13227 return NULL_TREE;
13228 str = fmt_str;
13229 }
13230
13231 /* If the string was "", printf does nothing. */
13232 if (str[0] == '\0')
13233 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13234
13235 /* If the string has length of 1, call putchar. */
13236 if (str[1] == '\0')
13237 {
13238 /* Given printf ("c") (where c is any one character),
13239 convert "c"[0] to an int and pass that to the replacement
13240 function. */
13241 newarg = build_int_cst (integer_type_node, str[0]);
13242 if (fn_putchar)
13243 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13244 }
13245 else
13246 {
13247 /* If the string was "string\n", call puts("string"). */
13248 size_t len = strlen (str);
13249 if ((unsigned char)str[len - 1] == target_newline
13250 && (size_t) (int) len == len
13251 && (int) len > 0)
13252 {
13253 char *newstr;
13254 tree offset_node, string_cst;
13255
13256 /* Create a NUL-terminated string that's one char shorter
13257 than the original, stripping off the trailing '\n'. */
13258 newarg = build_string_literal (len, str);
13259 string_cst = string_constant (newarg, &offset_node);
13260 gcc_checking_assert (string_cst
13261 && (TREE_STRING_LENGTH (string_cst)
13262 == (int) len)
13263 && integer_zerop (offset_node)
13264 && (unsigned char)
13265 TREE_STRING_POINTER (string_cst)[len - 1]
13266 == target_newline);
13267 /* build_string_literal creates a new STRING_CST,
13268 modify it in place to avoid double copying. */
13269 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13270 newstr[len - 1] = '\0';
13271 if (fn_puts)
13272 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13273 }
13274 else
13275 /* We'd like to arrange to call fputs(string,stdout) here,
13276 but we need stdout and don't have a way to get it yet. */
13277 return NULL_TREE;
13278 }
13279 }
13280
13281 /* The other optimizations can be done only on the non-va_list variants. */
13282 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13283 return NULL_TREE;
13284
13285 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13286 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13287 {
13288 if (!arg || !validate_arg (arg, POINTER_TYPE))
13289 return NULL_TREE;
13290 if (fn_puts)
13291 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13292 }
13293
13294 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13295 else if (strcmp (fmt_str, target_percent_c) == 0)
13296 {
13297 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13298 return NULL_TREE;
13299 if (fn_putchar)
13300 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13301 }
13302
13303 if (!call)
13304 return NULL_TREE;
13305
13306 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13307 }
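
/* Summarizing the printf folds above at the source level (S and C
   are invented names):

     printf ("hello\n")    ->   puts ("hello")
     printf ("x")          ->   putchar ('x')
     printf ("%s\n", s)    ->   puts (s)
     printf ("%c", c)      ->   putchar (c)

   All of these apply only when the printf return value is ignored,
   as checked at the top of the function.  */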
13308
13309 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13310 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13311 more than 3 arguments, and ARG may be null in the 2-argument case.
13312
13313 Return NULL_TREE if no simplification was possible, otherwise return the
13314 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13315 code of the function to be simplified. */
13316
13317 static tree
13318 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13319 tree fmt, tree arg, bool ignore,
13320 enum built_in_function fcode)
13321 {
13322 tree fn_fputc, fn_fputs, call = NULL_TREE;
13323 const char *fmt_str = NULL;
13324
13325 /* If the return value is used, don't do the transformation. */
13326 if (! ignore)
13327 return NULL_TREE;
13328
13329 /* Verify the required arguments in the original call. */
13330 if (!validate_arg (fp, POINTER_TYPE))
13331 return NULL_TREE;
13332 if (!validate_arg (fmt, POINTER_TYPE))
13333 return NULL_TREE;
13334
13335 /* Check whether the format is a literal string constant. */
13336 fmt_str = c_getstr (fmt);
13337 if (fmt_str == NULL)
13338 return NULL_TREE;
13339
13340 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13341 {
13342 /* If we're using an unlocked function, assume the other
13343 unlocked functions exist explicitly. */
13344 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13345 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13346 }
13347 else
13348 {
13349 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13350 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13351 }
13352
13353 if (!init_target_chars ())
13354 return NULL_TREE;
13355
13356 /* If the format doesn't contain % args or %%, use fputs. */
13357 if (strchr (fmt_str, target_percent) == NULL)
13358 {
13359 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13360 && arg)
13361 return NULL_TREE;
13362
13363 /* If the format specifier was "", fprintf does nothing. */
13364 if (fmt_str[0] == '\0')
13365 {
13366 /* If FP has side-effects, just wait until gimplification is
13367 done. */
13368 if (TREE_SIDE_EFFECTS (fp))
13369 return NULL_TREE;
13370
13371 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13372 }
13373
13374 /* When "string" doesn't contain %, replace all cases of
13375 fprintf (fp, string) with fputs (string, fp). The fputs
13376 builtin will take care of special cases like length == 1. */
13377 if (fn_fputs)
13378 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13379 }
13380
13381 /* The other optimizations can be done only on the non-va_list variants. */
13382 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13383 return NULL_TREE;
13384
13385 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13386 else if (strcmp (fmt_str, target_percent_s) == 0)
13387 {
13388 if (!arg || !validate_arg (arg, POINTER_TYPE))
13389 return NULL_TREE;
13390 if (fn_fputs)
13391 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13392 }
13393
13394 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13395 else if (strcmp (fmt_str, target_percent_c) == 0)
13396 {
13397 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13398 return NULL_TREE;
13399 if (fn_fputc)
13400 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13401 }
13402
13403 if (!call)
13404 return NULL_TREE;
13405 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13406 }
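
/* And correspondingly for the fprintf folds above (FP, S and C are
   invented names):

     fprintf (fp, "hello")    ->   fputs ("hello", fp)
     fprintf (fp, "%s", s)    ->   fputs (s, fp)
     fprintf (fp, "%c", c)    ->   fputc (c, fp)

   again only when the return value of the call is unused.  */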
13407
13408 /* Initialize format string characters in the target charset. */
13409
13410 static bool
13411 init_target_chars (void)
13412 {
13413 static bool init;
13414 if (!init)
13415 {
13416 target_newline = lang_hooks.to_target_charset ('\n');
13417 target_percent = lang_hooks.to_target_charset ('%');
13418 target_c = lang_hooks.to_target_charset ('c');
13419 target_s = lang_hooks.to_target_charset ('s');
13420 if (target_newline == 0 || target_percent == 0 || target_c == 0
13421 || target_s == 0)
13422 return false;
13423
13424 target_percent_c[0] = target_percent;
13425 target_percent_c[1] = target_c;
13426 target_percent_c[2] = '\0';
13427
13428 target_percent_s[0] = target_percent;
13429 target_percent_s[1] = target_s;
13430 target_percent_s[2] = '\0';
13431
13432 target_percent_s_newline[0] = target_percent;
13433 target_percent_s_newline[1] = target_s;
13434 target_percent_s_newline[2] = target_newline;
13435 target_percent_s_newline[3] = '\0';
13436
13437 init = true;
13438 }
13439 return true;
13440 }
13441
13442 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13443 and no overflow/underflow occurred. INEXACT is true if M was not
13444 exactly calculated. TYPE is the tree type for the result. This
13445 function assumes that the caller cleared the MPFR flags before
13446 calculating M, so any flag set on entry to this function was
13447 raised by that calculation. Return NULL_TREE if any checks fail. */
13448
13449 static tree
13450 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13451 {
13452 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13453 overflow/underflow occurred. If -frounding-math, proceed iff the
13454 result of calling FUNC was exact. */
13455 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13456 && (!flag_rounding_math || !inexact))
13457 {
13458 REAL_VALUE_TYPE rr;
13459
13460 real_from_mpfr (&rr, m, type, GMP_RNDN);
13461 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13462 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13463 but the mpfr_t is not, then we underflowed in the
13464 conversion. */
13465 if (real_isfinite (&rr)
13466 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13467 {
13468 REAL_VALUE_TYPE rmode;
13469
13470 real_convert (&rmode, TYPE_MODE (type), &rr);
13471 /* Proceed iff the specified mode can hold the value. */
13472 if (real_identical (&rmode, &rr))
13473 return build_real (type, rmode);
13474 }
13475 }
13476 return NULL_TREE;
13477 }
13478
13479 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13480 number and no overflow/underflow occurred. INEXACT is true if M
13481 was not exactly calculated. TYPE is the tree type for the result.
13482 This function assumes that the caller cleared the MPFR flags
13483 before calculating M, so any flag set on entry was raised by
13484 that calculation. Return NULL_TREE if any checks fail; if
13485 FORCE_CONVERT is true, the checks are bypassed. */
13486
13487 static tree
13488 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13489 {
13490 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13491 overflow/underflow occurred. If -frounding-math, proceed iff the
13492 result of calling FUNC was exact. */
13493 if (force_convert
13494 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13495 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13496 && (!flag_rounding_math || !inexact)))
13497 {
13498 REAL_VALUE_TYPE re, im;
13499
13500 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13501 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13502 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13503 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13504 but the mpfr_t is not, then we underflowed in the
13505 conversion. */
13506 if (force_convert
13507 || (real_isfinite (&re) && real_isfinite (&im)
13508 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13509 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13510 {
13511 REAL_VALUE_TYPE re_mode, im_mode;
13512
13513 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13514 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13515 /* Proceed iff the specified mode can hold the value. */
13516 if (force_convert
13517 || (real_identical (&re_mode, &re)
13518 && real_identical (&im_mode, &im)))
13519 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13520 build_real (TREE_TYPE (type), im_mode));
13521 }
13522 }
13523 return NULL_TREE;
13524 }
13525
13526 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13527 FUNC on it and return the resulting value as a tree with type TYPE.
13528 If MIN and/or MAX are not NULL, then the supplied ARG must be
13529 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13530 acceptable values, otherwise they are not. The mpfr precision is
13531 set to the precision of TYPE. We assume that function FUNC returns
13532 zero if the result could be calculated exactly within the requested
13533 precision. */
13534
13535 static tree
13536 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13537 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13538 bool inclusive)
13539 {
13540 tree result = NULL_TREE;
13541
13542 STRIP_NOPS (arg);
13543
13544 /* To proceed, MPFR must exactly represent the target floating point
13545 format, which only happens when the target base equals two. */
13546 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13547 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13548 {
13549 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13550
13551 if (real_isfinite (ra)
13552 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13553 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13554 {
13555 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13556 const int prec = fmt->p;
13557 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13558 int inexact;
13559 mpfr_t m;
13560
13561 mpfr_init2 (m, prec);
13562 mpfr_from_real (m, ra, GMP_RNDN);
13563 mpfr_clear_flags ();
13564 inexact = func (m, m, rnd);
13565 result = do_mpfr_ckconv (m, type, inexact);
13566 mpfr_clear (m);
13567 }
13568 }
13569
13570 return result;
13571 }
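
/* For instance, folding __builtin_sqrt (2.0) can go through this
   helper: the REAL_CST 2.0 is converted to an mpfr_t, mpfr_sqrt
   computes the result in the precision of double, and do_mpfr_ckconv
   turns it back into a REAL_CST, so the call folds to the constant
   1.41421356...  (assuming the usual base-2 target format).  */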
13572
13573 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13574 FUNC on it and return the resulting value as a tree with type TYPE.
13575 The mpfr precision is set to the precision of TYPE. We assume that
13576 function FUNC returns zero if the result could be calculated
13577 exactly within the requested precision. */
13578
13579 static tree
13580 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13581 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13582 {
13583 tree result = NULL_TREE;
13584
13585 STRIP_NOPS (arg1);
13586 STRIP_NOPS (arg2);
13587
13588 /* To proceed, MPFR must exactly represent the target floating point
13589 format, which only happens when the target base equals two. */
13590 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13591 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13592 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13593 {
13594 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13595 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13596
13597 if (real_isfinite (ra1) && real_isfinite (ra2))
13598 {
13599 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13600 const int prec = fmt->p;
13601 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13602 int inexact;
13603 mpfr_t m1, m2;
13604
13605 mpfr_inits2 (prec, m1, m2, NULL);
13606 mpfr_from_real (m1, ra1, GMP_RNDN);
13607 mpfr_from_real (m2, ra2, GMP_RNDN);
13608 mpfr_clear_flags ();
13609 inexact = func (m1, m1, m2, rnd);
13610 result = do_mpfr_ckconv (m1, type, inexact);
13611 mpfr_clears (m1, m2, NULL);
13612 }
13613 }
13614
13615 return result;
13616 }
13617
13618 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13619 FUNC on it and return the resulting value as a tree with type TYPE.
13620 The mpfr precision is set to the precision of TYPE. We assume that
13621 function FUNC returns zero if the result could be calculated
13622 exactly within the requested precision. */
13623
13624 static tree
13625 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13626 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13627 {
13628 tree result = NULL_TREE;
13629
13630 STRIP_NOPS (arg1);
13631 STRIP_NOPS (arg2);
13632 STRIP_NOPS (arg3);
13633
13634 /* To proceed, MPFR must exactly represent the target floating point
13635 format, which only happens when the target base equals two. */
13636 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13637 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13638 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13639 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13640 {
13641 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13642 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13643 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13644
13645 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13646 {
13647 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13648 const int prec = fmt->p;
13649 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13650 int inexact;
13651 mpfr_t m1, m2, m3;
13652
13653 mpfr_inits2 (prec, m1, m2, m3, NULL);
13654 mpfr_from_real (m1, ra1, GMP_RNDN);
13655 mpfr_from_real (m2, ra2, GMP_RNDN);
13656 mpfr_from_real (m3, ra3, GMP_RNDN);
13657 mpfr_clear_flags ();
13658 inexact = func (m1, m1, m2, m3, rnd);
13659 result = do_mpfr_ckconv (m1, type, inexact);
13660 mpfr_clears (m1, m2, m3, NULL);
13661 }
13662 }
13663
13664 return result;
13665 }
13666
13667 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and store
13668 the resulting values through the pointers ARG_SINP and ARG_COSP.
13669 If ARG_SINP and ARG_COSP are NULL then the result is returned
13670 as a complex value.
13671 The type is taken from the type of ARG and is used for setting the
13672 precision of the calculation and results. */
13673
13674 static tree
13675 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13676 {
13677 tree const type = TREE_TYPE (arg);
13678 tree result = NULL_TREE;
13679
13680 STRIP_NOPS (arg);
13681
13682 /* To proceed, MPFR must exactly represent the target floating point
13683 format, which only happens when the target base equals two. */
13684 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13685 && TREE_CODE (arg) == REAL_CST
13686 && !TREE_OVERFLOW (arg))
13687 {
13688 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13689
13690 if (real_isfinite (ra))
13691 {
13692 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13693 const int prec = fmt->p;
13694 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13695 tree result_s, result_c;
13696 int inexact;
13697 mpfr_t m, ms, mc;
13698
13699 mpfr_inits2 (prec, m, ms, mc, NULL);
13700 mpfr_from_real (m, ra, GMP_RNDN);
13701 mpfr_clear_flags ();
13702 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13703 result_s = do_mpfr_ckconv (ms, type, inexact);
13704 result_c = do_mpfr_ckconv (mc, type, inexact);
13705 mpfr_clears (m, ms, mc, NULL);
13706 if (result_s && result_c)
13707 {
13708 /* If we are to return in a complex value do so. */
13709 if (!arg_sinp && !arg_cosp)
13710 return build_complex (build_complex_type (type),
13711 result_c, result_s);
13712
13713 /* Dereference the sin/cos pointer arguments. */
13714 arg_sinp = build_fold_indirect_ref (arg_sinp);
13715 arg_cosp = build_fold_indirect_ref (arg_cosp);
13716 /* Proceed iff valid pointer types were passed in. */
13717 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13718 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13719 {
13720 /* Set the values. */
13721 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13722 result_s);
13723 TREE_SIDE_EFFECTS (result_s) = 1;
13724 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13725 result_c);
13726 TREE_SIDE_EFFECTS (result_c) = 1;
13727 /* Combine the assignments into a compound expr. */
13728 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13729 result_s, result_c));
13730 }
13731 }
13732 }
13733 }
13734 return result;
13735 }
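
/* This helper serves both styles of caller: for a constant X,
   sincos (x, &s, &c) folds to the pair of stores s = sin (x) and
   c = cos (x) combined into one compound expression, while a caller
   such as cexpi passes null pointers and gets back the complex
   constant cos (x) + sin (x) * I.  */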
13736
13737 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13738 two-argument mpfr order N Bessel function FUNC on them and return
13739 the resulting value as a tree with type TYPE. The mpfr precision
13740 is set to the precision of TYPE. We assume that function FUNC
13741 returns zero if the result could be calculated exactly within the
13742 requested precision. */
13743 static tree
13744 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13745 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13746 const REAL_VALUE_TYPE *min, bool inclusive)
13747 {
13748 tree result = NULL_TREE;
13749
13750 STRIP_NOPS (arg1);
13751 STRIP_NOPS (arg2);
13752
13753 /* To proceed, MPFR must exactly represent the target floating point
13754 format, which only happens when the target base equals two. */
13755 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13756 && tree_fits_shwi_p (arg1)
13757 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13758 {
13759 const HOST_WIDE_INT n = tree_to_shwi (arg1);
13760 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13761
13762 if (n == (long)n
13763 && real_isfinite (ra)
13764 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13765 {
13766 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13767 const int prec = fmt->p;
13768 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13769 int inexact;
13770 mpfr_t m;
13771
13772 mpfr_init2 (m, prec);
13773 mpfr_from_real (m, ra, GMP_RNDN);
13774 mpfr_clear_flags ();
13775 inexact = func (m, n, m, rnd);
13776 result = do_mpfr_ckconv (m, type, inexact);
13777 mpfr_clear (m);
13778 }
13779 }
13780
13781 return result;
13782 }
13783
13784 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13785 the value pointed to by ARG_QUO and return the result. The type is taken
13786 from the type of ARG0 and is used for setting the precision of the
13787 calculation and results. */
13788
13789 static tree
13790 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13791 {
13792 tree const type = TREE_TYPE (arg0);
13793 tree result = NULL_TREE;
13794
13795 STRIP_NOPS (arg0);
13796 STRIP_NOPS (arg1);
13797
13798 /* To proceed, MPFR must exactly represent the target floating point
13799 format, which only happens when the target base equals two. */
13800 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13801 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13802 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13803 {
13804 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13805 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13806
13807 if (real_isfinite (ra0) && real_isfinite (ra1))
13808 {
13809 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13810 const int prec = fmt->p;
13811 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13812 tree result_rem;
13813 long integer_quo;
13814 mpfr_t m0, m1;
13815
13816 mpfr_inits2 (prec, m0, m1, NULL);
13817 mpfr_from_real (m0, ra0, GMP_RNDN);
13818 mpfr_from_real (m1, ra1, GMP_RNDN);
13819 mpfr_clear_flags ();
13820 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13821 /* Remquo is independent of the rounding mode, so pass
13822 inexact=0 to do_mpfr_ckconv(). */
13823 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13824 mpfr_clears (m0, m1, NULL);
13825 if (result_rem)
13826 {
13827 /* MPFR calculates quo in the host's long, so it may
13828 return more bits in quo than the target int can hold
13829 if sizeof (host long) > sizeof (target int). This can
13830 happen even for native compilers in LP64 mode. In
13831 these cases, reduce the quo value modulo the largest
13832 number that the target int can hold, leaving one bit
13833 for the sign. */
13834 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13835 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
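/* E.g., with a 64-bit host long and a 32-bit target int, the
   reduction above is modulo 2^31; C's "%" keeps the sign of the
   dividend, so a negative quotient stays negative.  */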
13836
13837 /* Dereference the quo pointer argument. */
13838 arg_quo = build_fold_indirect_ref (arg_quo);
13839 /* Proceed iff a valid pointer type was passed in. */
13840 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13841 {
13842 /* Set the value. */
13843 tree result_quo
13844 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13845 build_int_cst (TREE_TYPE (arg_quo),
13846 integer_quo));
13847 TREE_SIDE_EFFECTS (result_quo) = 1;
13848 /* Combine the quo assignment with the rem. */
13849 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13850 result_quo, result_rem));
13851 }
13852 }
13853 }
13854 }
13855 return result;
13856 }
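
/* A standalone sketch (illustrative only, not part of GCC; assumes
   <mpfr.h>) of the mpfr_remquo pattern above at double precision:
   the remainder comes back as the value and the low quotient bits
   through QUO, just as the folder stores them into *arg_quo.  */
#if 0
#include <mpfr.h>

static double
remquo_via_mpfr (double x, double y, long *quo)
{
  mpfr_t m0, m1;
  double rem;

  mpfr_inits2 (53, m0, m1, (mpfr_ptr) 0);
  mpfr_set_d (m0, x, GMP_RNDN);
  mpfr_set_d (m1, y, GMP_RNDN);
  mpfr_remquo (m0, quo, m0, m1, GMP_RNDN);
  rem = mpfr_get_d (m0, GMP_RNDN);
  mpfr_clears (m0, m1, (mpfr_ptr) 0);
  return rem;
}
#endif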
13857
13858 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13859 resulting value as a tree with type TYPE. The mpfr precision is
13860 set to the precision of TYPE. We assume that this mpfr function
13861 returns zero if the result could be calculated exactly within the
13862 requested precision. In addition, the integer pointer represented
13863 by ARG_SG will be dereferenced and set to the appropriate signgam
13864 (-1 or 1) value. */
13865
13866 static tree
13867 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13868 {
13869 tree result = NULL_TREE;
13870
13871 STRIP_NOPS (arg);
13872
13873 /* To proceed, MPFR must exactly represent the target floating point
13874 format, which only happens when the target base equals two. Also
13875 verify ARG is a constant and that ARG_SG is an int pointer. */
13876 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13877 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13878 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13879 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13880 {
13881 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13882
13883 /* In addition to NaN and Inf, the argument cannot be zero or a
13884 negative integer. */
13885 if (real_isfinite (ra)
13886 && ra->cl != rvc_zero
13887 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13888 {
13889 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13890 const int prec = fmt->p;
13891 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13892 int inexact, sg;
13893 mpfr_t m;
13894 tree result_lg;
13895
13896 mpfr_init2 (m, prec);
13897 mpfr_from_real (m, ra, GMP_RNDN);
13898 mpfr_clear_flags ();
13899 inexact = mpfr_lgamma (m, &sg, m, rnd);
13900 result_lg = do_mpfr_ckconv (m, type, inexact);
13901 mpfr_clear (m);
13902 if (result_lg)
13903 {
13904 tree result_sg;
13905
13906 /* Dereference the arg_sg pointer argument. */
13907 arg_sg = build_fold_indirect_ref (arg_sg);
13908 /* Assign the signgam value into *arg_sg. */
13909 result_sg = fold_build2 (MODIFY_EXPR,
13910 TREE_TYPE (arg_sg), arg_sg,
13911 build_int_cst (TREE_TYPE (arg_sg), sg));
13912 TREE_SIDE_EFFECTS (result_sg) = 1;
13913 /* Combine the signgam assignment with the lgamma result. */
13914 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13915 result_sg, result_lg));
13916 }
13917 }
13918 }
13919
13920 return result;
13921 }
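
/* A standalone sketch (illustrative only, not part of GCC; assumes
   <mpfr.h>) of the mpfr_lgamma pattern above: the sign of Gamma(x)
   comes back through SIGNP, mirroring the signgam store performed
   by the folder.  */
#if 0
#include <mpfr.h>

static double
lgamma_via_mpfr (double x, int *signp)
{
  mpfr_t m;
  double d;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_lgamma (m, signp, m, GMP_RNDN);
  d = mpfr_get_d (m, GMP_RNDN);
  mpfr_clear (m);
  return d;
}
#endif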
13922
13923 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13924 function FUNC on it and return the resulting value as a tree with
13925 type TYPE. The mpfr precision is set to the precision of TYPE. We
13926 assume that function FUNC returns zero if the result could be
13927 calculated exactly within the requested precision. */
13928
13929 static tree
13930 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13931 {
13932 tree result = NULL_TREE;
13933
13934 STRIP_NOPS (arg);
13935
13936 /* To proceed, MPFR must exactly represent the target floating point
13937 format, which only happens when the target base equals two. */
13938 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13939 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13940 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13941 {
13942 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13943 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13944
13945 if (real_isfinite (re) && real_isfinite (im))
13946 {
13947 const struct real_format *const fmt =
13948 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13949 const int prec = fmt->p;
13950 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13951 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13952 int inexact;
13953 mpc_t m;
13954
13955 mpc_init2 (m, prec);
13956 mpfr_from_real (mpc_realref (m), re, rnd);
13957 mpfr_from_real (mpc_imagref (m), im, rnd);
13958 mpfr_clear_flags ();
13959 inexact = func (m, m, crnd);
13960 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13961 mpc_clear (m);
13962 }
13963 }
13964
13965 return result;
13966 }
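
/* A standalone sketch (illustrative only, not part of GCC; assumes
   <mpc.h>) of the one-argument MPC pattern above, with mpc_sin as a
   stand-in for FUNC and results returned through *RRE and *RIM.  */
#if 0
#include <mpc.h>

static void
csin_via_mpc (double re, double im, double *rre, double *rim)
{
  mpc_t m;

  mpc_init2 (m, 53);
  mpc_set_d_d (m, re, im, MPC_RNDNN);
  mpc_sin (m, m, MPC_RNDNN);
  *rre = mpfr_get_d (mpc_realref (m), GMP_RNDN);
  *rim = mpfr_get_d (mpc_imagref (m), GMP_RNDN);
  mpc_clear (m);
}
#endif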
13967
13968 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
13969 mpc function FUNC on them and return the resulting value as a tree
13970 with type TYPE. The mpfr precision is set to the precision of
13971 TYPE. We assume that function FUNC returns zero if the result
13972 could be calculated exactly within the requested precision. If
13973 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13974 in the arguments and/or results. */
13975
13976 tree
13977 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13978 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13979 {
13980 tree result = NULL_TREE;
13981
13982 STRIP_NOPS (arg0);
13983 STRIP_NOPS (arg1);
13984
13985 /* To proceed, MPFR must exactly represent the target floating point
13986 format, which only happens when the target base equals two. */
13987 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13988 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13989 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13990 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13991 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13992 {
13993 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13994 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13995 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13996 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13997
13998 if (do_nonfinite
13999 || (real_isfinite (re0) && real_isfinite (im0)
14000 && real_isfinite (re1) && real_isfinite (im1)))
14001 {
14002 const struct real_format *const fmt =
14003 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14004 const int prec = fmt->p;
14005 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14006 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14007 int inexact;
14008 mpc_t m0, m1;
14009
14010 mpc_init2 (m0, prec);
14011 mpc_init2 (m1, prec);
14012 mpfr_from_real (mpc_realref (m0), re0, rnd);
14013 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14014 mpfr_from_real (mpc_realref (m1), re1, rnd);
14015 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14016 mpfr_clear_flags ();
14017 inexact = func (m0, m0, m1, crnd);
14018 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14019 mpc_clear (m0);
14020 mpc_clear (m1);
14021 }
14022 }
14023
14024 return result;
14025 }
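
/* A standalone sketch (illustrative only, not part of GCC; assumes
   <mpc.h>) of the two-argument MPC pattern above, with mpc_pow as a
   stand-in for FUNC.  */
#if 0
#include <mpc.h>

static void
cpow_via_mpc (double re0, double im0, double re1, double im1,
              double *rre, double *rim)
{
  mpc_t m0, m1;

  mpc_init2 (m0, 53);
  mpc_init2 (m1, 53);
  mpc_set_d_d (m0, re0, im0, MPC_RNDNN);
  mpc_set_d_d (m1, re1, im1, MPC_RNDNN);
  mpc_pow (m0, m0, m1, MPC_RNDNN);
  *rre = mpfr_get_d (mpc_realref (m0), GMP_RNDN);
  *rim = mpfr_get_d (mpc_imagref (m0), GMP_RNDN);
  mpc_clear (m0);
  mpc_clear (m1);
}
#endif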
14026
14027 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14028 a normal call should be emitted rather than expanding the function
14029 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14030
14031 static tree
14032 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14033 {
14034 int nargs = gimple_call_num_args (stmt);
14035
14036 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14037 (nargs > 0
14038 ? gimple_call_arg_ptr (stmt, 0)
14039 : &error_mark_node), fcode);
14040 }
14041
14042 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
14043 a normal call should be emitted rather than expanding the function
14044 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14045 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
14046 length passed as the second argument. */
14047
14048 tree
14049 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14050 enum built_in_function fcode)
14051 {
14052 int nargs = gimple_call_num_args (stmt);
14053
14054 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14055 (nargs > 0
14056 ? gimple_call_arg_ptr (stmt, 0)
14057 : &error_mark_node), maxlen, fcode);
14058 }
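
/* For instance, with a known "%s" format a checked call such as

     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
                              "%s", s);

   can be folded to the equivalent of strcpy (buf, s), dropping the
   runtime check.  (Illustrative summary; the exact conditions live in
   fold_builtin_sprintf_chk_1 and fold_builtin_snprintf_chk_1.)  */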
14059
14060 /* Builtins with folding operations that operate on "..." arguments
14061 need special handling; we need to store the arguments in a convenient
14062 data structure before attempting any folding. Fortunately there are
14063 only a few builtins that fall into this category. FNDECL is the
14064 function, STMT is the GIMPLE call statement, and IGNORE is true if the
14065 result of the function call is ignored. */
14066
14067 static tree
14068 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14069 bool ignore ATTRIBUTE_UNUSED)
14070 {
14071 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14072 tree ret = NULL_TREE;
14073
14074 switch (fcode)
14075 {
14076 case BUILT_IN_SPRINTF_CHK:
14077 case BUILT_IN_VSPRINTF_CHK:
14078 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14079 break;
14080
14081 case BUILT_IN_SNPRINTF_CHK:
14082 case BUILT_IN_VSNPRINTF_CHK:
14083 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
break;
14084
14085 default:
14086 break;
14087 }
14088 if (ret)
14089 {
14090 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14091 TREE_NO_WARNING (ret) = 1;
14092 return ret;
14093 }
14094 return NULL_TREE;
14095 }
14096
14097 /* A wrapper function for builtin folding that prevents warnings for
14098 "statement without effect" and the like, caused by removing the
14099 call node before the warning is generated. */
14100
14101 tree
14102 fold_call_stmt (gimple stmt, bool ignore)
14103 {
14104 tree ret = NULL_TREE;
14105 tree fndecl = gimple_call_fndecl (stmt);
14106 location_t loc = gimple_location (stmt);
14107 if (fndecl
14108 && TREE_CODE (fndecl) == FUNCTION_DECL
14109 && DECL_BUILT_IN (fndecl)
14110 && !gimple_call_va_arg_pack_p (stmt))
14111 {
14112 int nargs = gimple_call_num_args (stmt);
14113 tree *args = (nargs > 0
14114 ? gimple_call_arg_ptr (stmt, 0)
14115 : &error_mark_node);
14116
14117 if (avoid_folding_inline_builtin (fndecl))
14118 return NULL_TREE;
14119 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14120 {
14121 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14122 }
14123 else
14124 {
14125 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14126 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14127 if (!ret)
14128 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14129 if (ret)
14130 {
14131 /* Propagate location information from original call to
14132 expansion of builtin. Otherwise things like
14133 maybe_emit_chk_warning, that operate on the expansion
14134 of a builtin, will use the wrong location information. */
14135 if (gimple_has_location (stmt))
14136 {
14137 tree realret = ret;
14138 if (TREE_CODE (ret) == NOP_EXPR)
14139 realret = TREE_OPERAND (ret, 0);
14140 if (CAN_HAVE_LOCATION_P (realret)
14141 && !EXPR_HAS_LOCATION (realret))
14142 SET_EXPR_LOCATION (realret, loc);
14143 return realret;
14144 }
14145 return ret;
14146 }
14147 }
14148 }
14149 return NULL_TREE;
14150 }
14151
14152 /* Look up the function that corresponds to DECL via
14153 builtin_decl_explicit and set ASMSPEC as its user assembler name.
14154 DECL must be a function decl that declares a builtin. */
14155
14156 void
14157 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14158 {
14159 tree builtin;
14160 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14161 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14162 && asmspec != 0);
14163
14164 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14165 set_user_assembler_name (builtin, asmspec);
14166 switch (DECL_FUNCTION_CODE (decl))
14167 {
14168 case BUILT_IN_MEMCPY:
14169 init_block_move_fn (asmspec);
14170 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14171 break;
14172 case BUILT_IN_MEMSET:
14173 init_block_clear_fn (asmspec);
14174 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14175 break;
14176 case BUILT_IN_MEMMOVE:
14177 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14178 break;
14179 case BUILT_IN_MEMCMP:
14180 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14181 break;
14182 case BUILT_IN_ABORT:
14183 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14184 break;
14185 case BUILT_IN_FFS:
14186 if (INT_TYPE_SIZE < BITS_PER_WORD)
14187 {
14188 set_user_assembler_libfunc ("ffs", asmspec);
14189 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14190 MODE_INT, 0), "ffs");
14191 }
14192 break;
14193 default:
14194 break;
14195 }
14196 }
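
/* For example, a user redeclaration such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   makes both block-move expansion and the memcpy libfunc emit calls
   to my_memcpy instead of memcpy.  (Illustrative only.)  */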
14197
14198 /* Return true if DECL is a builtin that expands to a constant or similarly
14199 simple code. */
14200 bool
14201 is_simple_builtin (tree decl)
14202 {
14203 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14204 switch (DECL_FUNCTION_CODE (decl))
14205 {
14206 /* Builtins that expand to constants. */
14207 case BUILT_IN_CONSTANT_P:
14208 case BUILT_IN_EXPECT:
14209 case BUILT_IN_OBJECT_SIZE:
14210 case BUILT_IN_UNREACHABLE:
14211 /* Simple register moves or loads from stack. */
14212 case BUILT_IN_ASSUME_ALIGNED:
14213 case BUILT_IN_RETURN_ADDRESS:
14214 case BUILT_IN_EXTRACT_RETURN_ADDR:
14215 case BUILT_IN_FROB_RETURN_ADDR:
14216 case BUILT_IN_RETURN:
14217 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14218 case BUILT_IN_FRAME_ADDRESS:
14219 case BUILT_IN_VA_END:
14220 case BUILT_IN_STACK_SAVE:
14221 case BUILT_IN_STACK_RESTORE:
14222 /* Exception state returns or moves registers around. */
14223 case BUILT_IN_EH_FILTER:
14224 case BUILT_IN_EH_POINTER:
14225 case BUILT_IN_EH_COPY_VALUES:
14226 return true;
14227
14228 default:
14229 return false;
14230 }
14231
14232 return false;
14233 }
14234
14235 /* Return true if DECL is a builtin that is not expensive, i.e., one that
14236 is most probably expanded inline into reasonably simple code. This is a
14237 superset of is_simple_builtin. */
14238 bool
14239 is_inexpensive_builtin (tree decl)
14240 {
14241 if (!decl)
14242 return false;
14243 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14244 return true;
14245 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14246 switch (DECL_FUNCTION_CODE (decl))
14247 {
14248 case BUILT_IN_ABS:
14249 case BUILT_IN_ALLOCA:
14250 case BUILT_IN_ALLOCA_WITH_ALIGN:
14251 case BUILT_IN_BSWAP16:
14252 case BUILT_IN_BSWAP32:
14253 case BUILT_IN_BSWAP64:
14254 case BUILT_IN_CLZ:
14255 case BUILT_IN_CLZIMAX:
14256 case BUILT_IN_CLZL:
14257 case BUILT_IN_CLZLL:
14258 case BUILT_IN_CTZ:
14259 case BUILT_IN_CTZIMAX:
14260 case BUILT_IN_CTZL:
14261 case BUILT_IN_CTZLL:
14262 case BUILT_IN_FFS:
14263 case BUILT_IN_FFSIMAX:
14264 case BUILT_IN_FFSL:
14265 case BUILT_IN_FFSLL:
14266 case BUILT_IN_IMAXABS:
14267 case BUILT_IN_FINITE:
14268 case BUILT_IN_FINITEF:
14269 case BUILT_IN_FINITEL:
14270 case BUILT_IN_FINITED32:
14271 case BUILT_IN_FINITED64:
14272 case BUILT_IN_FINITED128:
14273 case BUILT_IN_FPCLASSIFY:
14274 case BUILT_IN_ISFINITE:
14275 case BUILT_IN_ISINF_SIGN:
14276 case BUILT_IN_ISINF:
14277 case BUILT_IN_ISINFF:
14278 case BUILT_IN_ISINFL:
14279 case BUILT_IN_ISINFD32:
14280 case BUILT_IN_ISINFD64:
14281 case BUILT_IN_ISINFD128:
14282 case BUILT_IN_ISNAN:
14283 case BUILT_IN_ISNANF:
14284 case BUILT_IN_ISNANL:
14285 case BUILT_IN_ISNAND32:
14286 case BUILT_IN_ISNAND64:
14287 case BUILT_IN_ISNAND128:
14288 case BUILT_IN_ISNORMAL:
14289 case BUILT_IN_ISGREATER:
14290 case BUILT_IN_ISGREATEREQUAL:
14291 case BUILT_IN_ISLESS:
14292 case BUILT_IN_ISLESSEQUAL:
14293 case BUILT_IN_ISLESSGREATER:
14294 case BUILT_IN_ISUNORDERED:
14295 case BUILT_IN_VA_ARG_PACK:
14296 case BUILT_IN_VA_ARG_PACK_LEN:
14297 case BUILT_IN_VA_COPY:
14298 case BUILT_IN_TRAP:
14299 case BUILT_IN_SAVEREGS:
14300 case BUILT_IN_POPCOUNTL:
14301 case BUILT_IN_POPCOUNTLL:
14302 case BUILT_IN_POPCOUNTIMAX:
14303 case BUILT_IN_POPCOUNT:
14304 case BUILT_IN_PARITYL:
14305 case BUILT_IN_PARITYLL:
14306 case BUILT_IN_PARITYIMAX:
14307 case BUILT_IN_PARITY:
14308 case BUILT_IN_LABS:
14309 case BUILT_IN_LLABS:
14310 case BUILT_IN_PREFETCH:
14311 return true;
14312
14313 default:
14314 return is_simple_builtin (decl);
14315 }
14316
14317 return false;
14318 }