/* Expand builtin functions.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "gimple.h"
34 #include "flags.h"
35 #include "regs.h"
36 #include "hard-reg-set.h"
37 #include "except.h"
38 #include "function.h"
39 #include "insn-config.h"
40 #include "expr.h"
41 #include "optabs.h"
42 #include "libfuncs.h"
43 #include "recog.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "predict.h"
47 #include "tm_p.h"
48 #include "target.h"
49 #include "langhooks.h"
50 #include "basic-block.h"
51 #include "tree-ssanames.h"
52 #include "tree-dfa.h"
53 #include "value-prof.h"
54 #include "diagnostic-core.h"
55 #include "builtins.h"
56 #include "ubsan.h"
57 #include "cilk.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Set up an array of _DECL trees; make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx,
				       enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree,
				    bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree,
				      tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree,
				      enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool,
				 enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *,
			  bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr,
				  mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr,
				  mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_,
   or, when Cilk Plus is enabled, is one of the Cilk runtime entry
   points __cilkrts_detach or __cilkrts_pop_frame.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_enable_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
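
/* A quick illustration of the predicate above (results assume
   -fcilkplus is not in effect):

     is_builtin_name ("__builtin_memcpy")      => true
     is_builtin_name ("__sync_fetch_and_add")  => true
     is_builtin_name ("__atomic_load_n")       => true
     is_builtin_name ("memcpy")                => false  */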


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that C99 functions are present at run time,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
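
/* Worked example of the adjustment above: if get_object_alignment_1
   reports align == 64 and bitpos == 16, the address sits 2 bytes past
   a 64-bit boundary, so the largest power of two certain to divide it
   is bitpos & -bitpos == 16 bits, i.e. 2-byte alignment.  */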

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
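
/* As an illustration of the INTEGER_CST case above: assuming the
   front end folds a cast like (char *) 6 to a pointer-typed
   INTEGER_CST, on an 8-bit-byte target it yields
   *bitposp == 6 * BITS_PER_UNIT == 48 (masked by
   BIGGEST_ALIGNMENT - 1), from which callers can tell the pointer is
   2-byte but not 4-byte aligned.  */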

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
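
/* Some concrete cases of the above: for "hello" this returns
   ssize_int (5); for "foo\0bar" with constant offset 4 it returns 3
   (strlen starting at "bar"); and for "foo\0bar" with a non-constant
   offset it returns NULL_TREE, because of the interior zero byte.  */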

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
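
/* E.g. for the tree form of "hello" + 2 this returns the host pointer
   to "llo"; an offset that lies beyond the string constant yields 0.  */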

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
  unsigned int len
    = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
      / HOST_BITS_PER_WIDE_INT;

  for (i = 0; i < len; i++)
    tmp[i] = 0;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
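
/* On a little-endian target with 8-bit bytes, for example,
   c_readstr ("abcd", SImode) packs 'a' (0x61) into bits 0-7,
   'b' into bits 8-15 and so on, producing the constant 0x64636261;
   once a zero byte is seen, CH stays zero for the remaining
   positions, so shorter strings are zero-extended.  */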

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
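
/* To summarize the layout established above, in units of
   GET_MODE_SIZE (Pmode): word 0 of the buffer holds the frame value,
   word 1 the address of RECEIVER_LABEL, and words 2 and up the
   machine-dependent stack save area.  expand_builtin_longjmp and
   expand_builtin_update_setjmp_buf below read back from these same
   offsets.  */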

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
	= ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must block
     (frame-related) register values to be used across this code.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink; if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
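
/* Typical uses of validate_arglist, taken from the expanders below:
   __builtin_nonlocal_goto checks its argument list with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   i.e. exactly two pointer arguments, while __builtin_prefetch uses

     validate_arglist (exp, POINTER_TYPE, 0)

   i.e. one pointer followed by any number of further arguments.  */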

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
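
/* So, for example, __builtin_prefetch (p) behaves like
   __builtin_prefetch (p, 0, 3): a read prefetch with maximal
   locality.  An out-of-range constant for either flag degrades to
   zero with a warning rather than an error.  */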

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
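
/* The rounding in the loop above is ordinary alignment arithmetic:
   for instance, with size == 12 and a register mode that needs 8-byte
   alignment, CEIL (12, 8) * 8 bumps the offset to 16 before that
   register's slot is reserved.  */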

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer to the arguments as the caller actually passed
     them to us, not as we might have pretended they were passed.  Make
     sure it's a valid operand, as emit_move_insn isn't expected to handle
     a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1583
1584 /* Perform an untyped call and save the state required to perform an
1585 untyped return of whatever value was returned by the given function. */
1586
1587 static rtx
1588 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1589 {
1590 int size, align, regno;
1591 enum machine_mode mode;
1592 rtx incoming_args, result, reg, dest, src, call_insn;
1593 rtx old_stack_level = 0;
1594 rtx call_fusage = 0;
1595 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1596
1597 arguments = convert_memory_address (Pmode, arguments);
1598
1599 /* Create a block where the return registers can be saved. */
1600 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1601
1602 /* Fetch the arg pointer from the ARGUMENTS block. */
1603 incoming_args = gen_reg_rtx (Pmode);
1604 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1605 #ifndef STACK_GROWS_DOWNWARD
1606 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1607 incoming_args, 0, OPTAB_LIB_WIDEN);
1608 #endif
1609
1610 /* Push a new argument block and copy the arguments. Do not allow
1611 the (potential) memcpy call below to interfere with our stack
1612 manipulations. */
1613 do_pending_stack_adjust ();
1614 NO_DEFER_POP;
1615
1616 /* Save the stack with nonlocal if available. */
1617 #ifdef HAVE_save_stack_nonlocal
1618 if (HAVE_save_stack_nonlocal)
1619 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1620 else
1621 #endif
1622 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1623
1624 /* Allocate a block of memory onto the stack and copy the memory
1625 arguments to the outgoing arguments address. We can pass TRUE
1626 as the 4th argument because we just saved the stack pointer
1627 and will restore it right after the call. */
1628 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1629
1630 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1631 may have already set current_function_calls_alloca to true.
1632 current_function_calls_alloca won't be set if argsize is zero,
1633 so we have to guarantee need_drap is true here. */
1634 if (SUPPORTS_STACK_ALIGNMENT)
1635 crtl->need_drap = true;
1636
1637 dest = virtual_outgoing_args_rtx;
1638 #ifndef STACK_GROWS_DOWNWARD
1639 if (CONST_INT_P (argsize))
1640 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1641 else
1642 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1643 #endif
1644 dest = gen_rtx_MEM (BLKmode, dest);
1645 set_mem_align (dest, PARM_BOUNDARY);
1646 src = gen_rtx_MEM (BLKmode, incoming_args);
1647 set_mem_align (src, PARM_BOUNDARY);
1648 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1649
1650 /* Refer to the argument block. */
1651 apply_args_size ();
1652 arguments = gen_rtx_MEM (BLKmode, arguments);
1653 set_mem_align (arguments, PARM_BOUNDARY);
1654
1655 /* Walk past the arg-pointer and structure value address. */
1656 size = GET_MODE_SIZE (Pmode);
1657 if (struct_value)
1658 size += GET_MODE_SIZE (Pmode);
1659
1660 /* Restore each of the registers previously saved. Make USE insns
1661 for each of these registers for use in making the call. */
1662 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1663 if ((mode = apply_args_mode[regno]) != VOIDmode)
1664 {
1665 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1666 if (size % align != 0)
1667 size = CEIL (size, align) * align;
1668 reg = gen_rtx_REG (mode, regno);
1669 emit_move_insn (reg, adjust_address (arguments, mode, size));
1670 use_reg (&call_fusage, reg);
1671 size += GET_MODE_SIZE (mode);
1672 }
1673
1674 /* Restore the structure value address unless this is passed as an
1675 "invisible" first argument. */
1676 size = GET_MODE_SIZE (Pmode);
1677 if (struct_value)
1678 {
1679 rtx value = gen_reg_rtx (Pmode);
1680 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1681 emit_move_insn (struct_value, value);
1682 if (REG_P (struct_value))
1683 use_reg (&call_fusage, struct_value);
1684 size += GET_MODE_SIZE (Pmode);
1685 }
1686
1687 /* All arguments and registers used for the call are set up by now! */
1688 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1689
1690 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1691 work is needed, and we don't want to load it into a register as an
1692 optimization, because prepare_call_address already did that if needed. */
1693 if (GET_CODE (function) != SYMBOL_REF)
1694 function = memory_address (FUNCTION_MODE, function);
1695
1696 /* Generate the actual call instruction and save the return value. */
1697 #ifdef HAVE_untyped_call
1698 if (HAVE_untyped_call)
1699 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1700 result, result_vector (1, result)));
1701 else
1702 #endif
1703 #ifdef HAVE_call_value
1704 if (HAVE_call_value)
1705 {
1706 rtx valreg = 0;
1707
1708 /* Locate the unique return register. It is not possible to
1709 express a call that sets more than one return register using
1710 call_value; use untyped_call for that. In fact, untyped_call
1711 only needs to save the return registers in the given block. */
1712 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1713 if ((mode = apply_result_mode[regno]) != VOIDmode)
1714 {
1715 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1716
1717 valreg = gen_rtx_REG (mode, regno);
1718 }
1719
1720 emit_call_insn (GEN_CALL_VALUE (valreg,
1721 gen_rtx_MEM (FUNCTION_MODE, function),
1722 const0_rtx, NULL_RTX, const0_rtx));
1723
1724 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1725 }
1726 else
1727 #endif
1728 gcc_unreachable ();
1729
1730 /* Find the CALL insn we just emitted, and attach the register usage
1731 information. */
1732 call_insn = last_call_insn ();
1733 add_function_usage_to (call_insn, call_fusage);
1734
1735 /* Restore the stack. */
1736 #ifdef HAVE_save_stack_nonlocal
1737 if (HAVE_save_stack_nonlocal)
1738 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1739 else
1740 #endif
1741 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1742 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1743
1744 OK_DEFER_POP;
1745
1746 /* Return the address of the result block. */
1747 result = copy_addr_to_reg (XEXP (result, 0));
1748 return convert_memory_address (ptr_mode, result);
1749 }
1750
1751 /* Perform an untyped return. */
1752
1753 static void
1754 expand_builtin_return (rtx result)
1755 {
1756 int size, align, regno;
1757 enum machine_mode mode;
1758 rtx reg;
1759 rtx call_fusage = 0;
1760
1761 result = convert_memory_address (Pmode, result);
1762
1763 apply_result_size ();
1764 result = gen_rtx_MEM (BLKmode, result);
1765
1766 #ifdef HAVE_untyped_return
1767 if (HAVE_untyped_return)
1768 {
1769 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1770 emit_barrier ();
1771 return;
1772 }
1773 #endif
1774
1775 /* Restore the return value and note that each value is used. */
1776 size = 0;
1777 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1778 if ((mode = apply_result_mode[regno]) != VOIDmode)
1779 {
1780 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1781 if (size % align != 0)
1782 size = CEIL (size, align) * align;
1783 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1784 emit_move_insn (reg, adjust_address (result, mode, size));
1785
1786 push_to_sequence (call_fusage);
1787 emit_use (reg);
1788 call_fusage = get_insns ();
1789 end_sequence ();
1790 size += GET_MODE_SIZE (mode);
1791 }
1792
1793 /* Put the USE insns before the return. */
1794 emit_insn (call_fusage);
1795
1796 /* Return whatever value was restored by jumping directly to the end
1797 of the function. */
1798 expand_naked_return ();
1799 }
1800
1801 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1802
1803 static enum type_class
1804 type_to_class (tree type)
1805 {
1806 switch (TREE_CODE (type))
1807 {
1808 case VOID_TYPE: return void_type_class;
1809 case INTEGER_TYPE: return integer_type_class;
1810 case ENUMERAL_TYPE: return enumeral_type_class;
1811 case BOOLEAN_TYPE: return boolean_type_class;
1812 case POINTER_TYPE: return pointer_type_class;
1813 case REFERENCE_TYPE: return reference_type_class;
1814 case OFFSET_TYPE: return offset_type_class;
1815 case REAL_TYPE: return real_type_class;
1816 case COMPLEX_TYPE: return complex_type_class;
1817 case FUNCTION_TYPE: return function_type_class;
1818 case METHOD_TYPE: return method_type_class;
1819 case RECORD_TYPE: return record_type_class;
1820 case UNION_TYPE:
1821 case QUAL_UNION_TYPE: return union_type_class;
1822 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1823 ? string_type_class : array_type_class);
1824 case LANG_TYPE: return lang_type_class;
1825 default: return no_type_class;
1826 }
1827 }
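
/* With the classification above, e.g. __builtin_classify_type (1)
   evaluates to integer_type_class, __builtin_classify_type (1.0) to
   real_type_class, and __builtin_classify_type ("") to
   pointer_type_class, since in C an array argument decays to a
   pointer. */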
1828
1829 /* Expand a call EXP to __builtin_classify_type. */
1830
1831 static rtx
1832 expand_builtin_classify_type (tree exp)
1833 {
1834 if (call_expr_nargs (exp))
1835 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1836 return GEN_INT (no_type_class);
1837 }
1838
1839 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1840 determines which among a set of three builtin math functions is
1841 appropriate for a given type mode. The `F' and `L' cases are
1842 automatically generated from the `double' case. */
1843 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1844 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1845 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1846 fcodel = BUILT_IN_MATHFN##L ; break;
1847 /* Similar to above, but appends _R after any F/L suffix. */
1848 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1849 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1850 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1851 fcodel = BUILT_IN_MATHFN##L_R ; break;
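
/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   covering the double, float and long double variants at once. */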
1852
1853 /* Return the mathematical function equivalent to FN but operating
1854 directly on TYPE, if available. If IMPLICIT_P is true use the
1855 implicit builtin declaration, otherwise use the explicit declaration.
1856 If we can't do the conversion, return zero. */
1857
1858 static tree
1859 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1860 {
1861 enum built_in_function fcode, fcodef, fcodel, fcode2;
1862
1863 switch (fn)
1864 {
1865 CASE_MATHFN (BUILT_IN_ACOS)
1866 CASE_MATHFN (BUILT_IN_ACOSH)
1867 CASE_MATHFN (BUILT_IN_ASIN)
1868 CASE_MATHFN (BUILT_IN_ASINH)
1869 CASE_MATHFN (BUILT_IN_ATAN)
1870 CASE_MATHFN (BUILT_IN_ATAN2)
1871 CASE_MATHFN (BUILT_IN_ATANH)
1872 CASE_MATHFN (BUILT_IN_CBRT)
1873 CASE_MATHFN (BUILT_IN_CEIL)
1874 CASE_MATHFN (BUILT_IN_CEXPI)
1875 CASE_MATHFN (BUILT_IN_COPYSIGN)
1876 CASE_MATHFN (BUILT_IN_COS)
1877 CASE_MATHFN (BUILT_IN_COSH)
1878 CASE_MATHFN (BUILT_IN_DREM)
1879 CASE_MATHFN (BUILT_IN_ERF)
1880 CASE_MATHFN (BUILT_IN_ERFC)
1881 CASE_MATHFN (BUILT_IN_EXP)
1882 CASE_MATHFN (BUILT_IN_EXP10)
1883 CASE_MATHFN (BUILT_IN_EXP2)
1884 CASE_MATHFN (BUILT_IN_EXPM1)
1885 CASE_MATHFN (BUILT_IN_FABS)
1886 CASE_MATHFN (BUILT_IN_FDIM)
1887 CASE_MATHFN (BUILT_IN_FLOOR)
1888 CASE_MATHFN (BUILT_IN_FMA)
1889 CASE_MATHFN (BUILT_IN_FMAX)
1890 CASE_MATHFN (BUILT_IN_FMIN)
1891 CASE_MATHFN (BUILT_IN_FMOD)
1892 CASE_MATHFN (BUILT_IN_FREXP)
1893 CASE_MATHFN (BUILT_IN_GAMMA)
1894 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1895 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1896 CASE_MATHFN (BUILT_IN_HYPOT)
1897 CASE_MATHFN (BUILT_IN_ILOGB)
1898 CASE_MATHFN (BUILT_IN_ICEIL)
1899 CASE_MATHFN (BUILT_IN_IFLOOR)
1900 CASE_MATHFN (BUILT_IN_INF)
1901 CASE_MATHFN (BUILT_IN_IRINT)
1902 CASE_MATHFN (BUILT_IN_IROUND)
1903 CASE_MATHFN (BUILT_IN_ISINF)
1904 CASE_MATHFN (BUILT_IN_J0)
1905 CASE_MATHFN (BUILT_IN_J1)
1906 CASE_MATHFN (BUILT_IN_JN)
1907 CASE_MATHFN (BUILT_IN_LCEIL)
1908 CASE_MATHFN (BUILT_IN_LDEXP)
1909 CASE_MATHFN (BUILT_IN_LFLOOR)
1910 CASE_MATHFN (BUILT_IN_LGAMMA)
1911 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1912 CASE_MATHFN (BUILT_IN_LLCEIL)
1913 CASE_MATHFN (BUILT_IN_LLFLOOR)
1914 CASE_MATHFN (BUILT_IN_LLRINT)
1915 CASE_MATHFN (BUILT_IN_LLROUND)
1916 CASE_MATHFN (BUILT_IN_LOG)
1917 CASE_MATHFN (BUILT_IN_LOG10)
1918 CASE_MATHFN (BUILT_IN_LOG1P)
1919 CASE_MATHFN (BUILT_IN_LOG2)
1920 CASE_MATHFN (BUILT_IN_LOGB)
1921 CASE_MATHFN (BUILT_IN_LRINT)
1922 CASE_MATHFN (BUILT_IN_LROUND)
1923 CASE_MATHFN (BUILT_IN_MODF)
1924 CASE_MATHFN (BUILT_IN_NAN)
1925 CASE_MATHFN (BUILT_IN_NANS)
1926 CASE_MATHFN (BUILT_IN_NEARBYINT)
1927 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1928 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1929 CASE_MATHFN (BUILT_IN_POW)
1930 CASE_MATHFN (BUILT_IN_POWI)
1931 CASE_MATHFN (BUILT_IN_POW10)
1932 CASE_MATHFN (BUILT_IN_REMAINDER)
1933 CASE_MATHFN (BUILT_IN_REMQUO)
1934 CASE_MATHFN (BUILT_IN_RINT)
1935 CASE_MATHFN (BUILT_IN_ROUND)
1936 CASE_MATHFN (BUILT_IN_SCALB)
1937 CASE_MATHFN (BUILT_IN_SCALBLN)
1938 CASE_MATHFN (BUILT_IN_SCALBN)
1939 CASE_MATHFN (BUILT_IN_SIGNBIT)
1940 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1941 CASE_MATHFN (BUILT_IN_SIN)
1942 CASE_MATHFN (BUILT_IN_SINCOS)
1943 CASE_MATHFN (BUILT_IN_SINH)
1944 CASE_MATHFN (BUILT_IN_SQRT)
1945 CASE_MATHFN (BUILT_IN_TAN)
1946 CASE_MATHFN (BUILT_IN_TANH)
1947 CASE_MATHFN (BUILT_IN_TGAMMA)
1948 CASE_MATHFN (BUILT_IN_TRUNC)
1949 CASE_MATHFN (BUILT_IN_Y0)
1950 CASE_MATHFN (BUILT_IN_Y1)
1951 CASE_MATHFN (BUILT_IN_YN)
1952
1953 default:
1954 return NULL_TREE;
1955 }
1956
1957 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1958 fcode2 = fcode;
1959 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1960 fcode2 = fcodef;
1961 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1962 fcode2 = fcodel;
1963 else
1964 return NULL_TREE;
1965
1966 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1967 return NULL_TREE;
1968
1969 return builtin_decl_explicit (fcode2);
1970 }
1971
1972 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1973
1974 tree
1975 mathfn_built_in (tree type, enum built_in_function fn)
1976 {
1977 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1978 }
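
/* E.g. mathfn_built_in (float_type_node, BUILT_IN_SIN) yields the decl
   for sinf when the implicit builtin is available; this is how folders
   can rewrite (float) sin ((double) x) into sinf (x). */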
1979
1980 /* If errno must be maintained, expand the RTL to check if the result,
1981 TARGET, of a built-in function call, EXP, is NaN, and if so set
1982 errno to EDOM. */
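
/* E.g. sqrt (-1.0) produces a NaN; since a NaN compares unequal to
   itself, the self-comparison emitted below falls through to the code
   that sets errno (or re-issues the library call), while any ordered
   result jumps past it. */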
1983
1984 static void
1985 expand_errno_check (tree exp, rtx target)
1986 {
1987 rtx lab = gen_label_rtx ();
1988
1989 /* Test the result; if it is NaN, set errno=EDOM because
1990 the argument was not in the domain. */
1991 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1992 NULL_RTX, NULL_RTX, lab,
1993 /* The jump is very likely. */
1994 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1995
1996 #ifdef TARGET_EDOM
1997 /* If this built-in doesn't throw an exception, set errno directly. */
1998 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1999 {
2000 #ifdef GEN_ERRNO_RTX
2001 rtx errno_rtx = GEN_ERRNO_RTX;
2002 #else
2003 rtx errno_rtx
2004 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2005 #endif
2006 emit_move_insn (errno_rtx,
2007 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2008 emit_label (lab);
2009 return;
2010 }
2011 #endif
2012
2013 /* Make sure the library call isn't expanded as a tail call. */
2014 CALL_EXPR_TAILCALL (exp) = 0;
2015
2016 /* We can't set errno=EDOM directly; let the library call do it.
2017 Pop the arguments right away in case the call gets deleted. */
2018 NO_DEFER_POP;
2019 expand_call (exp, target, 0);
2020 OK_DEFER_POP;
2021 emit_label (lab);
2022 }
2023
2024 /* Expand a call to one of the builtin unary math functions (sqrt, exp, log, and the like).
2025 Return NULL_RTX if a normal call should be emitted rather than expanding
2026 the function in-line. EXP is the expression that is a call to the builtin
2027 function; if convenient, the result should be placed in TARGET.
2028 SUBTARGET may be used as the target for computing one of EXP's operands. */
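
/* For example, on a target providing an SFmode sqrt insn,
   __builtin_sqrtf (x) expands to that insn, plus an expand_errno_check
   sequence when -fmath-errno is in effect and x may be negative; on
   targets without such an insn a normal call to sqrtf is emitted. */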
2029
2030 static rtx
2031 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2032 {
2033 optab builtin_optab;
2034 rtx op0, insns;
2035 tree fndecl = get_callee_fndecl (exp);
2036 enum machine_mode mode;
2037 bool errno_set = false;
2038 bool try_widening = false;
2039 tree arg;
2040
2041 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2042 return NULL_RTX;
2043
2044 arg = CALL_EXPR_ARG (exp, 0);
2045
2046 switch (DECL_FUNCTION_CODE (fndecl))
2047 {
2048 CASE_FLT_FN (BUILT_IN_SQRT):
2049 errno_set = ! tree_expr_nonnegative_p (arg);
2050 try_widening = true;
2051 builtin_optab = sqrt_optab;
2052 break;
2053 CASE_FLT_FN (BUILT_IN_EXP):
2054 errno_set = true; builtin_optab = exp_optab; break;
2055 CASE_FLT_FN (BUILT_IN_EXP10):
2056 CASE_FLT_FN (BUILT_IN_POW10):
2057 errno_set = true; builtin_optab = exp10_optab; break;
2058 CASE_FLT_FN (BUILT_IN_EXP2):
2059 errno_set = true; builtin_optab = exp2_optab; break;
2060 CASE_FLT_FN (BUILT_IN_EXPM1):
2061 errno_set = true; builtin_optab = expm1_optab; break;
2062 CASE_FLT_FN (BUILT_IN_LOGB):
2063 errno_set = true; builtin_optab = logb_optab; break;
2064 CASE_FLT_FN (BUILT_IN_LOG):
2065 errno_set = true; builtin_optab = log_optab; break;
2066 CASE_FLT_FN (BUILT_IN_LOG10):
2067 errno_set = true; builtin_optab = log10_optab; break;
2068 CASE_FLT_FN (BUILT_IN_LOG2):
2069 errno_set = true; builtin_optab = log2_optab; break;
2070 CASE_FLT_FN (BUILT_IN_LOG1P):
2071 errno_set = true; builtin_optab = log1p_optab; break;
2072 CASE_FLT_FN (BUILT_IN_ASIN):
2073 builtin_optab = asin_optab; break;
2074 CASE_FLT_FN (BUILT_IN_ACOS):
2075 builtin_optab = acos_optab; break;
2076 CASE_FLT_FN (BUILT_IN_TAN):
2077 builtin_optab = tan_optab; break;
2078 CASE_FLT_FN (BUILT_IN_ATAN):
2079 builtin_optab = atan_optab; break;
2080 CASE_FLT_FN (BUILT_IN_FLOOR):
2081 builtin_optab = floor_optab; break;
2082 CASE_FLT_FN (BUILT_IN_CEIL):
2083 builtin_optab = ceil_optab; break;
2084 CASE_FLT_FN (BUILT_IN_TRUNC):
2085 builtin_optab = btrunc_optab; break;
2086 CASE_FLT_FN (BUILT_IN_ROUND):
2087 builtin_optab = round_optab; break;
2088 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2089 builtin_optab = nearbyint_optab;
2090 if (flag_trapping_math)
2091 break;
2092 /* Else fall through and expand as rint. */
2093 CASE_FLT_FN (BUILT_IN_RINT):
2094 builtin_optab = rint_optab; break;
2095 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2096 builtin_optab = significand_optab; break;
2097 default:
2098 gcc_unreachable ();
2099 }
2100
2101 /* Make a suitable register to place result in. */
2102 mode = TYPE_MODE (TREE_TYPE (exp));
2103
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2105 errno_set = false;
2106
2107 /* Before working hard, check whether the instruction is available, but try
2108 to widen the mode for specific operations. */
2109 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2110 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2111 && (!errno_set || !optimize_insn_for_size_p ()))
2112 {
2113 rtx result = gen_reg_rtx (mode);
2114
2115 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2116 need to expand the argument again. This way, we will not perform
2117 side effects more than once. */
2118 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2119
2120 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2121
2122 start_sequence ();
2123
2124 /* Compute into RESULT.
2125 Set RESULT to wherever the result comes back. */
2126 result = expand_unop (mode, builtin_optab, op0, result, 0);
2127
2128 if (result != 0)
2129 {
2130 if (errno_set)
2131 expand_errno_check (exp, result);
2132
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2135 end_sequence ();
2136 emit_insn (insns);
2137 return result;
2138 }
2139
2140 /* If we were unable to expand via the builtin, stop the sequence
2141 (without outputting the insns) and call the library function
2142 with the stabilized argument list. */
2143 end_sequence ();
2144 }
2145
2146 return expand_call (exp, target, target == const0_rtx);
2147 }
2148
2149 /* Expand a call to one of the builtin binary math functions (pow, atan2, fmod, and the like).
2150 Return NULL_RTX if a normal call should be emitted rather than expanding the
2151 function in-line. EXP is the expression that is a call to the builtin
2152 function; if convenient, the result should be placed in TARGET.
2153 SUBTARGET may be used as the target for computing one of EXP's
2154 operands. */
2155
2156 static rtx
2157 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2158 {
2159 optab builtin_optab;
2160 rtx op0, op1, insns, result;
2161 int op1_type = REAL_TYPE;
2162 tree fndecl = get_callee_fndecl (exp);
2163 tree arg0, arg1;
2164 enum machine_mode mode;
2165 bool errno_set = true;
2166
2167 switch (DECL_FUNCTION_CODE (fndecl))
2168 {
2169 CASE_FLT_FN (BUILT_IN_SCALBN):
2170 CASE_FLT_FN (BUILT_IN_SCALBLN):
2171 CASE_FLT_FN (BUILT_IN_LDEXP):
2172 op1_type = INTEGER_TYPE;
2173 default:
2174 break;
2175 }
2176
2177 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2178 return NULL_RTX;
2179
2180 arg0 = CALL_EXPR_ARG (exp, 0);
2181 arg1 = CALL_EXPR_ARG (exp, 1);
2182
2183 switch (DECL_FUNCTION_CODE (fndecl))
2184 {
2185 CASE_FLT_FN (BUILT_IN_POW):
2186 builtin_optab = pow_optab; break;
2187 CASE_FLT_FN (BUILT_IN_ATAN2):
2188 builtin_optab = atan2_optab; break;
2189 CASE_FLT_FN (BUILT_IN_SCALB):
2190 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2191 return 0;
2192 builtin_optab = scalb_optab; break;
2193 CASE_FLT_FN (BUILT_IN_SCALBN):
2194 CASE_FLT_FN (BUILT_IN_SCALBLN):
2195 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2196 return 0;
2197 /* Fall through... */
2198 CASE_FLT_FN (BUILT_IN_LDEXP):
2199 builtin_optab = ldexp_optab; break;
2200 CASE_FLT_FN (BUILT_IN_FMOD):
2201 builtin_optab = fmod_optab; break;
2202 CASE_FLT_FN (BUILT_IN_REMAINDER):
2203 CASE_FLT_FN (BUILT_IN_DREM):
2204 builtin_optab = remainder_optab; break;
2205 default:
2206 gcc_unreachable ();
2207 }
2208
2209 /* Make a suitable register to place result in. */
2210 mode = TYPE_MODE (TREE_TYPE (exp));
2211
2212 /* Before working hard, check whether the instruction is available. */
2213 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2214 return NULL_RTX;
2215
2216 result = gen_reg_rtx (mode);
2217
2218 if (! flag_errno_math || ! HONOR_NANS (mode))
2219 errno_set = false;
2220
2221 if (errno_set && optimize_insn_for_size_p ())
2222 return 0;
2223
2224 /* Always stabilize the argument list. */
2225 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2226 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2227
2228 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2229 op1 = expand_normal (arg1);
2230
2231 start_sequence ();
2232
2233 /* Compute into RESULT.
2234 Set RESULT to wherever the result comes back. */
2235 result = expand_binop (mode, builtin_optab, op0, op1,
2236 result, 0, OPTAB_DIRECT);
2237
2238 /* If we were unable to expand via the builtin, stop the sequence
2239 (without outputting the insns) and call the library function
2240 with the stabilized argument list. */
2241 if (result == 0)
2242 {
2243 end_sequence ();
2244 return expand_call (exp, target, target == const0_rtx);
2245 }
2246
2247 if (errno_set)
2248 expand_errno_check (exp, result);
2249
2250 /* Output the entire sequence. */
2251 insns = get_insns ();
2252 end_sequence ();
2253 emit_insn (insns);
2254
2255 return result;
2256 }
2257
2258 /* Expand a call to the builtin ternary math functions (fma).
2259 Return NULL_RTX if a normal call should be emitted rather than expanding the
2260 function in-line. EXP is the expression that is a call to the builtin
2261 function; if convenient, the result should be placed in TARGET.
2262 SUBTARGET may be used as the target for computing one of EXP's
2263 operands. */
2264
2265 static rtx
2266 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2267 {
2268 optab builtin_optab;
2269 rtx op0, op1, op2, insns, result;
2270 tree fndecl = get_callee_fndecl (exp);
2271 tree arg0, arg1, arg2;
2272 enum machine_mode mode;
2273
2274 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2275 return NULL_RTX;
2276
2277 arg0 = CALL_EXPR_ARG (exp, 0);
2278 arg1 = CALL_EXPR_ARG (exp, 1);
2279 arg2 = CALL_EXPR_ARG (exp, 2);
2280
2281 switch (DECL_FUNCTION_CODE (fndecl))
2282 {
2283 CASE_FLT_FN (BUILT_IN_FMA):
2284 builtin_optab = fma_optab; break;
2285 default:
2286 gcc_unreachable ();
2287 }
2288
2289 /* Make a suitable register to place result in. */
2290 mode = TYPE_MODE (TREE_TYPE (exp));
2291
2292 /* Before working hard, check whether the instruction is available. */
2293 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2294 return NULL_RTX;
2295
2296 result = gen_reg_rtx (mode);
2297
2298 /* Always stabilize the argument list. */
2299 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2300 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2301 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2302
2303 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2304 op1 = expand_normal (arg1);
2305 op2 = expand_normal (arg2);
2306
2307 start_sequence ();
2308
2309 /* Compute into RESULT.
2310 Set RESULT to wherever the result comes back. */
2311 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2312 result, 0);
2313
2314 /* If we were unable to expand via the builtin, stop the sequence
2315 (without outputting the insns) and call the library function
2316 with the stabilized argument list. */
2317 if (result == 0)
2318 {
2319 end_sequence ();
2320 return expand_call (exp, target, target == const0_rtx);
2321 }
2322
2323 /* Output the entire sequence. */
2324 insns = get_insns ();
2325 end_sequence ();
2326 emit_insn (insns);
2327
2328 return result;
2329 }
2330
2331 /* Expand a call to the builtin sin and cos math functions.
2332 Return NULL_RTX if a normal call should be emitted rather than expanding the
2333 function in-line. EXP is the expression that is a call to the builtin
2334 function; if convenient, the result should be placed in TARGET.
2335 SUBTARGET may be used as the target for computing one of EXP's
2336 operands. */
2337
2338 static rtx
2339 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2340 {
2341 optab builtin_optab;
2342 rtx op0, insns;
2343 tree fndecl = get_callee_fndecl (exp);
2344 enum machine_mode mode;
2345 tree arg;
2346
2347 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2348 return NULL_RTX;
2349
2350 arg = CALL_EXPR_ARG (exp, 0);
2351
2352 switch (DECL_FUNCTION_CODE (fndecl))
2353 {
2354 CASE_FLT_FN (BUILT_IN_SIN):
2355 CASE_FLT_FN (BUILT_IN_COS):
2356 builtin_optab = sincos_optab; break;
2357 default:
2358 gcc_unreachable ();
2359 }
2360
2361 /* Make a suitable register to place result in. */
2362 mode = TYPE_MODE (TREE_TYPE (exp));
2363
2364 /* Check if the sincos insn is available; if not, fall back
2365 to the sin or cos insn. */
2366 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2367 switch (DECL_FUNCTION_CODE (fndecl))
2368 {
2369 CASE_FLT_FN (BUILT_IN_SIN):
2370 builtin_optab = sin_optab; break;
2371 CASE_FLT_FN (BUILT_IN_COS):
2372 builtin_optab = cos_optab; break;
2373 default:
2374 gcc_unreachable ();
2375 }
2376
2377 /* Before working hard, check whether the instruction is available. */
2378 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2379 {
2380 rtx result = gen_reg_rtx (mode);
2381
2382 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2383 need to expand the argument again. This way, we will not perform
2384 side effects more than once. */
2385 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2386
2387 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2388
2389 start_sequence ();
2390
2391 /* Compute into RESULT.
2392 Set RESULT to wherever the result comes back. */
2393 if (builtin_optab == sincos_optab)
2394 {
2395 int ok;
2396
2397 switch (DECL_FUNCTION_CODE (fndecl))
2398 {
2399 CASE_FLT_FN (BUILT_IN_SIN):
2400 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2401 break;
2402 CASE_FLT_FN (BUILT_IN_COS):
2403 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2404 break;
2405 default:
2406 gcc_unreachable ();
2407 }
2408 gcc_assert (ok);
2409 }
2410 else
2411 result = expand_unop (mode, builtin_optab, op0, result, 0);
2412
2413 if (result != 0)
2414 {
2415 /* Output the entire sequence. */
2416 insns = get_insns ();
2417 end_sequence ();
2418 emit_insn (insns);
2419 return result;
2420 }
2421
2422 /* If we were unable to expand via the builtin, stop the sequence
2423 (without outputting the insns) and call the library function
2424 with the stabilized argument list. */
2425 end_sequence ();
2426 }
2427
2428 return expand_call (exp, target, target == const0_rtx);
2429 }
2430
2431 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2432 return an RTL instruction code that implements the functionality.
2433 If that isn't possible or available, return CODE_FOR_nothing. */
2434
2435 static enum insn_code
2436 interclass_mathfn_icode (tree arg, tree fndecl)
2437 {
2438 bool errno_set = false;
2439 optab builtin_optab = unknown_optab;
2440 enum machine_mode mode;
2441
2442 switch (DECL_FUNCTION_CODE (fndecl))
2443 {
2444 CASE_FLT_FN (BUILT_IN_ILOGB):
2445 errno_set = true; builtin_optab = ilogb_optab; break;
2446 CASE_FLT_FN (BUILT_IN_ISINF):
2447 builtin_optab = isinf_optab; break;
2448 case BUILT_IN_ISNORMAL:
2449 case BUILT_IN_ISFINITE:
2450 CASE_FLT_FN (BUILT_IN_FINITE):
2451 case BUILT_IN_FINITED32:
2452 case BUILT_IN_FINITED64:
2453 case BUILT_IN_FINITED128:
2454 case BUILT_IN_ISINFD32:
2455 case BUILT_IN_ISINFD64:
2456 case BUILT_IN_ISINFD128:
2457 /* These builtins have no optabs (yet). */
2458 break;
2459 default:
2460 gcc_unreachable ();
2461 }
2462
2463 /* There's no easy way to detect the case where we need to set EDOM. */
2464 if (flag_errno_math && errno_set)
2465 return CODE_FOR_nothing;
2466
2467 /* Optab mode depends on the mode of the input argument. */
2468 mode = TYPE_MODE (TREE_TYPE (arg));
2469
2470 if (builtin_optab)
2471 return optab_handler (builtin_optab, mode);
2472 return CODE_FOR_nothing;
2473 }
2474
2475 /* Expand a call to one of the builtin math functions that operate on
2476 a floating point argument and produce an integer result (ilogb,
2477 isinf, isnan, etc.).
2478 Return 0 if a normal call should be emitted rather than expanding the
2479 function in-line. EXP is the expression that is a call to the builtin
2480 function; if convenient, the result should be placed in TARGET. */
2481
2482 static rtx
2483 expand_builtin_interclass_mathfn (tree exp, rtx target)
2484 {
2485 enum insn_code icode = CODE_FOR_nothing;
2486 rtx op0;
2487 tree fndecl = get_callee_fndecl (exp);
2488 enum machine_mode mode;
2489 tree arg;
2490
2491 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2492 return NULL_RTX;
2493
2494 arg = CALL_EXPR_ARG (exp, 0);
2495 icode = interclass_mathfn_icode (arg, fndecl);
2496 mode = TYPE_MODE (TREE_TYPE (arg));
2497
2498 if (icode != CODE_FOR_nothing)
2499 {
2500 struct expand_operand ops[1];
2501 rtx last = get_last_insn ();
2502 tree orig_arg = arg;
2503
2504 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2505 need to expand the argument again. This way, we will not perform
2506 side effects more than once. */
2507 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2508
2509 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2510
2511 if (mode != GET_MODE (op0))
2512 op0 = convert_to_mode (mode, op0, 0);
2513
2514 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2515 if (maybe_legitimize_operands (icode, 0, 1, ops)
2516 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2517 return ops[0].value;
2518
2519 delete_insns_since (last);
2520 CALL_EXPR_ARG (exp, 0) = orig_arg;
2521 }
2522
2523 return NULL_RTX;
2524 }
2525
2526 /* Expand a call to the builtin sincos math function.
2527 Return NULL_RTX if a normal call should be emitted rather than expanding the
2528 function in-line. EXP is the expression that is a call to the builtin
2529 function. */
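
/* E.g. sincos (x, &s, &c) stores sin (x) in *s and cos (x) in *c with
   a single sincos insn instead of two separate libcalls; when the
   optab is unavailable we simply let a normal call be emitted. */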
2530
2531 static rtx
2532 expand_builtin_sincos (tree exp)
2533 {
2534 rtx op0, op1, op2, target1, target2;
2535 enum machine_mode mode;
2536 tree arg, sinp, cosp;
2537 int result;
2538 location_t loc = EXPR_LOCATION (exp);
2539 tree alias_type, alias_off;
2540
2541 if (!validate_arglist (exp, REAL_TYPE,
2542 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2543 return NULL_RTX;
2544
2545 arg = CALL_EXPR_ARG (exp, 0);
2546 sinp = CALL_EXPR_ARG (exp, 1);
2547 cosp = CALL_EXPR_ARG (exp, 2);
2548
2549 /* Make a suitable register to place result in. */
2550 mode = TYPE_MODE (TREE_TYPE (arg));
2551
2552 /* Check if the sincos insn is available; if not, let a normal call be emitted. */
2553 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2554 return NULL_RTX;
2555
2556 target1 = gen_reg_rtx (mode);
2557 target2 = gen_reg_rtx (mode);
2558
2559 op0 = expand_normal (arg);
2560 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2561 alias_off = build_int_cst (alias_type, 0);
2562 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2563 sinp, alias_off));
2564 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2565 cosp, alias_off));
2566
2567 /* Compute into target1 (the sin result) and target2 (the cos
2568 result). */
2569 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2570 gcc_assert (result);
2571
2572 /* Move target1 and target2 to the memory locations indicated
2573 by op1 and op2. */
2574 emit_move_insn (op1, target1);
2575 emit_move_insn (op2, target2);
2576
2577 return const0_rtx;
2578 }
2579
2580 /* Expand a call to the internal cexpi builtin to the sincos math function.
2581 EXP is the expression that is a call to the builtin function; if convenient,
2582 the result should be placed in TARGET. */
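
/* By Euler's formula cexpi (x) is cos (x) + i*sin (x), so the
   expansion below tries, in order: a sincos insn, a libcall to sincos,
   and finally cexp (0 + i*x). */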
2583
2584 static rtx
2585 expand_builtin_cexpi (tree exp, rtx target)
2586 {
2587 tree fndecl = get_callee_fndecl (exp);
2588 tree arg, type;
2589 enum machine_mode mode;
2590 rtx op0, op1, op2;
2591 location_t loc = EXPR_LOCATION (exp);
2592
2593 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2594 return NULL_RTX;
2595
2596 arg = CALL_EXPR_ARG (exp, 0);
2597 type = TREE_TYPE (arg);
2598 mode = TYPE_MODE (TREE_TYPE (arg));
2599
2600 /* Try expanding via a sincos optab; fall back to emitting a libcall
2601 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2602 is only generated from sincos or cexp, or when either is available. */
2603 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2604 {
2605 op1 = gen_reg_rtx (mode);
2606 op2 = gen_reg_rtx (mode);
2607
2608 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2609
2610 /* Compute into op1 (sin) and op2 (cos). */
2611 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2612 }
2613 else if (targetm.libc_has_function (function_sincos))
2614 {
2615 tree call, fn = NULL_TREE;
2616 tree top1, top2;
2617 rtx op1a, op2a;
2618
2619 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2620 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2621 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2622 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2623 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2624 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2625 else
2626 gcc_unreachable ();
2627
2628 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2629 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2630 op1a = copy_addr_to_reg (XEXP (op1, 0));
2631 op2a = copy_addr_to_reg (XEXP (op2, 0));
2632 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2633 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2634
2635 /* Make sure not to fold the sincos call again. */
2636 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2637 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2638 call, 3, arg, top1, top2));
2639 }
2640 else
2641 {
2642 tree call, fn = NULL_TREE, narg;
2643 tree ctype = build_complex_type (type);
2644
2645 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2646 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2647 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2648 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2649 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2650 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2651 else
2652 gcc_unreachable ();
2653
2654 /* If we don't have a decl for cexp, create one. This is the
2655 friendliest fallback if the user calls __builtin_cexpi
2656 on a target without full C99 function support. */
2657 if (fn == NULL_TREE)
2658 {
2659 tree fntype;
2660 const char *name = NULL;
2661
2662 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2663 name = "cexpf";
2664 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2665 name = "cexp";
2666 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2667 name = "cexpl";
2668
2669 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2670 fn = build_fn_decl (name, fntype);
2671 }
2672
2673 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2674 build_real (type, dconst0), arg);
2675
2676 /* Make sure not to fold the cexp call again. */
2677 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2678 return expand_expr (build_call_nary (ctype, call, 1, narg),
2679 target, VOIDmode, EXPAND_NORMAL);
2680 }
2681
2682 /* Now build the proper return type. */
2683 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2684 make_tree (TREE_TYPE (arg), op2),
2685 make_tree (TREE_TYPE (arg), op1)),
2686 target, VOIDmode, EXPAND_NORMAL);
2687 }
2688
2689 /* Conveniently construct a function call expression. FNDECL names the
2690 function to be called, N is the number of arguments, and the "..."
2691 parameters are the argument expressions. Unlike build_call_expr
2692 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2693
2694 static tree
2695 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2696 {
2697 va_list ap;
2698 tree fntype = TREE_TYPE (fndecl);
2699 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2700
2701 va_start (ap, n);
2702 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2703 va_end (ap);
2704 SET_EXPR_LOCATION (fn, loc);
2705 return fn;
2706 }
2707
2708 /* Expand a call to one of the builtin rounding functions gcc defines
2709 as an extension (lfloor and lceil). As these are gcc extensions we
2710 do not need to worry about setting errno to EDOM.
2711 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2712 EXP is the expression that is a call to the builtin function;
2713 if convenient, the result should be placed in TARGET. */
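
/* E.g. long l = __builtin_lfloor (x) expands via lfloor_optab when the
   target has a suitable insn; otherwise it is lowered to the
   equivalent of (long) floor (x), i.e. a call to floor followed by an
   expand_fix truncation. */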
2714
2715 static rtx
2716 expand_builtin_int_roundingfn (tree exp, rtx target)
2717 {
2718 convert_optab builtin_optab;
2719 rtx op0, insns, tmp;
2720 tree fndecl = get_callee_fndecl (exp);
2721 enum built_in_function fallback_fn;
2722 tree fallback_fndecl;
2723 enum machine_mode mode;
2724 tree arg;
2725
2726 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2727 gcc_unreachable ();
2728
2729 arg = CALL_EXPR_ARG (exp, 0);
2730
2731 switch (DECL_FUNCTION_CODE (fndecl))
2732 {
2733 CASE_FLT_FN (BUILT_IN_ICEIL):
2734 CASE_FLT_FN (BUILT_IN_LCEIL):
2735 CASE_FLT_FN (BUILT_IN_LLCEIL):
2736 builtin_optab = lceil_optab;
2737 fallback_fn = BUILT_IN_CEIL;
2738 break;
2739
2740 CASE_FLT_FN (BUILT_IN_IFLOOR):
2741 CASE_FLT_FN (BUILT_IN_LFLOOR):
2742 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2743 builtin_optab = lfloor_optab;
2744 fallback_fn = BUILT_IN_FLOOR;
2745 break;
2746
2747 default:
2748 gcc_unreachable ();
2749 }
2750
2751 /* Make a suitable register to place result in. */
2752 mode = TYPE_MODE (TREE_TYPE (exp));
2753
2754 target = gen_reg_rtx (mode);
2755
2756 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2757 need to expand the argument again. This way, we will not perform
2758 side effects more than once. */
2759 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2760
2761 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2762
2763 start_sequence ();
2764
2765 /* Compute into TARGET. */
2766 if (expand_sfix_optab (target, op0, builtin_optab))
2767 {
2768 /* Output the entire sequence. */
2769 insns = get_insns ();
2770 end_sequence ();
2771 emit_insn (insns);
2772 return target;
2773 }
2774
2775 /* If we were unable to expand via the builtin, stop the sequence
2776 (without outputting the insns). */
2777 end_sequence ();
2778
2779 /* Fall back to floating point rounding optab. */
2780 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2781
2782 /* For non-C99 targets we may end up without a fallback fndecl here
2783 if the user called __builtin_lfloor directly. In this case emit
2784 a call to the floor/ceil variants nevertheless. This should result
2785 in the best user experience on targets without full C99 support. */
2786 if (fallback_fndecl == NULL_TREE)
2787 {
2788 tree fntype;
2789 const char *name = NULL;
2790
2791 switch (DECL_FUNCTION_CODE (fndecl))
2792 {
2793 case BUILT_IN_ICEIL:
2794 case BUILT_IN_LCEIL:
2795 case BUILT_IN_LLCEIL:
2796 name = "ceil";
2797 break;
2798 case BUILT_IN_ICEILF:
2799 case BUILT_IN_LCEILF:
2800 case BUILT_IN_LLCEILF:
2801 name = "ceilf";
2802 break;
2803 case BUILT_IN_ICEILL:
2804 case BUILT_IN_LCEILL:
2805 case BUILT_IN_LLCEILL:
2806 name = "ceill";
2807 break;
2808 case BUILT_IN_IFLOOR:
2809 case BUILT_IN_LFLOOR:
2810 case BUILT_IN_LLFLOOR:
2811 name = "floor";
2812 break;
2813 case BUILT_IN_IFLOORF:
2814 case BUILT_IN_LFLOORF:
2815 case BUILT_IN_LLFLOORF:
2816 name = "floorf";
2817 break;
2818 case BUILT_IN_IFLOORL:
2819 case BUILT_IN_LFLOORL:
2820 case BUILT_IN_LLFLOORL:
2821 name = "floorl";
2822 break;
2823 default:
2824 gcc_unreachable ();
2825 }
2826
2827 fntype = build_function_type_list (TREE_TYPE (arg),
2828 TREE_TYPE (arg), NULL_TREE);
2829 fallback_fndecl = build_fn_decl (name, fntype);
2830 }
2831
2832 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2833
2834 tmp = expand_normal (exp);
2835 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2836
2837 /* Truncate the result of the floating point optab to an integer
2838 via expand_fix (). */
2839 target = gen_reg_rtx (mode);
2840 expand_fix (target, tmp, 0);
2841
2842 return target;
2843 }
2844
2845 /* Expand a call to one of the builtin math functions doing integer
2846 conversion (lrint, lround, and their variants).
2847 Return 0 if a normal call should be emitted rather than expanding the
2848 function in-line. EXP is the expression that is a call to the builtin
2849 function; if convenient, the result should be placed in TARGET. */
2850
2851 static rtx
2852 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2853 {
2854 convert_optab builtin_optab;
2855 rtx op0, insns;
2856 tree fndecl = get_callee_fndecl (exp);
2857 tree arg;
2858 enum machine_mode mode;
2859 enum built_in_function fallback_fn = BUILT_IN_NONE;
2860
2861 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2862 gcc_unreachable ();
2863
2864 arg = CALL_EXPR_ARG (exp, 0);
2865
2866 switch (DECL_FUNCTION_CODE (fndecl))
2867 {
2868 CASE_FLT_FN (BUILT_IN_IRINT):
2869 fallback_fn = BUILT_IN_LRINT;
2870 /* FALLTHRU */
2871 CASE_FLT_FN (BUILT_IN_LRINT):
2872 CASE_FLT_FN (BUILT_IN_LLRINT):
2873 builtin_optab = lrint_optab;
2874 break;
2875
2876 CASE_FLT_FN (BUILT_IN_IROUND):
2877 fallback_fn = BUILT_IN_LROUND;
2878 /* FALLTHRU */
2879 CASE_FLT_FN (BUILT_IN_LROUND):
2880 CASE_FLT_FN (BUILT_IN_LLROUND):
2881 builtin_optab = lround_optab;
2882 break;
2883
2884 default:
2885 gcc_unreachable ();
2886 }
2887
2888 /* There's no easy way to detect the case where we need to set EDOM. */
2889 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2890 return NULL_RTX;
2891
2892 /* Make a suitable register to place result in. */
2893 mode = TYPE_MODE (TREE_TYPE (exp));
2894
2895 /* If errno need not be maintained, try expanding inline via the optab. */
2896 if (!flag_errno_math)
2897 {
2898 rtx result = gen_reg_rtx (mode);
2899
2900 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2901 need to expand the argument again. This way, we will not perform
2902 side effects more than once. */
2903 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2904
2905 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2906
2907 start_sequence ();
2908
2909 if (expand_sfix_optab (result, op0, builtin_optab))
2910 {
2911 /* Output the entire sequence. */
2912 insns = get_insns ();
2913 end_sequence ();
2914 emit_insn (insns);
2915 return result;
2916 }
2917
2918 /* If we were unable to expand via the builtin, stop the sequence
2919 (without outputting the insns) and call the library function
2920 with the stabilized argument list. */
2921 end_sequence ();
2922 }
2923
2924 if (fallback_fn != BUILT_IN_NONE)
2925 {
2926 /* Fall back to rounding to long int. Use implicit_p 0: on non-C99
2927 targets (int) round (x) should never be transformed into
2928 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2929 a call to lround in the hope that the target provides at least some
2930 C99 functions. This should give the best user experience on
2931 targets without full C99 support. */
2932 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2933 fallback_fn, 0);
2934
2935 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2936 fallback_fndecl, 1, arg);
2937
2938 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2939 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2940 return convert_to_mode (mode, target, 0);
2941 }
2942
2943 return expand_call (exp, target, target == const0_rtx);
2944 }
2945
2946 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2947 a normal call should be emitted rather than expanding the function
2948 in-line. EXP is the expression that is a call to the builtin
2949 function; if convenient, the result should be placed in TARGET. */
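
/* powi has no libm counterpart, so e.g. __builtin_powi (x, 3) with
   double x becomes a libcall to libgcc's __powidf2 (x, 3) here. */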
2950
2951 static rtx
2952 expand_builtin_powi (tree exp, rtx target)
2953 {
2954 tree arg0, arg1;
2955 rtx op0, op1;
2956 enum machine_mode mode;
2957 enum machine_mode mode2;
2958
2959 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2960 return NULL_RTX;
2961
2962 arg0 = CALL_EXPR_ARG (exp, 0);
2963 arg1 = CALL_EXPR_ARG (exp, 1);
2964 mode = TYPE_MODE (TREE_TYPE (exp));
2965
2966 /* Emit a libcall to libgcc. */
2967
2968 /* Mode of the 2nd argument must match that of an int. */
2969 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2970
2971 if (target == NULL_RTX)
2972 target = gen_reg_rtx (mode);
2973
2974 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2975 if (GET_MODE (op0) != mode)
2976 op0 = convert_to_mode (mode, op0, 0);
2977 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2978 if (GET_MODE (op1) != mode2)
2979 op1 = convert_to_mode (mode2, op1, 0);
2980
2981 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2982 target, LCT_CONST, mode, 2,
2983 op0, mode, op1, mode2);
2984
2985 return target;
2986 }
2987
2988 /* Expand expression EXP which is a call to the strlen builtin. Return
2989 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2990 try to get the result in TARGET, if convenient. */
2991
2992 static rtx
2993 expand_builtin_strlen (tree exp, rtx target,
2994 enum machine_mode target_mode)
2995 {
2996 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2997 return NULL_RTX;
2998 else
2999 {
3000 struct expand_operand ops[4];
3001 rtx pat;
3002 tree len;
3003 tree src = CALL_EXPR_ARG (exp, 0);
3004 rtx src_reg, before_strlen;
3005 enum machine_mode insn_mode = target_mode;
3006 enum insn_code icode = CODE_FOR_nothing;
3007 unsigned int align;
3008
3009 /* If the length can be computed at compile-time, return it. */
3010 len = c_strlen (src, 0);
3011 if (len)
3012 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3013
3014 /* If the length can be computed at compile-time and is a constant
3015 integer, but there are side-effects in src, evaluate
3016 src for its side-effects, then return len.
3017 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3018 can be optimized into: i++; x = 3; */
3019 len = c_strlen (src, 1);
3020 if (len && TREE_CODE (len) == INTEGER_CST)
3021 {
3022 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3023 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3024 }
3025
3026 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3027
3028 /* If SRC is not a pointer type, don't do this operation inline. */
3029 if (align == 0)
3030 return NULL_RTX;
3031
3032 /* Bail out if we can't compute strlen in the right mode. */
3033 while (insn_mode != VOIDmode)
3034 {
3035 icode = optab_handler (strlen_optab, insn_mode);
3036 if (icode != CODE_FOR_nothing)
3037 break;
3038
3039 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3040 }
3041 if (insn_mode == VOIDmode)
3042 return NULL_RTX;
3043
3044 /* Make a place to hold the source address. We will not expand
3045 the actual source until we are sure that the expansion will
3046 not fail -- there are trees that cannot be expanded twice. */
3047 src_reg = gen_reg_rtx (Pmode);
3048
3049 /* Mark the beginning of the strlen sequence so we can emit the
3050 source operand later. */
3051 before_strlen = get_last_insn ();
3052
3053 create_output_operand (&ops[0], target, insn_mode);
3054 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3055 create_integer_operand (&ops[2], 0);
3056 create_integer_operand (&ops[3], align);
3057 if (!maybe_expand_insn (icode, 4, ops))
3058 return NULL_RTX;
3059
3060 /* Now that we are assured of success, expand the source. */
3061 start_sequence ();
3062 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3063 if (pat != src_reg)
3064 {
3065 #ifdef POINTERS_EXTEND_UNSIGNED
3066 if (GET_MODE (pat) != Pmode)
3067 pat = convert_to_mode (Pmode, pat,
3068 POINTERS_EXTEND_UNSIGNED);
3069 #endif
3070 emit_move_insn (src_reg, pat);
3071 }
3072 pat = get_insns ();
3073 end_sequence ();
3074
3075 if (before_strlen)
3076 emit_insn_after (pat, before_strlen);
3077 else
3078 emit_insn_before (pat, get_insns ());
3079
3080 /* Return the value in the proper mode for this function. */
3081 if (GET_MODE (ops[0].value) == target_mode)
3082 target = ops[0].value;
3083 else if (target != 0)
3084 convert_move (target, ops[0].value, 0);
3085 else
3086 target = convert_to_mode (target_mode, ops[0].value, 0);
3087
3088 return target;
3089 }
3090 }
3091
3092 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3093 bytes from constant string DATA + OFFSET and return it as target
3094 constant. */
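
/* E.g. for DATA "hello", OFFSET 1 and a 4-byte integer MODE this
   returns the target constant encoding the bytes "ello", which
   store_by_pieces can then emit as a single immediate store. */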
3095
3096 static rtx
3097 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3098 enum machine_mode mode)
3099 {
3100 const char *str = (const char *) data;
3101
3102 gcc_assert (offset >= 0
3103 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3104 <= strlen (str) + 1));
3105
3106 return c_readstr (str + offset, mode);
3107 }
3108
3109 /* LEN specifies the length of the block for a memcpy/memset operation.
3110 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3111 In some cases we can make a very likely guess at the maximum size,
3112 which we store in PROBABLE_MAX_SIZE. */
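
/* E.g. for memcpy (a, b, n) where range information proves
   8 <= n <= 24, MIN_SIZE becomes 8 and MAX_SIZE (as well as
   PROBABLE_MAX_SIZE) becomes 24, letting the block-move expander pick
   a bounded inline sequence. */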
3113
3114 static void
3115 determine_block_size (tree len, rtx len_rtx,
3116 unsigned HOST_WIDE_INT *min_size,
3117 unsigned HOST_WIDE_INT *max_size,
3118 unsigned HOST_WIDE_INT *probable_max_size)
3119 {
3120 if (CONST_INT_P (len_rtx))
3121 {
3122 *min_size = *max_size = UINTVAL (len_rtx);
3123 return;
3124 }
3125 else
3126 {
3127 widest_int min, max;
3128 enum value_range_type range_type = VR_UNDEFINED;
3129
3130 /* Determine bounds from the type. */
3131 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3132 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3133 else
3134 *min_size = 0;
3135 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3136 *probable_max_size = *max_size = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3137 else
3138 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3139
3140 if (TREE_CODE (len) == SSA_NAME)
3141 range_type = get_range_info (len, &min, &max);
3142 if (range_type == VR_RANGE)
3143 {
3144 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3145 *min_size = min.to_uhwi ();
3146 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3147 *probable_max_size = *max_size = max.to_uhwi ();
3148 }
3149 else if (range_type == VR_ANTI_RANGE)
3150 {
3151 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3152 if (min == 0)
3153 {
3154 widest_int max_plus_one = max + 1;
3155 if (wi::fits_uhwi_p (max_plus_one))
3156 *min_size = max_plus_one.to_uhwi ();
3157 }
3158 /* Code like
3159 
3160 int n;
3161 if (n < 100)
3162 memcpy (a, b, n);
3163 
3164 produces an anti-range allowing negative values of N. We can
3165 still use that information to guess that N is not negative.
3166 */
3167 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3168 *probable_max_size = min.to_uhwi () - 1;
3169 }
3170 }
3171 gcc_checking_assert (*max_size <=
3172 (unsigned HOST_WIDE_INT)
3173 GET_MODE_MASK (GET_MODE (len_rtx)));
3174 }
3175
3176 /* Expand a call EXP to the memcpy builtin.
3177 Return NULL_RTX if we failed (the caller should then emit a normal call);
3178 otherwise try to get the result in TARGET, if convenient (and in
3179 mode MODE if that's convenient). */
3180
3181 static rtx
3182 expand_builtin_memcpy (tree exp, rtx target)
3183 {
3184 if (!validate_arglist (exp,
3185 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3186 return NULL_RTX;
3187 else
3188 {
3189 tree dest = CALL_EXPR_ARG (exp, 0);
3190 tree src = CALL_EXPR_ARG (exp, 1);
3191 tree len = CALL_EXPR_ARG (exp, 2);
3192 const char *src_str;
3193 unsigned int src_align = get_pointer_alignment (src);
3194 unsigned int dest_align = get_pointer_alignment (dest);
3195 rtx dest_mem, src_mem, dest_addr, len_rtx;
3196 HOST_WIDE_INT expected_size = -1;
3197 unsigned int expected_align = 0;
3198 unsigned HOST_WIDE_INT min_size;
3199 unsigned HOST_WIDE_INT max_size;
3200 unsigned HOST_WIDE_INT probable_max_size;
3201
3202 /* If DEST is not a pointer type, call the normal function. */
3203 if (dest_align == 0)
3204 return NULL_RTX;
3205
3206 /* If SRC is not a pointer type, don't do this
3207 operation in-line. */
3208 if (src_align == 0)
3209 return NULL_RTX;
3210
3211 if (currently_expanding_gimple_stmt)
3212 stringop_block_profile (currently_expanding_gimple_stmt,
3213 &expected_align, &expected_size);
3214
3215 if (expected_align < dest_align)
3216 expected_align = dest_align;
3217 dest_mem = get_memory_rtx (dest, len);
3218 set_mem_align (dest_mem, dest_align);
3219 len_rtx = expand_normal (len);
3220 determine_block_size (len, len_rtx, &min_size, &max_size,
3221 &probable_max_size);
3222 src_str = c_getstr (src);
3223
3224 /* If SRC is a string constant and block move would be done
3225 by pieces, we can avoid loading the string from memory
3226 and only store the computed constants. */
3227 if (src_str
3228 && CONST_INT_P (len_rtx)
3229 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3230 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3231 CONST_CAST (char *, src_str),
3232 dest_align, false))
3233 {
3234 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3235 builtin_memcpy_read_str,
3236 CONST_CAST (char *, src_str),
3237 dest_align, false, 0);
3238 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3239 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3240 return dest_mem;
3241 }
3242
3243 src_mem = get_memory_rtx (src, len);
3244 set_mem_align (src_mem, src_align);
3245
3246 /* Copy the block of memory most expediently. */
3247 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3248 CALL_EXPR_TAILCALL (exp)
3249 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3250 expected_align, expected_size,
3251 min_size, max_size, probable_max_size);
3252
3253 if (dest_addr == 0)
3254 {
3255 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3256 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3257 }
3258 return dest_addr;
3259 }
3260 }
3261
3262 /* Expand a call EXP to the mempcpy builtin.
3263 Return NULL_RTX if we failed; the caller should emit a normal call.
3264 Otherwise try to get the result in TARGET, if convenient (and in
3265 mode MODE if that's convenient). If ENDP is 0 return the
3266 destination pointer, if ENDP is 1 return the end pointer ala
3267 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3268 stpcpy. */
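
/* Concretely, for a 4-byte copy of "abc" plus its NUL to DST: ENDP 0
   yields DST (memcpy), ENDP 1 yields DST + 4 (mempcpy), and ENDP 2
   yields DST + 3 (stpcpy, pointing at the copied NUL). */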
3269
3270 static rtx
3271 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3272 {
3273 if (!validate_arglist (exp,
3274 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3275 return NULL_RTX;
3276 else
3277 {
3278 tree dest = CALL_EXPR_ARG (exp, 0);
3279 tree src = CALL_EXPR_ARG (exp, 1);
3280 tree len = CALL_EXPR_ARG (exp, 2);
3281 return expand_builtin_mempcpy_args (dest, src, len,
3282 target, mode, /*endp=*/ 1);
3283 }
3284 }
3285
3286 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3287 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3288 so that this can also be called without constructing an actual CALL_EXPR.
3289 The other arguments and return value are the same as for
3290 expand_builtin_mempcpy. */
3291
3292 static rtx
3293 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3294 rtx target, enum machine_mode mode, int endp)
3295 {
3296 /* If return value is ignored, transform mempcpy into memcpy. */
3297 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3298 {
3299 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3300 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3301 dest, src, len);
3302 return expand_expr (result, target, mode, EXPAND_NORMAL);
3303 }
3304 else
3305 {
3306 const char *src_str;
3307 unsigned int src_align = get_pointer_alignment (src);
3308 unsigned int dest_align = get_pointer_alignment (dest);
3309 rtx dest_mem, src_mem, len_rtx;
3310
3311 /* If either SRC or DEST is not a pointer type, don't do this
3312 operation in-line. */
3313 if (dest_align == 0 || src_align == 0)
3314 return NULL_RTX;
3315
3316 /* If LEN is not constant, call the normal function. */
3317 if (! tree_fits_uhwi_p (len))
3318 return NULL_RTX;
3319
3320 len_rtx = expand_normal (len);
3321 src_str = c_getstr (src);
3322
3323 /* If SRC is a string constant and block move would be done
3324 by pieces, we can avoid loading the string from memory
3325 and only store the computed constants. */
3326 if (src_str
3327 && CONST_INT_P (len_rtx)
3328 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3329 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3330 CONST_CAST (char *, src_str),
3331 dest_align, false))
3332 {
3333 dest_mem = get_memory_rtx (dest, len);
3334 set_mem_align (dest_mem, dest_align);
3335 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3336 builtin_memcpy_read_str,
3337 CONST_CAST (char *, src_str),
3338 dest_align, false, endp);
3339 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3340 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3341 return dest_mem;
3342 }
3343
3344 if (CONST_INT_P (len_rtx)
3345 && can_move_by_pieces (INTVAL (len_rtx),
3346 MIN (dest_align, src_align)))
3347 {
3348 dest_mem = get_memory_rtx (dest, len);
3349 set_mem_align (dest_mem, dest_align);
3350 src_mem = get_memory_rtx (src, len);
3351 set_mem_align (src_mem, src_align);
3352 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3353 MIN (dest_align, src_align), endp);
3354 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3355 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3356 return dest_mem;
3357 }
3358
3359 return NULL_RTX;
3360 }
3361 }
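
/* Illustrative sketch, not part of GCC: the three ENDP return
   conventions handled above, written out at the source level.  The
   name copy_with_endp is hypothetical; memcpy and size_t are assumed
   visible, as they are in this file via system.h.  */

static void *
copy_with_endp (void *dst, const void *src, size_t n, int endp)
{
  memcpy (dst, src, n);
  if (endp == 0)
    return dst;                    /* memcpy: the destination pointer.  */
  else if (endp == 1)
    return (char *) dst + n;       /* mempcpy: one past the last byte.  */
  else
    return (char *) dst + n - 1;   /* stpcpy: the last byte written,
                                      i.e. the NUL terminator.  */
}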
3362
3363 #ifndef HAVE_movstr
3364 # define HAVE_movstr 0
3365 # define CODE_FOR_movstr CODE_FOR_nothing
3366 #endif
3367
3368 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3369 we failed; the caller should emit a normal call, otherwise try to
3370 get the result in TARGET, if convenient. If ENDP is 0 return the
3371 destination pointer, if ENDP is 1 return the end pointer ala
3372 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3373 stpcpy. */
3374
3375 static rtx
3376 expand_movstr (tree dest, tree src, rtx target, int endp)
3377 {
3378 struct expand_operand ops[3];
3379 rtx dest_mem;
3380 rtx src_mem;
3381
3382 if (!HAVE_movstr)
3383 return NULL_RTX;
3384
3385 dest_mem = get_memory_rtx (dest, NULL);
3386 src_mem = get_memory_rtx (src, NULL);
3387 if (!endp)
3388 {
3389 target = force_reg (Pmode, XEXP (dest_mem, 0));
3390 dest_mem = replace_equiv_address (dest_mem, target);
3391 }
3392
3393 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3394 create_fixed_operand (&ops[1], dest_mem);
3395 create_fixed_operand (&ops[2], src_mem);
3396 expand_insn (CODE_FOR_movstr, 3, ops);
3397
3398 if (endp && target != const0_rtx)
3399 {
3400 target = ops[0].value;
3401 /* movstr is supposed to set end to the address of the NUL
3402 terminator. If the caller requested a mempcpy-like return value,
3403 adjust it. */
3404 if (endp == 1)
3405 {
3406 rtx tem = plus_constant (GET_MODE (target),
3407 gen_lowpart (GET_MODE (target), target), 1);
3408 emit_move_insn (target, force_operand (tem, NULL_RTX));
3409 }
3410 }
3411 return target;
3412 }
3413
3414 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3415 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3416 try to get the result in TARGET, if convenient (and in mode MODE if that's
3417 convenient). */
3418
3419 static rtx
3420 expand_builtin_strcpy (tree exp, rtx target)
3421 {
3422 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3423 {
3424 tree dest = CALL_EXPR_ARG (exp, 0);
3425 tree src = CALL_EXPR_ARG (exp, 1);
3426 return expand_builtin_strcpy_args (dest, src, target);
3427 }
3428 return NULL_RTX;
3429 }
3430
3431 /* Helper function to do the actual work for expand_builtin_strcpy. The
3432 arguments to the builtin_strcpy call DEST and SRC are broken out
3433 so that this can also be called without constructing an actual CALL_EXPR.
3434 The other arguments and return value are the same as for
3435 expand_builtin_strcpy. */
3436
3437 static rtx
3438 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3439 {
3440 return expand_movstr (dest, src, target, /*endp=*/0);
3441 }
3442
3443 /* Expand a call EXP to the stpcpy builtin.
3444 Return NULL_RTX if we failed; the caller should emit a normal call,
3445 otherwise try to get the result in TARGET, if convenient (and in
3446 mode MODE if that's convenient). */
3447
3448 static rtx
3449 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3450 {
3451 tree dst, src;
3452 location_t loc = EXPR_LOCATION (exp);
3453
3454 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3455 return NULL_RTX;
3456
3457 dst = CALL_EXPR_ARG (exp, 0);
3458 src = CALL_EXPR_ARG (exp, 1);
3459
3460 /* If return value is ignored, transform stpcpy into strcpy. */
3461 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3462 {
3463 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3464 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3465 return expand_expr (result, target, mode, EXPAND_NORMAL);
3466 }
3467 else
3468 {
3469 tree len, lenp1;
3470 rtx ret;
3471
3472 /* Ensure we get an actual string whose length can be evaluated at
3473 compile-time, not an expression containing a string. This is
3474 because the latter will potentially produce pessimized code
3475 when used to produce the return value. */
3476 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3477 return expand_movstr (dst, src, target, /*endp=*/2);
3478
3479 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3480 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3481 target, mode, /*endp=*/2);
3482
3483 if (ret)
3484 return ret;
3485
3486 if (TREE_CODE (len) == INTEGER_CST)
3487 {
3488 rtx len_rtx = expand_normal (len);
3489
3490 if (CONST_INT_P (len_rtx))
3491 {
3492 ret = expand_builtin_strcpy_args (dst, src, target);
3493
3494 if (ret)
3495 {
3496 if (! target)
3497 {
3498 if (mode != VOIDmode)
3499 target = gen_reg_rtx (mode);
3500 else
3501 target = gen_reg_rtx (GET_MODE (ret));
3502 }
3503 if (GET_MODE (target) != GET_MODE (ret))
3504 ret = gen_lowpart (GET_MODE (target), ret);
3505
3506 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3507 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3508 gcc_assert (ret);
3509
3510 return target;
3511 }
3512 }
3513 }
3514
3515 return expand_movstr (dst, src, target, /*endp=*/2);
3516 }
3517 }
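
/* Illustrative sketch, not part of GCC: the identity exploited above
   when a compile-time length for SRC is known,
   stpcpy (d, s) == (char *) mempcpy (d, s, strlen (s) + 1) - 1.
   The name stpcpy_via_mempcpy is hypothetical.  */

static char *
stpcpy_via_mempcpy (char *d, const char *s)
{
  size_t len = strlen (s);
  memcpy (d, s, len + 1);   /* Copy the string including its NUL.  */
  return d + len;           /* Point at the copied NUL, ala stpcpy.  */
}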
3518
3519 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3520 bytes from constant string DATA + OFFSET and return it as target
3521 constant. */
3522
3523 rtx
3524 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3525 enum machine_mode mode)
3526 {
3527 const char *str = (const char *) data;
3528
3529 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3530 return const0_rtx;
3531
3532 return c_readstr (str + offset, mode);
3533 }
3534
3535 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3536 NULL_RTX if we failed; the caller should emit a normal call. */
3537
3538 static rtx
3539 expand_builtin_strncpy (tree exp, rtx target)
3540 {
3541 location_t loc = EXPR_LOCATION (exp);
3542
3543 if (validate_arglist (exp,
3544 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3545 {
3546 tree dest = CALL_EXPR_ARG (exp, 0);
3547 tree src = CALL_EXPR_ARG (exp, 1);
3548 tree len = CALL_EXPR_ARG (exp, 2);
3549 tree slen = c_strlen (src, 1);
3550
3551 /* We must be passed a constant len and src parameter. */
3552 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3553 return NULL_RTX;
3554
3555 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3556
3557 /* We're required to pad with trailing zeros if the requested
3558 len is greater than strlen(s2)+1. In that case try to
3559 use store_by_pieces; if that fails, punt. */
3560 if (tree_int_cst_lt (slen, len))
3561 {
3562 unsigned int dest_align = get_pointer_alignment (dest);
3563 const char *p = c_getstr (src);
3564 rtx dest_mem;
3565
3566 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3567 || !can_store_by_pieces (tree_to_uhwi (len),
3568 builtin_strncpy_read_str,
3569 CONST_CAST (char *, p),
3570 dest_align, false))
3571 return NULL_RTX;
3572
3573 dest_mem = get_memory_rtx (dest, len);
3574 store_by_pieces (dest_mem, tree_to_uhwi (len),
3575 builtin_strncpy_read_str,
3576 CONST_CAST (char *, p), dest_align, false, 0);
3577 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3578 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3579 return dest_mem;
3580 }
3581 }
3582 return NULL_RTX;
3583 }
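
/* Illustrative sketch, not part of GCC: the padding rule the code
   above implements.  When LEN exceeds strlen (SRC), strncpy must fill
   the remainder of the destination with NUL bytes, which is why
   builtin_strncpy_read_str returns zeros for offsets past the end of
   the string constant.  The name demo_strncpy_pad is hypothetical.  */

static void
demo_strncpy_pad (void)
{
  char buf[8];
  strncpy (buf, "hi", sizeof buf);
  /* buf now holds 'h', 'i' and six NUL bytes of padding.  */
}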
3584
3585 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3586 bytes from constant string DATA + OFFSET and return it as target
3587 constant. */
3588
3589 rtx
3590 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3591 enum machine_mode mode)
3592 {
3593 const char *c = (const char *) data;
3594 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3595
3596 memset (p, *c, GET_MODE_SIZE (mode));
3597
3598 return c_readstr (p, mode);
3599 }
3600
3601 /* Callback routine for store_by_pieces. Return the RTL of a register
3602 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3603 char value given in the RTL register data. For example, if mode is
3604 4 bytes wide, return the RTL for 0x01010101*data. */
3605
3606 static rtx
3607 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3608 enum machine_mode mode)
3609 {
3610 rtx target, coeff;
3611 size_t size;
3612 char *p;
3613
3614 size = GET_MODE_SIZE (mode);
3615 if (size == 1)
3616 return (rtx) data;
3617
3618 p = XALLOCAVEC (char, size);
3619 memset (p, 1, size);
3620 coeff = c_readstr (p, mode);
3621
3622 target = convert_to_mode (mode, (rtx) data, 1);
3623 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3624 return force_reg (mode, target);
3625 }
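
/* Illustrative sketch, not part of GCC: the replication trick
   builtin_memset_gen_str sets up in RTL, shown for a 32-bit word.
   Multiplying a byte by 0x01010101 copies it into every byte lane.
   The name replicate_byte_32 is hypothetical and assumes a C99
   uint32_t.  */

static uint32_t
replicate_byte_32 (unsigned char c)
{
  return (uint32_t) c * 0x01010101u;   /* E.g. 0xAB -> 0xABABABAB.  */
}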
3626
3627 /* Expand expression EXP, which is a call to the memset builtin. Return
3628 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3629 try to get the result in TARGET, if convenient (and in mode MODE if that's
3630 convenient). */
3631
3632 static rtx
3633 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3634 {
3635 if (!validate_arglist (exp,
3636 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3637 return NULL_RTX;
3638 else
3639 {
3640 tree dest = CALL_EXPR_ARG (exp, 0);
3641 tree val = CALL_EXPR_ARG (exp, 1);
3642 tree len = CALL_EXPR_ARG (exp, 2);
3643 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3644 }
3645 }
3646
3647 /* Helper function to do the actual work for expand_builtin_memset. The
3648 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3649 so that this can also be called without constructing an actual CALL_EXPR.
3650 The other arguments and return value are the same as for
3651 expand_builtin_memset. */
3652
3653 static rtx
3654 expand_builtin_memset_args (tree dest, tree val, tree len,
3655 rtx target, enum machine_mode mode, tree orig_exp)
3656 {
3657 tree fndecl, fn;
3658 enum built_in_function fcode;
3659 enum machine_mode val_mode;
3660 char c;
3661 unsigned int dest_align;
3662 rtx dest_mem, dest_addr, len_rtx;
3663 HOST_WIDE_INT expected_size = -1;
3664 unsigned int expected_align = 0;
3665 unsigned HOST_WIDE_INT min_size;
3666 unsigned HOST_WIDE_INT max_size;
3667 unsigned HOST_WIDE_INT probable_max_size;
3668
3669 dest_align = get_pointer_alignment (dest);
3670
3671 /* If DEST is not a pointer type, don't do this operation in-line. */
3672 if (dest_align == 0)
3673 return NULL_RTX;
3674
3675 if (currently_expanding_gimple_stmt)
3676 stringop_block_profile (currently_expanding_gimple_stmt,
3677 &expected_align, &expected_size);
3678
3679 if (expected_align < dest_align)
3680 expected_align = dest_align;
3681
3682 /* If the LEN parameter is zero, return DEST. */
3683 if (integer_zerop (len))
3684 {
3685 /* Evaluate and ignore VAL in case it has side-effects. */
3686 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3687 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3688 }
3689
3690 /* Stabilize the arguments in case we fail. */
3691 dest = builtin_save_expr (dest);
3692 val = builtin_save_expr (val);
3693 len = builtin_save_expr (len);
3694
3695 len_rtx = expand_normal (len);
3696 determine_block_size (len, len_rtx, &min_size, &max_size,
3697 &probable_max_size);
3698 dest_mem = get_memory_rtx (dest, len);
3699 val_mode = TYPE_MODE (unsigned_char_type_node);
3700
3701 if (TREE_CODE (val) != INTEGER_CST)
3702 {
3703 rtx val_rtx;
3704
3705 val_rtx = expand_normal (val);
3706 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3707
3708 /* Assume that we can memset by pieces if we can store
3709 the coefficients by pieces (in the required modes).
3710 We can't pass builtin_memset_gen_str as that emits RTL. */
3711 c = 1;
3712 if (tree_fits_uhwi_p (len)
3713 && can_store_by_pieces (tree_to_uhwi (len),
3714 builtin_memset_read_str, &c, dest_align,
3715 true))
3716 {
3717 val_rtx = force_reg (val_mode, val_rtx);
3718 store_by_pieces (dest_mem, tree_to_uhwi (len),
3719 builtin_memset_gen_str, val_rtx, dest_align,
3720 true, 0);
3721 }
3722 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3723 dest_align, expected_align,
3724 expected_size, min_size, max_size,
3725 probable_max_size))
3726 goto do_libcall;
3727
3728 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3729 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3730 return dest_mem;
3731 }
3732
3733 if (target_char_cast (val, &c))
3734 goto do_libcall;
3735
3736 if (c)
3737 {
3738 if (tree_fits_uhwi_p (len)
3739 && can_store_by_pieces (tree_to_uhwi (len),
3740 builtin_memset_read_str, &c, dest_align,
3741 true))
3742 store_by_pieces (dest_mem, tree_to_uhwi (len),
3743 builtin_memset_read_str, &c, dest_align, true, 0);
3744 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3745 gen_int_mode (c, val_mode),
3746 dest_align, expected_align,
3747 expected_size, min_size, max_size,
3748 probable_max_size))
3749 goto do_libcall;
3750
3751 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3752 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3753 return dest_mem;
3754 }
3755
3756 set_mem_align (dest_mem, dest_align);
3757 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3758 CALL_EXPR_TAILCALL (orig_exp)
3759 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3760 expected_align, expected_size,
3761 min_size, max_size,
3762 probable_max_size);
3763
3764 if (dest_addr == 0)
3765 {
3766 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3767 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3768 }
3769
3770 return dest_addr;
3771
3772 do_libcall:
3773 fndecl = get_callee_fndecl (orig_exp);
3774 fcode = DECL_FUNCTION_CODE (fndecl);
3775 if (fcode == BUILT_IN_MEMSET)
3776 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3777 dest, val, len);
3778 else if (fcode == BUILT_IN_BZERO)
3779 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3780 dest, len);
3781 else
3782 gcc_unreachable ();
3783 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3784 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3785 return expand_call (fn, target, target == const0_rtx);
3786 }
3787
3788 /* Expand expression EXP, which is a call to the bzero builtin. Return
3789 NULL_RTX if we failed; the caller should emit a normal call. */
3790
3791 static rtx
3792 expand_builtin_bzero (tree exp)
3793 {
3794 tree dest, size;
3795 location_t loc = EXPR_LOCATION (exp);
3796
3797 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3798 return NULL_RTX;
3799
3800 dest = CALL_EXPR_ARG (exp, 0);
3801 size = CALL_EXPR_ARG (exp, 1);
3802
3803 /* New argument list transforming bzero(ptr x, int y) to
3804 memset(ptr x, int 0, size_t y). This is done this way
3805 so that if it isn't expanded inline, we fall back to
3806 calling bzero instead of memset. */
3807
3808 return expand_builtin_memset_args (dest, integer_zero_node,
3809 fold_convert_loc (loc,
3810 size_type_node, size),
3811 const0_rtx, VOIDmode, exp);
3812 }
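
/* Illustrative sketch, not part of GCC: the rewrite performed above,
   at the source level.  The name bzero_as_memset is hypothetical.  */

static void
bzero_as_memset (void *p, size_t n)
{
  memset (p, 0, n);   /* bzero (p, n) is exactly memset (p, 0, n).  */
}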
3813
3814 /* Expand expression EXP, which is a call to the memcmp built-in function.
3815 Return NULL_RTX if we failed and the caller should emit a normal call,
3816 otherwise try to get the result in TARGET, if convenient (and in mode
3817 MODE, if that's convenient). */
3818
3819 static rtx
3820 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3821 ATTRIBUTE_UNUSED enum machine_mode mode)
3822 {
3823 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3824
3825 if (!validate_arglist (exp,
3826 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3827 return NULL_RTX;
3828
3829 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3830 implementing memcmp because it will stop if it encounters two
3831 zero bytes. */
3832 #if defined HAVE_cmpmemsi
3833 {
3834 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3835 rtx result;
3836 rtx insn;
3837 tree arg1 = CALL_EXPR_ARG (exp, 0);
3838 tree arg2 = CALL_EXPR_ARG (exp, 1);
3839 tree len = CALL_EXPR_ARG (exp, 2);
3840
3841 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3842 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3843 enum machine_mode insn_mode;
3844
3845 if (HAVE_cmpmemsi)
3846 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3847 else
3848 return NULL_RTX;
3849
3850 /* If we don't have POINTER_TYPE, call the function. */
3851 if (arg1_align == 0 || arg2_align == 0)
3852 return NULL_RTX;
3853
3854 /* Make a place to write the result of the instruction. */
3855 result = target;
3856 if (! (result != 0
3857 && REG_P (result) && GET_MODE (result) == insn_mode
3858 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3859 result = gen_reg_rtx (insn_mode);
3860
3861 arg1_rtx = get_memory_rtx (arg1, len);
3862 arg2_rtx = get_memory_rtx (arg2, len);
3863 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3864
3865 /* Set MEM_SIZE as appropriate. */
3866 if (CONST_INT_P (arg3_rtx))
3867 {
3868 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3869 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3870 }
3871
3872 if (HAVE_cmpmemsi)
3873 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3874 GEN_INT (MIN (arg1_align, arg2_align)));
3875 else
3876 gcc_unreachable ();
3877
3878 if (insn)
3879 emit_insn (insn);
3880 else
3881 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3882 TYPE_MODE (integer_type_node), 3,
3883 XEXP (arg1_rtx, 0), Pmode,
3884 XEXP (arg2_rtx, 0), Pmode,
3885 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3886 TYPE_UNSIGNED (sizetype)),
3887 TYPE_MODE (sizetype));
3888
3889 /* Return the value in the proper mode for this function. */
3890 mode = TYPE_MODE (TREE_TYPE (exp));
3891 if (GET_MODE (result) == mode)
3892 return result;
3893 else if (target != 0)
3894 {
3895 convert_move (target, result, 0);
3896 return target;
3897 }
3898 else
3899 return convert_to_mode (mode, result, 0);
3900 }
3901 #endif /* HAVE_cmpmemsi. */
3902
3903 return NULL_RTX;
3904 }
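
/* Illustrative sketch, not part of GCC: why a string-compare pattern
   cannot implement memcmp, as the comment above notes.  A string
   compare stops at a matching pair of NUL bytes; memcmp must examine
   all LEN bytes.  The name demo_embedded_nul is hypothetical.  */

static int
demo_embedded_nul (void)
{
  int s = strncmp ("a\0b", "a\0c", 3);   /* 0: equal as strings.  */
  int m = memcmp ("a\0b", "a\0c", 3);    /* Negative: 'b' < 'c'.  */
  return s == 0 && m < 0;                /* Yields 1.  */
}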
3905
3906 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3907 if we failed; the caller should emit a normal call, otherwise try to get
3908 the result in TARGET, if convenient. */
3909
3910 static rtx
3911 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3912 {
3913 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3914 return NULL_RTX;
3915
3916 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3917 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3918 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3919 {
3920 rtx arg1_rtx, arg2_rtx;
3921 rtx result, insn = NULL_RTX;
3922 tree fndecl, fn;
3923 tree arg1 = CALL_EXPR_ARG (exp, 0);
3924 tree arg2 = CALL_EXPR_ARG (exp, 1);
3925
3926 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3927 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3928
3929 /* If we don't have POINTER_TYPE, call the function. */
3930 if (arg1_align == 0 || arg2_align == 0)
3931 return NULL_RTX;
3932
3933 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
3934 arg1 = builtin_save_expr (arg1);
3935 arg2 = builtin_save_expr (arg2);
3936
3937 arg1_rtx = get_memory_rtx (arg1, NULL);
3938 arg2_rtx = get_memory_rtx (arg2, NULL);
3939
3940 #ifdef HAVE_cmpstrsi
3941 /* Try to call cmpstrsi. */
3942 if (HAVE_cmpstrsi)
3943 {
3944 enum machine_mode insn_mode
3945 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3946
3947 /* Make a place to write the result of the instruction. */
3948 result = target;
3949 if (! (result != 0
3950 && REG_P (result) && GET_MODE (result) == insn_mode
3951 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3952 result = gen_reg_rtx (insn_mode);
3953
3954 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3955 GEN_INT (MIN (arg1_align, arg2_align)));
3956 }
3957 #endif
3958 #ifdef HAVE_cmpstrnsi
3959 /* Try to determine at least one length and call cmpstrnsi. */
3960 if (!insn && HAVE_cmpstrnsi)
3961 {
3962 tree len;
3963 rtx arg3_rtx;
3964
3965 enum machine_mode insn_mode
3966 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3967 tree len1 = c_strlen (arg1, 1);
3968 tree len2 = c_strlen (arg2, 1);
3969
3970 if (len1)
3971 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3972 if (len2)
3973 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3974
3975 /* If we don't have a constant length for the first, use the length
3976 of the second, if we know it. We don't require a constant for
3977 this case; some cost analysis could be done if both are available
3978 but neither is constant. For now, assume they're equally cheap,
3979 unless one has side effects. If both strings have constant lengths,
3980 use the smaller. */
3981
3982 if (!len1)
3983 len = len2;
3984 else if (!len2)
3985 len = len1;
3986 else if (TREE_SIDE_EFFECTS (len1))
3987 len = len2;
3988 else if (TREE_SIDE_EFFECTS (len2))
3989 len = len1;
3990 else if (TREE_CODE (len1) != INTEGER_CST)
3991 len = len2;
3992 else if (TREE_CODE (len2) != INTEGER_CST)
3993 len = len1;
3994 else if (tree_int_cst_lt (len1, len2))
3995 len = len1;
3996 else
3997 len = len2;
3998
3999 /* If both arguments have side effects, we cannot optimize. */
4000 if (!len || TREE_SIDE_EFFECTS (len))
4001 goto do_libcall;
4002
4003 arg3_rtx = expand_normal (len);
4004
4005 /* Make a place to write the result of the instruction. */
4006 result = target;
4007 if (! (result != 0
4008 && REG_P (result) && GET_MODE (result) == insn_mode
4009 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4010 result = gen_reg_rtx (insn_mode);
4011
4012 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4013 GEN_INT (MIN (arg1_align, arg2_align)));
4014 }
4015 #endif
4016
4017 if (insn)
4018 {
4019 enum machine_mode mode;
4020 emit_insn (insn);
4021
4022 /* Return the value in the proper mode for this function. */
4023 mode = TYPE_MODE (TREE_TYPE (exp));
4024 if (GET_MODE (result) == mode)
4025 return result;
4026 if (target == 0)
4027 return convert_to_mode (mode, result, 0);
4028 convert_move (target, result, 0);
4029 return target;
4030 }
4031
4032 /* Expand the library call ourselves using a stabilized argument
4033 list to avoid re-evaluating the function's arguments twice. */
4034 #ifdef HAVE_cmpstrnsi
4035 do_libcall:
4036 #endif
4037 fndecl = get_callee_fndecl (exp);
4038 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4039 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4040 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4041 return expand_call (fn, target, target == const0_rtx);
4042 }
4043 #endif
4044 return NULL_RTX;
4045 }
4046
4047 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4048 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4049 the result in TARGET, if convenient. */
4050
4051 static rtx
4052 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4053 ATTRIBUTE_UNUSED enum machine_mode mode)
4054 {
4055 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4056
4057 if (!validate_arglist (exp,
4058 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4059 return NULL_RTX;
4060
4061 /* If c_strlen can determine an expression for one of the string
4062 lengths, and it doesn't have side effects, then emit cmpstrnsi
4063 using length MIN(strlen(string)+1, arg3). */
4064 #ifdef HAVE_cmpstrnsi
4065 if (HAVE_cmpstrnsi)
4066 {
4067 tree len, len1, len2;
4068 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4069 rtx result, insn;
4070 tree fndecl, fn;
4071 tree arg1 = CALL_EXPR_ARG (exp, 0);
4072 tree arg2 = CALL_EXPR_ARG (exp, 1);
4073 tree arg3 = CALL_EXPR_ARG (exp, 2);
4074
4075 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4076 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4077 enum machine_mode insn_mode
4078 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4079
4080 len1 = c_strlen (arg1, 1);
4081 len2 = c_strlen (arg2, 1);
4082
4083 if (len1)
4084 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4085 if (len2)
4086 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4087
4088 /* If we don't have a constant length for the first, use the length
4089 of the second, if we know it. We don't require a constant for
4090 this case; some cost analysis could be done if both are available
4091 but neither is constant. For now, assume they're equally cheap,
4092 unless one has side effects. If both strings have constant lengths,
4093 use the smaller. */
4094
4095 if (!len1)
4096 len = len2;
4097 else if (!len2)
4098 len = len1;
4099 else if (TREE_SIDE_EFFECTS (len1))
4100 len = len2;
4101 else if (TREE_SIDE_EFFECTS (len2))
4102 len = len1;
4103 else if (TREE_CODE (len1) != INTEGER_CST)
4104 len = len2;
4105 else if (TREE_CODE (len2) != INTEGER_CST)
4106 len = len1;
4107 else if (tree_int_cst_lt (len1, len2))
4108 len = len1;
4109 else
4110 len = len2;
4111
4112 /* If both arguments have side effects, we cannot optimize. */
4113 if (!len || TREE_SIDE_EFFECTS (len))
4114 return NULL_RTX;
4115
4116 /* The actual new length parameter is MIN(len,arg3). */
4117 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4118 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4119
4120 /* If we don't have POINTER_TYPE, call the function. */
4121 if (arg1_align == 0 || arg2_align == 0)
4122 return NULL_RTX;
4123
4124 /* Make a place to write the result of the instruction. */
4125 result = target;
4126 if (! (result != 0
4127 && REG_P (result) && GET_MODE (result) == insn_mode
4128 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4129 result = gen_reg_rtx (insn_mode);
4130
4131 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4132 arg1 = builtin_save_expr (arg1);
4133 arg2 = builtin_save_expr (arg2);
4134 len = builtin_save_expr (len);
4135
4136 arg1_rtx = get_memory_rtx (arg1, len);
4137 arg2_rtx = get_memory_rtx (arg2, len);
4138 arg3_rtx = expand_normal (len);
4139 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4140 GEN_INT (MIN (arg1_align, arg2_align)));
4141 if (insn)
4142 {
4143 emit_insn (insn);
4144
4145 /* Return the value in the proper mode for this function. */
4146 mode = TYPE_MODE (TREE_TYPE (exp));
4147 if (GET_MODE (result) == mode)
4148 return result;
4149 if (target == 0)
4150 return convert_to_mode (mode, result, 0);
4151 convert_move (target, result, 0);
4152 return target;
4153 }
4154
4155 /* Expand the library call ourselves using a stabilized argument
4156 list to avoid re-evaluating the function's arguments twice. */
4157 fndecl = get_callee_fndecl (exp);
4158 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4159 arg1, arg2, len);
4160 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4161 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4162 return expand_call (fn, target, target == const0_rtx);
4163 }
4164 #endif
4165 return NULL_RTX;
4166 }
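
/* Illustrative sketch, not part of GCC: the bound computed above.
   Comparing past the NUL of a string cannot change the result, so
   when strlen (a) is known at compile time,
   strncmp (a, b, n) == strncmp (a, b, MIN (n, strlen (a) + 1)).
   The name strncmp_bounded is hypothetical.  */

static int
strncmp_bounded (const char *a, const char *b, size_t n)
{
  size_t la = strlen (a) + 1;        /* Length including the NUL.  */
  size_t bound = n < la ? n : la;    /* MIN (n, strlen (a) + 1).  */
  return strncmp (a, b, bound);      /* Same result as strncmp (a, b, n).  */
}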
4167
4168 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4169 if that's convenient. */
4170
4171 rtx
4172 expand_builtin_saveregs (void)
4173 {
4174 rtx val, seq;
4175
4176 /* Don't do __builtin_saveregs more than once in a function.
4177 Save the result of the first call and reuse it. */
4178 if (saveregs_value != 0)
4179 return saveregs_value;
4180
4181 /* When this function is called, it means that registers must be
4182 saved on entry to this function. So we migrate the call to the
4183 first insn of this function. */
4184
4185 start_sequence ();
4186
4187 /* Do whatever the machine needs done in this case. */
4188 val = targetm.calls.expand_builtin_saveregs ();
4189
4190 seq = get_insns ();
4191 end_sequence ();
4192
4193 saveregs_value = val;
4194
4195 /* Put the insns after the NOTE that starts the function. If this
4196 is inside a start_sequence, make the outer-level insn chain current, so
4197 the code is placed at the start of the function. */
4198 push_topmost_sequence ();
4199 emit_insn_after (seq, entry_of_function ());
4200 pop_topmost_sequence ();
4201
4202 return val;
4203 }
4204
4205 /* Expand a call to __builtin_next_arg. */
4206
4207 static rtx
4208 expand_builtin_next_arg (void)
4209 {
4210 /* Checking arguments is already done in fold_builtin_next_arg,
4211 which must be called before this function. */
4212 return expand_binop (ptr_mode, add_optab,
4213 crtl->args.internal_arg_pointer,
4214 crtl->args.arg_offset_rtx,
4215 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4216 }
4217
4218 /* Make it easier for the backends by protecting the valist argument
4219 from multiple evaluations. */
4220
4221 static tree
4222 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4223 {
4224 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4225
4226 /* The current way of determining the type of valist is completely
4227 bogus. We should have the information on the va builtin instead. */
4228 if (!vatype)
4229 vatype = targetm.fn_abi_va_list (cfun->decl);
4230
4231 if (TREE_CODE (vatype) == ARRAY_TYPE)
4232 {
4233 if (TREE_SIDE_EFFECTS (valist))
4234 valist = save_expr (valist);
4235
4236 /* For this case, the backends will be expecting a pointer to
4237 vatype, but it's possible we've actually been given an array
4238 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4239 So fix it. */
4240 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4241 {
4242 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4243 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4244 }
4245 }
4246 else
4247 {
4248 tree pt = build_pointer_type (vatype);
4249
4250 if (! needs_lvalue)
4251 {
4252 if (! TREE_SIDE_EFFECTS (valist))
4253 return valist;
4254
4255 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4256 TREE_SIDE_EFFECTS (valist) = 1;
4257 }
4258
4259 if (TREE_SIDE_EFFECTS (valist))
4260 valist = save_expr (valist);
4261 valist = fold_build2_loc (loc, MEM_REF,
4262 vatype, valist, build_int_cst (pt, 0));
4263 }
4264
4265 return valist;
4266 }
4267
4268 /* The "standard" definition of va_list is void*. */
4269
4270 tree
4271 std_build_builtin_va_list (void)
4272 {
4273 return ptr_type_node;
4274 }
4275
4276 /* The "standard" abi va_list is va_list_type_node. */
4277
4278 tree
4279 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4280 {
4281 return va_list_type_node;
4282 }
4283
4284 /* The "standard" type of va_list is va_list_type_node. */
4285
4286 tree
4287 std_canonical_va_list_type (tree type)
4288 {
4289 tree wtype, htype;
4290
4291 if (INDIRECT_REF_P (type))
4292 type = TREE_TYPE (type);
4293 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4294 type = TREE_TYPE (type);
4295 wtype = va_list_type_node;
4296 htype = type;
4297 /* Handle structure va_list types. */
4298 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4299 htype = TREE_TYPE (htype);
4300 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4301 {
4302 /* If va_list is an array type, the argument may have decayed
4303 to a pointer type, e.g. by being passed to another function.
4304 In that case, unwrap both types so that we can compare the
4305 underlying records. */
4306 if (TREE_CODE (htype) == ARRAY_TYPE
4307 || POINTER_TYPE_P (htype))
4308 {
4309 wtype = TREE_TYPE (wtype);
4310 htype = TREE_TYPE (htype);
4311 }
4312 }
4313 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4314 return va_list_type_node;
4315
4316 return NULL_TREE;
4317 }
4318
4319 /* The "standard" implementation of va_start: just assign `nextarg' to
4320 the variable. */
4321
4322 void
4323 std_expand_builtin_va_start (tree valist, rtx nextarg)
4324 {
4325 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4326 convert_move (va_r, nextarg, 0);
4327 }
4328
4329 /* Expand EXP, a call to __builtin_va_start. */
4330
4331 static rtx
4332 expand_builtin_va_start (tree exp)
4333 {
4334 rtx nextarg;
4335 tree valist;
4336 location_t loc = EXPR_LOCATION (exp);
4337
4338 if (call_expr_nargs (exp) < 2)
4339 {
4340 error_at (loc, "too few arguments to function %<va_start%>");
4341 return const0_rtx;
4342 }
4343
4344 if (fold_builtin_next_arg (exp, true))
4345 return const0_rtx;
4346
4347 nextarg = expand_builtin_next_arg ();
4348 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4349
4350 if (targetm.expand_builtin_va_start)
4351 targetm.expand_builtin_va_start (valist, nextarg);
4352 else
4353 std_expand_builtin_va_start (valist, nextarg);
4354
4355 return const0_rtx;
4356 }
4357
4358 /* Expand EXP, a call to __builtin_va_end. */
4359
4360 static rtx
4361 expand_builtin_va_end (tree exp)
4362 {
4363 tree valist = CALL_EXPR_ARG (exp, 0);
4364
4365 /* Evaluate for side effects, if needed. I hate macros that don't
4366 do that. */
4367 if (TREE_SIDE_EFFECTS (valist))
4368 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4369
4370 return const0_rtx;
4371 }
4372
4373 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4374 builtin rather than just as an assignment in stdarg.h because of the
4375 nastiness of array-type va_list types. */
4376
4377 static rtx
4378 expand_builtin_va_copy (tree exp)
4379 {
4380 tree dst, src, t;
4381 location_t loc = EXPR_LOCATION (exp);
4382
4383 dst = CALL_EXPR_ARG (exp, 0);
4384 src = CALL_EXPR_ARG (exp, 1);
4385
4386 dst = stabilize_va_list_loc (loc, dst, 1);
4387 src = stabilize_va_list_loc (loc, src, 0);
4388
4389 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4390
4391 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4392 {
4393 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4394 TREE_SIDE_EFFECTS (t) = 1;
4395 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4396 }
4397 else
4398 {
4399 rtx dstb, srcb, size;
4400
4401 /* Evaluate to pointers. */
4402 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4403 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4404 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4405 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4406
4407 dstb = convert_memory_address (Pmode, dstb);
4408 srcb = convert_memory_address (Pmode, srcb);
4409
4410 /* "Dereference" to BLKmode memories. */
4411 dstb = gen_rtx_MEM (BLKmode, dstb);
4412 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4413 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4414 srcb = gen_rtx_MEM (BLKmode, srcb);
4415 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4416 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4417
4418 /* Copy. */
4419 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4420 }
4421
4422 return const0_rtx;
4423 }
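
/* Illustrative sketch, not part of GCC: the user-level behavior the
   expansion above provides.  On targets whose va_list is an array
   type, copying it is a block move rather than a scalar assignment,
   and va_copy hides that difference.  The function sum_twice is a
   hypothetical example; it assumes the <stdarg.h> declarations.  */

static int
sum_twice (int n, ...)
{
  va_list ap, aq;
  int i, total = 0;

  va_start (ap, n);
  va_copy (aq, ap);              /* Independent cursor over the same args.  */
  for (i = 0; i < n; i++)
    total += va_arg (ap, int);
  for (i = 0; i < n; i++)
    total += va_arg (aq, int);
  va_end (aq);
  va_end (ap);
  return total;                  /* sum_twice (2, 3, 4) yields 14.  */
}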
4424
4425 /* Expand a call to one of the builtin functions __builtin_frame_address or
4426 __builtin_return_address. */
4427
4428 static rtx
4429 expand_builtin_frame_address (tree fndecl, tree exp)
4430 {
4431 /* The argument must be a nonnegative integer constant.
4432 It counts the number of frames to scan up the stack.
4433 The value is the return address saved in that frame. */
4434 if (call_expr_nargs (exp) == 0)
4435 /* Warning about missing arg was already issued. */
4436 return const0_rtx;
4437 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4438 {
4439 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4440 error ("invalid argument to %<__builtin_frame_address%>");
4441 else
4442 error ("invalid argument to %<__builtin_return_address%>");
4443 return const0_rtx;
4444 }
4445 else
4446 {
4447 rtx tem
4448 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4449 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4450
4451 /* Some ports cannot access arbitrary stack frames. */
4452 if (tem == NULL)
4453 {
4454 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4455 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4456 else
4457 warning (0, "unsupported argument to %<__builtin_return_address%>");
4458 return const0_rtx;
4459 }
4460
4461 /* For __builtin_frame_address, return what we've got. */
4462 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4463 return tem;
4464
4465 if (!REG_P (tem)
4466 && ! CONSTANT_P (tem))
4467 tem = copy_addr_to_reg (tem);
4468 return tem;
4469 }
4470 }
4471
4472 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4473 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4474 is the same as for allocate_dynamic_stack_space. */
4475
4476 static rtx
4477 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4478 {
4479 rtx op0;
4480 rtx result;
4481 bool valid_arglist;
4482 unsigned int align;
4483 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4484 == BUILT_IN_ALLOCA_WITH_ALIGN);
4485
4486 valid_arglist
4487 = (alloca_with_align
4488 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4489 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4490
4491 if (!valid_arglist)
4492 return NULL_RTX;
4493
4494 /* Compute the argument. */
4495 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4496
4497 /* Compute the alignment. */
4498 align = (alloca_with_align
4499 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4500 : BIGGEST_ALIGNMENT);
4501
4502 /* Allocate the desired space. */
4503 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4504 result = convert_memory_address (ptr_mode, result);
4505
4506 return result;
4507 }
4508
4509 /* Expand a call to bswap builtin in EXP.
4510 Return NULL_RTX if a normal call should be emitted rather than expanding the
4511 function in-line. If convenient, the result should be placed in TARGET.
4512 SUBTARGET may be used as the target for computing one of EXP's operands. */
4513
4514 static rtx
4515 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4516 rtx subtarget)
4517 {
4518 tree arg;
4519 rtx op0;
4520
4521 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4522 return NULL_RTX;
4523
4524 arg = CALL_EXPR_ARG (exp, 0);
4525 op0 = expand_expr (arg,
4526 subtarget && GET_MODE (subtarget) == target_mode
4527 ? subtarget : NULL_RTX,
4528 target_mode, EXPAND_NORMAL);
4529 if (GET_MODE (op0) != target_mode)
4530 op0 = convert_to_mode (target_mode, op0, 1);
4531
4532 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4533
4534 gcc_assert (target);
4535
4536 return convert_to_mode (target_mode, target, 1);
4537 }
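
/* Illustrative sketch, not part of GCC: the operation bswap_optab
   performs, written portably for 32 bits.  The name bswap32_portable
   is hypothetical and assumes a C99 uint32_t.  */

static uint32_t
bswap32_portable (uint32_t x)
{
  return (x >> 24)
         | ((x >> 8) & 0x0000ff00u)
         | ((x << 8) & 0x00ff0000u)
         | (x << 24);               /* 0x11223344 -> 0x44332211.  */
}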
4538
4539 /* Expand a call to a unary builtin in EXP.
4540 Return NULL_RTX if a normal call should be emitted rather than expanding the
4541 function in-line. If convenient, the result should be placed in TARGET.
4542 SUBTARGET may be used as the target for computing one of EXP's operands. */
4543
4544 static rtx
4545 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4546 rtx subtarget, optab op_optab)
4547 {
4548 rtx op0;
4549
4550 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4551 return NULL_RTX;
4552
4553 /* Compute the argument. */
4554 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4555 (subtarget
4556 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4557 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4558 VOIDmode, EXPAND_NORMAL);
4559 /* Compute op, into TARGET if possible.
4560 Set TARGET to wherever the result comes back. */
4561 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4562 op_optab, op0, target, op_optab != clrsb_optab);
4563 gcc_assert (target);
4564
4565 return convert_to_mode (target_mode, target, 0);
4566 }
4567
4568 /* Expand a call to __builtin_expect. We just return our argument
4569 as the builtin_expect semantics should have already been applied by
4570 the tree branch prediction pass. */
4571
4572 static rtx
4573 expand_builtin_expect (tree exp, rtx target)
4574 {
4575 tree arg;
4576
4577 if (call_expr_nargs (exp) < 2)
4578 return const0_rtx;
4579 arg = CALL_EXPR_ARG (exp, 0);
4580
4581 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4582 /* When guessing was done, the hints should be already stripped away. */
4583 gcc_assert (!flag_guess_branch_prob
4584 || optimize == 0 || seen_error ());
4585 return target;
4586 }
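
/* Illustrative sketch, not part of GCC: the user-level form whose
   hint has already been consumed by branch prediction when expansion
   runs, which is why only the first argument is returned above.  The
   name clamp_nonnegative is hypothetical.  */

static int
clamp_nonnegative (int x)
{
  if (__builtin_expect (x < 0, 0))   /* Hint: x < 0 is unlikely.  */
    return 0;
  return x;
}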
4587
4588 /* Expand a call to __builtin_assume_aligned. We just return our first
4589 argument as the builtin_assume_aligned semantics should have already
4590 been applied by CCP. */
4591
4592 static rtx
4593 expand_builtin_assume_aligned (tree exp, rtx target)
4594 {
4595 if (call_expr_nargs (exp) < 2)
4596 return const0_rtx;
4597 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4598 EXPAND_NORMAL);
4599 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4600 && (call_expr_nargs (exp) < 3
4601 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4602 return target;
4603 }
4604
4605 void
4606 expand_builtin_trap (void)
4607 {
4608 #ifdef HAVE_trap
4609 if (HAVE_trap)
4610 {
4611 rtx insn = emit_insn (gen_trap ());
4612 /* For trap insns when not accumulating outgoing args force
4613 REG_ARGS_SIZE note to prevent crossjumping of calls with
4614 different args sizes. */
4615 if (!ACCUMULATE_OUTGOING_ARGS)
4616 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4617 }
4618 else
4619 #endif
4620 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4621 emit_barrier ();
4622 }
4623
4624 /* Expand a call to __builtin_unreachable. We do nothing except emit
4625 a barrier saying that control flow will not pass here.
4626
4627 It is the responsibility of the program being compiled to ensure
4628 that control flow never reaches __builtin_unreachable. */
4629 static void
4630 expand_builtin_unreachable (void)
4631 {
4632 emit_barrier ();
4633 }
4634
4635 /* Expand EXP, a call to fabs, fabsf or fabsl.
4636 Return NULL_RTX if a normal call should be emitted rather than expanding
4637 the function inline. If convenient, the result should be placed
4638 in TARGET. SUBTARGET may be used as the target for computing
4639 the operand. */
4640
4641 static rtx
4642 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4643 {
4644 enum machine_mode mode;
4645 tree arg;
4646 rtx op0;
4647
4648 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4649 return NULL_RTX;
4650
4651 arg = CALL_EXPR_ARG (exp, 0);
4652 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4653 mode = TYPE_MODE (TREE_TYPE (arg));
4654 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4655 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4656 }
4657
4658 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4659 Return NULL if a normal call should be emitted rather than expanding the
4660 function inline. If convenient, the result should be placed in TARGET.
4661 SUBTARGET may be used as the target for computing the operand. */
4662
4663 static rtx
4664 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4665 {
4666 rtx op0, op1;
4667 tree arg;
4668
4669 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4670 return NULL_RTX;
4671
4672 arg = CALL_EXPR_ARG (exp, 0);
4673 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4674
4675 arg = CALL_EXPR_ARG (exp, 1);
4676 op1 = expand_normal (arg);
4677
4678 return expand_copysign (op0, op1, target);
4679 }
4680
4681 /* Create a new constant string literal and return a char* pointer to it.
4682 The STRING_CST value is the LEN characters at STR. */
4683 tree
4684 build_string_literal (int len, const char *str)
4685 {
4686 tree t, elem, index, type;
4687
4688 t = build_string (len, str);
4689 elem = build_type_variant (char_type_node, 1, 0);
4690 index = build_index_type (size_int (len - 1));
4691 type = build_array_type (elem, index);
4692 TREE_TYPE (t) = type;
4693 TREE_CONSTANT (t) = 1;
4694 TREE_READONLY (t) = 1;
4695 TREE_STATIC (t) = 1;
4696
4697 type = build_pointer_type (elem);
4698 t = build1 (ADDR_EXPR, type,
4699 build4 (ARRAY_REF, elem,
4700 t, integer_zero_node, NULL_TREE, NULL_TREE));
4701 return t;
4702 }
4703
4704 /* Expand a call to __builtin___clear_cache. */
4705
4706 static rtx
4707 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4708 {
4709 #ifndef HAVE_clear_cache
4710 #ifdef CLEAR_INSN_CACHE
4711 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4712 does something. Just do the default expansion to a call to
4713 __clear_cache(). */
4714 return NULL_RTX;
4715 #else
4716 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4717 does nothing. There is no need to call it. Do nothing. */
4718 return const0_rtx;
4719 #endif /* CLEAR_INSN_CACHE */
4720 #else
4721 /* We have a "clear_cache" insn, and it will handle everything. */
4722 tree begin, end;
4723 rtx begin_rtx, end_rtx;
4724
4725 /* We must not expand to a library call. If we did, any
4726 fallback library function in libgcc that might contain a call to
4727 __builtin___clear_cache() would recurse infinitely. */
4728 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4729 {
4730 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4731 return const0_rtx;
4732 }
4733
4734 if (HAVE_clear_cache)
4735 {
4736 struct expand_operand ops[2];
4737
4738 begin = CALL_EXPR_ARG (exp, 0);
4739 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4740
4741 end = CALL_EXPR_ARG (exp, 1);
4742 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4743
4744 create_address_operand (&ops[0], begin_rtx);
4745 create_address_operand (&ops[1], end_rtx);
4746 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4747 return const0_rtx;
4748 }
4749 return const0_rtx;
4750 #endif /* HAVE_clear_cache */
4751 }
4752
4753 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4754
4755 static rtx
4756 round_trampoline_addr (rtx tramp)
4757 {
4758 rtx temp, addend, mask;
4759
4760 /* If we don't need too much alignment, we'll have been guaranteed
4761 proper alignment by get_trampoline_type. */
4762 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4763 return tramp;
4764
4765 /* Round address up to desired boundary. */
4766 temp = gen_reg_rtx (Pmode);
4767 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4768 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4769
4770 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4771 temp, 0, OPTAB_LIB_WIDEN);
4772 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4773 temp, 0, OPTAB_LIB_WIDEN);
4774
4775 return tramp;
4776 }
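
/* Illustrative sketch, not part of GCC: the same rounding in plain
   integer arithmetic.  ALIGN must be a power of two; the AND mask
   -ALIGN used above equals ~(ALIGN - 1).  The name round_up_to is
   hypothetical.  */

static unsigned long
round_up_to (unsigned long addr, unsigned long align)
{
  return (addr + align - 1) & ~(align - 1);   /* E.g. (13, 8) -> 16.  */
}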
4777
4778 static rtx
4779 expand_builtin_init_trampoline (tree exp, bool onstack)
4780 {
4781 tree t_tramp, t_func, t_chain;
4782 rtx m_tramp, r_tramp, r_chain, tmp;
4783
4784 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4785 POINTER_TYPE, VOID_TYPE))
4786 return NULL_RTX;
4787
4788 t_tramp = CALL_EXPR_ARG (exp, 0);
4789 t_func = CALL_EXPR_ARG (exp, 1);
4790 t_chain = CALL_EXPR_ARG (exp, 2);
4791
4792 r_tramp = expand_normal (t_tramp);
4793 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4794 MEM_NOTRAP_P (m_tramp) = 1;
4795
4796 /* If ONSTACK, the TRAMP argument should be the address of a field
4797 within the local function's FRAME decl. Either way, let's see if
4798 we can fill in the MEM_ATTRs for this memory. */
4799 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4800 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4801
4802 /* Creator of a heap trampoline is responsible for making sure the
4803 address is aligned to at least STACK_BOUNDARY. Normally malloc
4804 will ensure this anyhow. */
4805 tmp = round_trampoline_addr (r_tramp);
4806 if (tmp != r_tramp)
4807 {
4808 m_tramp = change_address (m_tramp, BLKmode, tmp);
4809 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4810 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4811 }
4812
4813 /* The FUNC argument should be the address of the nested function.
4814 Extract the actual function decl to pass to the hook. */
4815 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4816 t_func = TREE_OPERAND (t_func, 0);
4817 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4818
4819 r_chain = expand_normal (t_chain);
4820
4821 /* Generate insns to initialize the trampoline. */
4822 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4823
4824 if (onstack)
4825 {
4826 trampolines_created = 1;
4827
4828 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4829 "trampoline generated for nested function %qD", t_func);
4830 }
4831
4832 return const0_rtx;
4833 }
4834
4835 static rtx
4836 expand_builtin_adjust_trampoline (tree exp)
4837 {
4838 rtx tramp;
4839
4840 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4841 return NULL_RTX;
4842
4843 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4844 tramp = round_trampoline_addr (tramp);
4845 if (targetm.calls.trampoline_adjust_address)
4846 tramp = targetm.calls.trampoline_adjust_address (tramp);
4847
4848 return tramp;
4849 }
4850
4851 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4852 function. The function first checks whether the back end provides
4853 an insn to implement signbit for the respective mode. If not, it
4854 checks whether the floating point format of the value is such that
4855 the sign bit can be extracted. If that is not the case, the
4856 function returns NULL_RTX to indicate that a normal call should be
4857 emitted rather than expanding the function in-line. EXP is the
4858 expression that is a call to the builtin function; if convenient,
4859 the result should be placed in TARGET. */
4860 static rtx
4861 expand_builtin_signbit (tree exp, rtx target)
4862 {
4863 const struct real_format *fmt;
4864 enum machine_mode fmode, imode, rmode;
4865 tree arg;
4866 int word, bitpos;
4867 enum insn_code icode;
4868 rtx temp;
4869 location_t loc = EXPR_LOCATION (exp);
4870
4871 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4872 return NULL_RTX;
4873
4874 arg = CALL_EXPR_ARG (exp, 0);
4875 fmode = TYPE_MODE (TREE_TYPE (arg));
4876 rmode = TYPE_MODE (TREE_TYPE (exp));
4877 fmt = REAL_MODE_FORMAT (fmode);
4878
4879 arg = builtin_save_expr (arg);
4880
4881 /* Expand the argument yielding a RTX expression. */
4882 temp = expand_normal (arg);
4883
4884 /* Check if the back end provides an insn that handles signbit for the
4885 argument's mode. */
4886 icode = optab_handler (signbit_optab, fmode);
4887 if (icode != CODE_FOR_nothing)
4888 {
4889 rtx last = get_last_insn ();
4890 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4891 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4892 return target;
4893 delete_insns_since (last);
4894 }
4895
4896 /* For floating point formats without a sign bit, implement signbit
4897 as "ARG < 0.0". */
4898 bitpos = fmt->signbit_ro;
4899 if (bitpos < 0)
4900 {
4901 /* But we can't do this if the format supports signed zero. */
4902 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4903 return NULL_RTX;
4904
4905 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4906 build_real (TREE_TYPE (arg), dconst0));
4907 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4908 }
4909
4910 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4911 {
4912 imode = int_mode_for_mode (fmode);
4913 if (imode == BLKmode)
4914 return NULL_RTX;
4915 temp = gen_lowpart (imode, temp);
4916 }
4917 else
4918 {
4919 imode = word_mode;
4920 /* Handle targets with different FP word orders. */
4921 if (FLOAT_WORDS_BIG_ENDIAN)
4922 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4923 else
4924 word = bitpos / BITS_PER_WORD;
4925 temp = operand_subword_force (temp, word, fmode);
4926 bitpos = bitpos % BITS_PER_WORD;
4927 }
4928
4929 /* Force the intermediate word_mode (or narrower) result into a
4930 register. This avoids attempting to create paradoxical SUBREGs
4931 of floating point modes below. */
4932 temp = force_reg (imode, temp);
4933
4934 /* If the bitpos is within the "result mode" lowpart, the operation
4935 can be implemented with a single bitwise AND. Otherwise, we need
4936 a right shift and an AND. */
4937
4938 if (bitpos < GET_MODE_BITSIZE (rmode))
4939 {
4940 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4941
4942 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4943 temp = gen_lowpart (rmode, temp);
4944 temp = expand_binop (rmode, and_optab, temp,
4945 immed_wide_int_const (mask, rmode),
4946 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4947 }
4948 else
4949 {
4950 /* Perform a logical right shift to place the signbit in the least
4951 significant bit, then truncate the result to the desired mode
4952 and mask just this bit. */
4953 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4954 temp = gen_lowpart (rmode, temp);
4955 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4956 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4957 }
4958
4959 return temp;
4960 }
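
/* Illustrative sketch, not part of GCC: the bit extraction performed
   above, for an IEEE double whose sign occupies bit 63.  The name
   signbit_of_double is hypothetical and assumes a C99 uint64_t and
   an 8-byte double.  */

static int
signbit_of_double (double d)
{
  uint64_t bits;
  memcpy (&bits, &d, sizeof bits);   /* Reinterpret without aliasing UB.  */
  return (int) (bits >> 63);         /* 1 for negatives, including -0.0.  */
}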
4961
4962 /* Expand fork or exec calls. TARGET is the desired target of the
4963 call. EXP is the call. FN is the
4964 identifier of the actual function. IGNORE is nonzero if the
4965 value is to be ignored. */
4966
4967 static rtx
4968 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4969 {
4970 tree id, decl;
4971 tree call;
4972
4973 /* If we are not profiling, just call the function. */
4974 if (!profile_arc_flag)
4975 return NULL_RTX;
4976
4977 /* Otherwise call the wrapper. This should be equivalent for the rest of
4978 the compiler, so the code does not diverge, and the wrapper may run the
4979 code necessary for keeping the profiling sane. */
4980
4981 switch (DECL_FUNCTION_CODE (fn))
4982 {
4983 case BUILT_IN_FORK:
4984 id = get_identifier ("__gcov_fork");
4985 break;
4986
4987 case BUILT_IN_EXECL:
4988 id = get_identifier ("__gcov_execl");
4989 break;
4990
4991 case BUILT_IN_EXECV:
4992 id = get_identifier ("__gcov_execv");
4993 break;
4994
4995 case BUILT_IN_EXECLP:
4996 id = get_identifier ("__gcov_execlp");
4997 break;
4998
4999 case BUILT_IN_EXECLE:
5000 id = get_identifier ("__gcov_execle");
5001 break;
5002
5003 case BUILT_IN_EXECVP:
5004 id = get_identifier ("__gcov_execvp");
5005 break;
5006
5007 case BUILT_IN_EXECVE:
5008 id = get_identifier ("__gcov_execve");
5009 break;
5010
5011 default:
5012 gcc_unreachable ();
5013 }
5014
5015 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5016 FUNCTION_DECL, id, TREE_TYPE (fn));
5017 DECL_EXTERNAL (decl) = 1;
5018 TREE_PUBLIC (decl) = 1;
5019 DECL_ARTIFICIAL (decl) = 1;
5020 TREE_NOTHROW (decl) = 1;
5021 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5022 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5023 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5024 return expand_call (call, target, ignore);
5025 }
5026
5027
5028 \f
5029 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5030 the pointer in these functions is void*, the tree optimizers may remove
5031 casts. The mode computed in expand_builtin isn't reliable either, due
5032 to __sync_bool_compare_and_swap.
5033
5034 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5035 group of builtins. This gives us log2 of the mode size. */
5036
5037 static inline enum machine_mode
5038 get_builtin_sync_mode (int fcode_diff)
5039 {
5040 /* The size is not negotiable, so ask not to get BLKmode in return
5041 if the target indicates that a smaller size would be better. */
5042 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5043 }
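
/* Illustrative sketch, not part of GCC: the FCODE_DIFF encoding.  The
   _1/_2/_4/_8/_16 variants of each __sync builtin are numbered
   consecutively, so the distance from the _1 code is log2 of the
   access size in bytes.  The name sync_size_from_diff is
   hypothetical.  */

static int
sync_size_from_diff (int fcode_diff)
{
  return 1 << fcode_diff;   /* 0 -> 1, 1 -> 2, 2 -> 4, 3 -> 8, 4 -> 16.  */
}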
5044
5045 /* Expand the memory expression LOC and return the appropriate memory operand
5046 for the builtin_sync operations. */
5047
5048 static rtx
5049 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5050 {
5051 rtx addr, mem;
5052
5053 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5054 addr = convert_memory_address (Pmode, addr);
5055
5056 /* Note that we explicitly do not want any alias information for this
5057 memory, so that we kill all other live memories. Otherwise we don't
5058 satisfy the full barrier semantics of the intrinsic. */
5059 mem = validize_mem (gen_rtx_MEM (mode, addr));
5060
5061 /* The alignment needs to be at least as strict as that of the mode. */
5062 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5063 get_pointer_alignment (loc)));
5064 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5065 MEM_VOLATILE_P (mem) = 1;
5066
5067 return mem;
5068 }
5069
5070 /* Make sure an argument is in the right mode.
5071 EXP is the tree argument.
5072 MODE is the mode it should be in. */
5073
5074 static rtx
5075 expand_expr_force_mode (tree exp, enum machine_mode mode)
5076 {
5077 rtx val;
5078 enum machine_mode old_mode;
5079
5080 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5081 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5082 of CONST_INTs, where we know the old_mode only from the call argument. */
5083
5084 old_mode = GET_MODE (val);
5085 if (old_mode == VOIDmode)
5086 old_mode = TYPE_MODE (TREE_TYPE (exp));
5087 val = convert_modes (mode, old_mode, val, 1);
5088 return val;
5089 }
5090
5091
5092 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5093 EXP is the CALL_EXPR. CODE is the rtx code
5094 that corresponds to the arithmetic or logical operation from the name;
5095 an exception here is that NOT actually means NAND. TARGET is an optional
5096 place for us to store the results; AFTER is true if this is the
5097 fetch_and_xxx form. */
5098
5099 static rtx
5100 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5101 enum rtx_code code, bool after,
5102 rtx target)
5103 {
5104 rtx val, mem;
5105 location_t loc = EXPR_LOCATION (exp);
5106
5107 if (code == NOT && warn_sync_nand)
5108 {
5109 tree fndecl = get_callee_fndecl (exp);
5110 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5111
5112 static bool warned_f_a_n, warned_n_a_f;
5113
5114 switch (fcode)
5115 {
5116 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5117 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5118 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5119 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5120 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5121 if (warned_f_a_n)
5122 break;
5123
5124 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5125 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5126 warned_f_a_n = true;
5127 break;
5128
5129 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5130 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5131 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5132 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5133 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5134 if (warned_n_a_f)
5135 break;
5136
5137 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5138 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5139 warned_n_a_f = true;
5140 break;
5141
5142 default:
5143 gcc_unreachable ();
5144 }
5145 }
5146
5147 /* Expand the operands. */
5148 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5149 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5150
5151 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5152 after);
5153 }
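/* As a sketch of the NAND case warned about above: since GCC 4.4,

     __sync_fetch_and_nand (ptr, v)

   atomically performs *ptr = ~(*ptr & v) and returns the old value,
   whereas releases before 4.4 computed *ptr = ~*ptr & v instead.  */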
5154
5155 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5156 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5157 true if this is the boolean form. TARGET is a place for us to store the
5158 results; this is NOT optional if IS_BOOL is true. */
5159
5160 static rtx
5161 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5162 bool is_bool, rtx target)
5163 {
5164 rtx old_val, new_val, mem;
5165 rtx *pbool, *poval;
5166
5167 /* Expand the operands. */
5168 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5169 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5170 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5171
5172 pbool = poval = NULL;
5173 if (target != const0_rtx)
5174 {
5175 if (is_bool)
5176 pbool = &target;
5177 else
5178 poval = &target;
5179 }
5180 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5181 false, MEMMODEL_SEQ_CST,
5182 MEMMODEL_SEQ_CST))
5183 return NULL_RTX;
5184
5185 return target;
5186 }
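/* The two user-visible forms handled here are, schematically:

     bool __sync_bool_compare_and_swap (T *p, T oldv, T newv);
     T    __sync_val_compare_and_swap  (T *p, T oldv, T newv);

   Both atomically perform "if (*p == oldv) *p = newv"; the bool form
   reports whether the store happened, and the val form returns the
   prior contents of *p.  */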
5187
5188 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5189 general form is actually an atomic exchange, and some targets only
5190 support a reduced form with the second argument being a constant 1.
5191 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5192 the results. */
5193
5194 static rtx
5195 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5196 rtx target)
5197 {
5198 rtx val, mem;
5199
5200 /* Expand the operands. */
5201 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5202 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5203
5204 return expand_sync_lock_test_and_set (target, mem, val);
5205 }
5206
5207 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5208
5209 static void
5210 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5211 {
5212 rtx mem;
5213
5214 /* Expand the operands. */
5215 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5216
5217 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5218 }
5219
5220 /* Given an integer representing an ``enum memmodel'', verify its
5221 correctness and return the memory model enum. */
5222
5223 static enum memmodel
5224 get_memmodel (tree exp)
5225 {
5226 rtx op;
5227 unsigned HOST_WIDE_INT val;
5228
5229 /* If the parameter is not a constant, it's a run-time value, so just
5230 treat it as MEMMODEL_SEQ_CST to avoid annoying run-time checking. */
5231 if (TREE_CODE (exp) != INTEGER_CST)
5232 return MEMMODEL_SEQ_CST;
5233
5234 op = expand_normal (exp);
5235
5236 val = INTVAL (op);
5237 if (targetm.memmodel_check)
5238 val = targetm.memmodel_check (val);
5239 else if (val & ~MEMMODEL_MASK)
5240 {
5241 warning (OPT_Winvalid_memory_model,
5242 "Unknown architecture specifier in memory model to builtin.");
5243 return MEMMODEL_SEQ_CST;
5244 }
5245
5246 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5247 {
5248 warning (OPT_Winvalid_memory_model,
5249 "invalid memory model argument to builtin");
5250 return MEMMODEL_SEQ_CST;
5251 }
5252
5253 return (enum memmodel) val;
5254 }
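/* A sketch of the encoding checked above (the exact bit layout is
   target- and version-specific): the low bits of the argument hold the
   standard model (__ATOMIC_RELAXED through __ATOMIC_SEQ_CST), while
   bits outside MEMMODEL_MASK are reserved for target extensions such
   as the x86 HLE hints; targetm.memmodel_check, when provided, is what
   validates those extra bits.  */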
5255
5256 /* Expand the __atomic_exchange intrinsic:
5257 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5258 EXP is the CALL_EXPR.
5259 TARGET is an optional place for us to store the results. */
5260
5261 static rtx
5262 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5263 {
5264 rtx val, mem;
5265 enum memmodel model;
5266
5267 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5268 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5269 {
5270 error ("invalid memory model for %<__atomic_exchange%>");
5271 return NULL_RTX;
5272 }
5273
5274 if (!flag_inline_atomics)
5275 return NULL_RTX;
5276
5277 /* Expand the operands. */
5278 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5279 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5280
5281 return expand_atomic_exchange (target, mem, val, model);
5282 }
5283
5284 /* Expand the __atomic_compare_exchange intrinsic:
5285 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5286 TYPE desired, BOOL weak,
5287 enum memmodel success,
5288 enum memmodel failure)
5289 EXP is the CALL_EXPR.
5290 TARGET is an optional place for us to store the results. */
5291
5292 static rtx
5293 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5294 rtx target)
5295 {
5296 rtx expect, desired, mem, oldval;
5297 enum memmodel success, failure;
5298 tree weak;
5299 bool is_weak;
5300
5301 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5302 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5303
5304 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5305 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5306 {
5307 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5308 return NULL_RTX;
5309 }
5310
5311 if (failure > success)
5312 {
5313 error ("failure memory model cannot be stronger than success "
5314 "memory model for %<__atomic_compare_exchange%>");
5315 return NULL_RTX;
5316 }
5317
5318 if (!flag_inline_atomics)
5319 return NULL_RTX;
5320
5321 /* Expand the operands. */
5322 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5323
5324 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5325 expect = convert_memory_address (Pmode, expect);
5326 expect = gen_rtx_MEM (mode, expect);
5327 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5328
5329 weak = CALL_EXPR_ARG (exp, 3);
5330 is_weak = false;
5331 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5332 is_weak = true;
5333
5334 oldval = expect;
5335 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5336 &oldval, mem, oldval, desired,
5337 is_weak, success, failure))
5338 return NULL_RTX;
5339
5340 if (oldval != expect)
5341 emit_move_insn (expect, oldval);
5342
5343 return target;
5344 }
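/* A typical use of the weak form expanded above is a CAS loop such as

     T expected = __atomic_load_n (p, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                          true, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;

   where a spurious failure merely retries; EXPECT is updated in place
   with the observed value, matching the emit_move_insn above.  */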
5345
5346 /* Expand the __atomic_load intrinsic:
5347 TYPE __atomic_load (TYPE *object, enum memmodel)
5348 EXP is the CALL_EXPR.
5349 TARGET is an optional place for us to store the results. */
5350
5351 static rtx
5352 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5353 {
5354 rtx mem;
5355 enum memmodel model;
5356
5357 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5358 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5359 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5360 {
5361 error ("invalid memory model for %<__atomic_load%>");
5362 return NULL_RTX;
5363 }
5364
5365 if (!flag_inline_atomics)
5366 return NULL_RTX;
5367
5368 /* Expand the operand. */
5369 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5370
5371 return expand_atomic_load (target, mem, model);
5372 }
5373
5374
5375 /* Expand the __atomic_store intrinsic:
5376 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5377 EXP is the CALL_EXPR.
5378 TARGET is an optional place for us to store the results. */
5379
5380 static rtx
5381 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5382 {
5383 rtx mem, val;
5384 enum memmodel model;
5385
5386 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5387 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5388 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5389 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5390 {
5391 error ("invalid memory model for %<__atomic_store%>");
5392 return NULL_RTX;
5393 }
5394
5395 if (!flag_inline_atomics)
5396 return NULL_RTX;
5397
5398 /* Expand the operands. */
5399 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5400 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5401
5402 return expand_atomic_store (mem, val, model, false);
5403 }
5404
5405 /* Expand the __atomic_fetch_XXX intrinsic:
5406 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5407 EXP is the CALL_EXPR.
5408 TARGET is an optional place for us to store the results.
5409 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
5410 FETCH_AFTER is true if returning the result of the operation;
5411 false if returning the value before the operation.
5412 IGNORE is true if the result is not used.
5413 EXT_CALL is the correct builtin for an external call if this cannot be
5414 resolved to an instruction sequence. */
5415
5416 static rtx
5417 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5418 enum rtx_code code, bool fetch_after,
5419 bool ignore, enum built_in_function ext_call)
5420 {
5421 rtx val, mem, ret;
5422 enum memmodel model;
5423 tree fndecl;
5424 tree addr;
5425
5426 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5427
5428 /* Expand the operands. */
5429 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5430 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5431
5432 /* Only try generating instructions if inlining is turned on. */
5433 if (flag_inline_atomics)
5434 {
5435 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5436 if (ret)
5437 return ret;
5438 }
5439
5440 /* If there is no alternate library routine, let the normal call be emitted. */
5441 if (ext_call == BUILT_IN_NONE)
5442 return NULL_RTX;
5443
5444 /* Change the call to the specified function. */
5445 fndecl = get_callee_fndecl (exp);
5446 addr = CALL_EXPR_FN (exp);
5447 STRIP_NOPS (addr);
5448
5449 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5450 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5451
5452 /* Expand the call here so we can emit trailing code. */
5453 ret = expand_call (exp, target, ignore);
5454
5455 /* Replace the original function just in case it matters. */
5456 TREE_OPERAND (addr, 0) = fndecl;
5457
5458 /* Then issue the arithmetic correction to return the right result. */
5459 if (!ignore)
5460 {
5461 if (code == NOT)
5462 {
5463 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5464 OPTAB_LIB_WIDEN);
5465 ret = expand_simple_unop (mode, NOT, ret, target, true);
5466 }
5467 else
5468 ret = expand_simple_binop (mode, code, ret, val, target, true,
5469 OPTAB_LIB_WIDEN);
5470 }
5471 return ret;
5472 }
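/* For example, if __atomic_add_fetch_4 cannot be expanded inline, the
   call is redirected to the __atomic_fetch_add_4 library routine, which
   returns the pre-operation value; the correction above then computes

     ret = ret + val;

   (or ret = ~(ret & val) for NAND) to recover the post-operation
   result the user asked for.  */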
5473
5474
5475 #ifndef HAVE_atomic_clear
5476 # define HAVE_atomic_clear 0
5477 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5478 #endif
5479
5480 /* Expand an atomic clear operation.
5481 void _atomic_clear (BOOL *obj, enum memmodel)
5482 EXP is the call expression. */
5483
5484 static rtx
5485 expand_builtin_atomic_clear (tree exp)
5486 {
5487 enum machine_mode mode;
5488 rtx mem, ret;
5489 enum memmodel model;
5490
5491 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5492 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5493 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5494
5495 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5496 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5497 {
5498 error ("invalid memory model for %<__atomic_clear%>");
5499 return const0_rtx;
5500 }
5501
5502 if (HAVE_atomic_clear)
5503 {
5504 emit_insn (gen_atomic_clear (mem, model));
5505 return const0_rtx;
5506 }
5507
5508 /* Try issuing an atomic store of zero, allowing a fallback to the
5509 __sync_lock_release libcall. The only way this can fail is if the
5510 bool type is larger than a word size. Unlikely, but handle it
5511 anyway for completeness: in that case assume a single threaded
5512 model, so no barriers are required, and emit a plain store. */
5513 ret = expand_atomic_store (mem, const0_rtx, model, true);
5514 if (!ret)
5515 emit_move_insn (mem, const0_rtx);
5516 return const0_rtx;
5517 }
5518
5519 /* Expand an atomic test_and_set operation.
5520 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5521 EXP is the call expression. */
5522
5523 static rtx
5524 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5525 {
5526 rtx mem;
5527 enum memmodel model;
5528 enum machine_mode mode;
5529
5530 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5531 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5532 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5533
5534 return expand_atomic_test_and_set (target, mem, model);
5535 }
5536
5537
5538 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5539 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5540
5541 static tree
5542 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5543 {
5544 int size;
5545 enum machine_mode mode;
5546 unsigned int mode_align, type_align;
5547
5548 if (TREE_CODE (arg0) != INTEGER_CST)
5549 return NULL_TREE;
5550
5551 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5552 mode = mode_for_size (size, MODE_INT, 0);
5553 mode_align = GET_MODE_ALIGNMENT (mode);
5554
5555 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5556 type_align = mode_align;
5557 else
5558 {
5559 tree ttype = TREE_TYPE (arg1);
5560
5561 /* This function is usually invoked and folded immediately by the front
5562 end before anything else has a chance to look at it. The pointer
5563 parameter at this point is usually cast to a void *, so check for that
5564 and look past the cast. */
5565 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5566 && VOID_TYPE_P (TREE_TYPE (ttype)))
5567 arg1 = TREE_OPERAND (arg1, 0);
5568
5569 ttype = TREE_TYPE (arg1);
5570 gcc_assert (POINTER_TYPE_P (ttype));
5571
5572 /* Get the underlying type of the object. */
5573 ttype = TREE_TYPE (ttype);
5574 type_align = TYPE_ALIGN (ttype);
5575 }
5576
5577 /* If the object has smaller alignment, the lock free routines cannot
5578 be used. */
5579 if (type_align < mode_align)
5580 return boolean_false_node;
5581
5582 /* Check if a compare_and_swap pattern exists for the mode which represents
5583 the required size. The pattern is not allowed to fail, so the existence
5584 of the pattern indicates support is present. */
5585 if (can_compare_and_swap_p (mode, true))
5586 return boolean_true_node;
5587 else
5588 return boolean_false_node;
5589 }
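/* For instance, a front end folding

     __atomic_always_lock_free (sizeof (int), 0)

   lands here with ARG1 == 0, so the mode's own alignment is assumed;
   on a typical target where int is 32 bits and a native 32-bit
   compare-and-swap pattern exists, this folds to true at compile
   time.  */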
5590
5591 /* Return true if the parameters to call EXP represent an object which will
5592 always generate lock free instructions. The first argument represents the
5593 size of the object, and the second parameter is a pointer to the object
5594 itself. If NULL is passed for the object, then the result is based on
5595 typical alignment for an object of the specified size. Otherwise return
5596 false. */
5597
5598 static rtx
5599 expand_builtin_atomic_always_lock_free (tree exp)
5600 {
5601 tree size;
5602 tree arg0 = CALL_EXPR_ARG (exp, 0);
5603 tree arg1 = CALL_EXPR_ARG (exp, 1);
5604
5605 if (TREE_CODE (arg0) != INTEGER_CST)
5606 {
5607 error ("non-constant argument 1 to %<__atomic_always_lock_free%>");
5608 return const0_rtx;
5609 }
5610
5611 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5612 if (size == boolean_true_node)
5613 return const1_rtx;
5614 return const0_rtx;
5615 }
5616
5617 /* Return boolean_true_node if it can be determined that object ARG1 of
5618 size ARG0 is lock free on this architecture; otherwise NULL_TREE. */
5619
5620 static tree
5621 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5622 {
5623 if (!flag_inline_atomics)
5624 return NULL_TREE;
5625
5626 /* If it isn't always lock free, don't generate a result. */
5627 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5628 return boolean_true_node;
5629
5630 return NULL_TREE;
5631 }
5632
5633 /* Return const1_rtx if the parameters to call EXP represent an object
5634 which is known to always be lock free. The first argument is the
5635 size of the object, and the second parameter is a pointer to the
5636 object itself. If NULL is passed for the object, then the result is
5637 based on typical alignment for an object of the specified size.
5638 Otherwise return NULL_RTX, forcing a library call. */
5639
5640 static rtx
5641 expand_builtin_atomic_is_lock_free (tree exp)
5642 {
5643 tree size;
5644 tree arg0 = CALL_EXPR_ARG (exp, 0);
5645 tree arg1 = CALL_EXPR_ARG (exp, 1);
5646
5647 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5648 {
5649 error ("non-integer argument 1 to %<__atomic_is_lock_free%>");
5650 return NULL_RTX;
5651 }
5652
5653 if (!flag_inline_atomics)
5654 return NULL_RTX;
5655
5656 /* If the value is known at compile time, return the RTX for it. */
5657 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5658 if (size == boolean_true_node)
5659 return const1_rtx;
5660
5661 return NULL_RTX;
5662 }
5663
5664 /* Expand the __atomic_thread_fence intrinsic:
5665 void __atomic_thread_fence (enum memmodel)
5666 EXP is the CALL_EXPR. */
5667
5668 static void
5669 expand_builtin_atomic_thread_fence (tree exp)
5670 {
5671 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5672 expand_mem_thread_fence (model);
5673 }
5674
5675 /* Expand the __atomic_signal_fence intrinsic:
5676 void __atomic_signal_fence (enum memmodel)
5677 EXP is the CALL_EXPR. */
5678
5679 static void
5680 expand_builtin_atomic_signal_fence (tree exp)
5681 {
5682 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5683 expand_mem_signal_fence (model);
5684 }
5685
5686 /* Expand the __sync_synchronize intrinsic. */
5687
5688 static void
5689 expand_builtin_sync_synchronize (void)
5690 {
5691 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5692 }
5693
5694 static rtx
5695 expand_builtin_thread_pointer (tree exp, rtx target)
5696 {
5697 enum insn_code icode;
5698 if (!validate_arglist (exp, VOID_TYPE))
5699 return const0_rtx;
5700 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5701 if (icode != CODE_FOR_nothing)
5702 {
5703 struct expand_operand op;
5704 if (!REG_P (target) || GET_MODE (target) != Pmode)
5705 target = gen_reg_rtx (Pmode);
5706 create_output_operand (&op, target, Pmode);
5707 expand_insn (icode, 1, &op);
5708 return target;
5709 }
5710 error ("%<__builtin_thread_pointer%> is not supported on this target");
5711 return const0_rtx;
5712 }
5713
5714 static void
5715 expand_builtin_set_thread_pointer (tree exp)
5716 {
5717 enum insn_code icode;
5718 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5719 return;
5720 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5721 if (icode != CODE_FOR_nothing)
5722 {
5723 struct expand_operand op;
5724 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5725 Pmode, EXPAND_NORMAL);
5726 create_input_operand (&op, val, Pmode);
5727 expand_insn (icode, 1, &op);
5728 return;
5729 }
5730 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
5731 }
5732
5733 \f
5734 /* Emit code to restore the current value of stack. */
5735
5736 static void
5737 expand_stack_restore (tree var)
5738 {
5739 rtx prev, sa = expand_normal (var);
5740
5741 sa = convert_memory_address (Pmode, sa);
5742
5743 prev = get_last_insn ();
5744 emit_stack_restore (SAVE_BLOCK, sa);
5745 fixup_args_size_notes (prev, get_last_insn (), 0);
5746 }
5747
5748
5749 /* Emit code to save the current value of stack. */
5750
5751 static rtx
5752 expand_stack_save (void)
5753 {
5754 rtx ret = NULL_RTX;
5755
5756 do_pending_stack_adjust ();
5757 emit_stack_save (SAVE_BLOCK, &ret);
5758 return ret;
5759 }
5760
5761 /* Expand an expression EXP that calls a built-in function,
5762 with result going to TARGET if that's convenient
5763 (and in mode MODE if that's convenient).
5764 SUBTARGET may be used as the target for computing one of EXP's operands.
5765 IGNORE is nonzero if the value is to be ignored. */
5766
5767 rtx
5768 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5769 int ignore)
5770 {
5771 tree fndecl = get_callee_fndecl (exp);
5772 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5773 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5774 int flags;
5775
5776 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5777 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5778
5779 /* When not optimizing, generate calls to library functions for a certain
5780 set of builtins. */
5781 if (!optimize
5782 && !called_as_built_in (fndecl)
5783 && fcode != BUILT_IN_FORK
5784 && fcode != BUILT_IN_EXECL
5785 && fcode != BUILT_IN_EXECV
5786 && fcode != BUILT_IN_EXECLP
5787 && fcode != BUILT_IN_EXECLE
5788 && fcode != BUILT_IN_EXECVP
5789 && fcode != BUILT_IN_EXECVE
5790 && fcode != BUILT_IN_ALLOCA
5791 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5792 && fcode != BUILT_IN_FREE
5793 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5794 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5795 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5796 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5797 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5798 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5799 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5800 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5801 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5802 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5803 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND)
5804 return expand_call (exp, target, ignore);
5805
5806 /* The built-in function expanders test for target == const0_rtx
5807 to determine whether the function's result will be ignored. */
5808 if (ignore)
5809 target = const0_rtx;
5810
5811 /* If the result of a pure or const built-in function is ignored, and
5812 none of its arguments are volatile, we can avoid expanding the
5813 built-in call and just evaluate the arguments for side-effects. */
5814 if (target == const0_rtx
5815 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5816 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5817 {
5818 bool volatilep = false;
5819 tree arg;
5820 call_expr_arg_iterator iter;
5821
5822 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5823 if (TREE_THIS_VOLATILE (arg))
5824 {
5825 volatilep = true;
5826 break;
5827 }
5828
5829 if (! volatilep)
5830 {
5831 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5832 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5833 return const0_rtx;
5834 }
5835 }
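/* For example, given a pure builtin whose result is discarded,

     (void) __builtin_strlen (s);

   only the evaluation of S for its side effects remains; no strlen
   call or inline expansion is emitted.  */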
5836
5837 switch (fcode)
5838 {
5839 CASE_FLT_FN (BUILT_IN_FABS):
5840 case BUILT_IN_FABSD32:
5841 case BUILT_IN_FABSD64:
5842 case BUILT_IN_FABSD128:
5843 target = expand_builtin_fabs (exp, target, subtarget);
5844 if (target)
5845 return target;
5846 break;
5847
5848 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5849 target = expand_builtin_copysign (exp, target, subtarget);
5850 if (target)
5851 return target;
5852 break;
5853
5854 /* Just do a normal library call if we were unable to fold
5855 the values. */
5856 CASE_FLT_FN (BUILT_IN_CABS):
5857 break;
5858
5859 CASE_FLT_FN (BUILT_IN_EXP):
5860 CASE_FLT_FN (BUILT_IN_EXP10):
5861 CASE_FLT_FN (BUILT_IN_POW10):
5862 CASE_FLT_FN (BUILT_IN_EXP2):
5863 CASE_FLT_FN (BUILT_IN_EXPM1):
5864 CASE_FLT_FN (BUILT_IN_LOGB):
5865 CASE_FLT_FN (BUILT_IN_LOG):
5866 CASE_FLT_FN (BUILT_IN_LOG10):
5867 CASE_FLT_FN (BUILT_IN_LOG2):
5868 CASE_FLT_FN (BUILT_IN_LOG1P):
5869 CASE_FLT_FN (BUILT_IN_TAN):
5870 CASE_FLT_FN (BUILT_IN_ASIN):
5871 CASE_FLT_FN (BUILT_IN_ACOS):
5872 CASE_FLT_FN (BUILT_IN_ATAN):
5873 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5874 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5875 because of possible accuracy problems. */
5876 if (! flag_unsafe_math_optimizations)
5877 break;
5878 CASE_FLT_FN (BUILT_IN_SQRT):
5879 CASE_FLT_FN (BUILT_IN_FLOOR):
5880 CASE_FLT_FN (BUILT_IN_CEIL):
5881 CASE_FLT_FN (BUILT_IN_TRUNC):
5882 CASE_FLT_FN (BUILT_IN_ROUND):
5883 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5884 CASE_FLT_FN (BUILT_IN_RINT):
5885 target = expand_builtin_mathfn (exp, target, subtarget);
5886 if (target)
5887 return target;
5888 break;
5889
5890 CASE_FLT_FN (BUILT_IN_FMA):
5891 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5892 if (target)
5893 return target;
5894 break;
5895
5896 CASE_FLT_FN (BUILT_IN_ILOGB):
5897 if (! flag_unsafe_math_optimizations)
5898 break;
5899 CASE_FLT_FN (BUILT_IN_ISINF):
5900 CASE_FLT_FN (BUILT_IN_FINITE):
5901 case BUILT_IN_ISFINITE:
5902 case BUILT_IN_ISNORMAL:
5903 target = expand_builtin_interclass_mathfn (exp, target);
5904 if (target)
5905 return target;
5906 break;
5907
5908 CASE_FLT_FN (BUILT_IN_ICEIL):
5909 CASE_FLT_FN (BUILT_IN_LCEIL):
5910 CASE_FLT_FN (BUILT_IN_LLCEIL):
5911 CASE_FLT_FN (BUILT_IN_LFLOOR):
5912 CASE_FLT_FN (BUILT_IN_IFLOOR):
5913 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5914 target = expand_builtin_int_roundingfn (exp, target);
5915 if (target)
5916 return target;
5917 break;
5918
5919 CASE_FLT_FN (BUILT_IN_IRINT):
5920 CASE_FLT_FN (BUILT_IN_LRINT):
5921 CASE_FLT_FN (BUILT_IN_LLRINT):
5922 CASE_FLT_FN (BUILT_IN_IROUND):
5923 CASE_FLT_FN (BUILT_IN_LROUND):
5924 CASE_FLT_FN (BUILT_IN_LLROUND):
5925 target = expand_builtin_int_roundingfn_2 (exp, target);
5926 if (target)
5927 return target;
5928 break;
5929
5930 CASE_FLT_FN (BUILT_IN_POWI):
5931 target = expand_builtin_powi (exp, target);
5932 if (target)
5933 return target;
5934 break;
5935
5936 CASE_FLT_FN (BUILT_IN_ATAN2):
5937 CASE_FLT_FN (BUILT_IN_LDEXP):
5938 CASE_FLT_FN (BUILT_IN_SCALB):
5939 CASE_FLT_FN (BUILT_IN_SCALBN):
5940 CASE_FLT_FN (BUILT_IN_SCALBLN):
5941 if (! flag_unsafe_math_optimizations)
5942 break;
5943
5944 CASE_FLT_FN (BUILT_IN_FMOD):
5945 CASE_FLT_FN (BUILT_IN_REMAINDER):
5946 CASE_FLT_FN (BUILT_IN_DREM):
5947 CASE_FLT_FN (BUILT_IN_POW):
5948 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5949 if (target)
5950 return target;
5951 break;
5952
5953 CASE_FLT_FN (BUILT_IN_CEXPI):
5954 target = expand_builtin_cexpi (exp, target);
5955 gcc_assert (target);
5956 return target;
5957
5958 CASE_FLT_FN (BUILT_IN_SIN):
5959 CASE_FLT_FN (BUILT_IN_COS):
5960 if (! flag_unsafe_math_optimizations)
5961 break;
5962 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5963 if (target)
5964 return target;
5965 break;
5966
5967 CASE_FLT_FN (BUILT_IN_SINCOS):
5968 if (! flag_unsafe_math_optimizations)
5969 break;
5970 target = expand_builtin_sincos (exp);
5971 if (target)
5972 return target;
5973 break;
5974
5975 case BUILT_IN_APPLY_ARGS:
5976 return expand_builtin_apply_args ();
5977
5978 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5979 FUNCTION with a copy of the parameters described by
5980 ARGUMENTS, and ARGSIZE. It returns a block of memory
5981 allocated on the stack into which is stored all the registers
5982 that might possibly be used for returning the result of a
5983 function. ARGUMENTS is the value returned by
5984 __builtin_apply_args. ARGSIZE is the number of bytes of
5985 arguments that must be copied. ??? How should this value be
5986 computed? We'll also need a safe worst case value for varargs
5987 functions. */
5988 case BUILT_IN_APPLY:
5989 if (!validate_arglist (exp, POINTER_TYPE,
5990 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5991 && !validate_arglist (exp, REFERENCE_TYPE,
5992 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5993 return const0_rtx;
5994 else
5995 {
5996 rtx ops[3];
5997
5998 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5999 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6000 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6001
6002 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6003 }
6004
6005 /* __builtin_return (RESULT) causes the function to return the
6006 value described by RESULT. RESULT is address of the block of
6007 memory returned by __builtin_apply. */
6008 case BUILT_IN_RETURN:
6009 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6010 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6011 return const0_rtx;
6012
6013 case BUILT_IN_SAVEREGS:
6014 return expand_builtin_saveregs ();
6015
6016 case BUILT_IN_VA_ARG_PACK:
6017 /* All valid uses of __builtin_va_arg_pack () are removed during
6018 inlining. */
6019 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6020 return const0_rtx;
6021
6022 case BUILT_IN_VA_ARG_PACK_LEN:
6023 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6024 inlining. */
6025 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6026 return const0_rtx;
6027
6028 /* Return the address of the first anonymous stack arg. */
6029 case BUILT_IN_NEXT_ARG:
6030 if (fold_builtin_next_arg (exp, false))
6031 return const0_rtx;
6032 return expand_builtin_next_arg ();
6033
6034 case BUILT_IN_CLEAR_CACHE:
6035 target = expand_builtin___clear_cache (exp);
6036 if (target)
6037 return target;
6038 break;
6039
6040 case BUILT_IN_CLASSIFY_TYPE:
6041 return expand_builtin_classify_type (exp);
6042
6043 case BUILT_IN_CONSTANT_P:
6044 return const0_rtx;
6045
6046 case BUILT_IN_FRAME_ADDRESS:
6047 case BUILT_IN_RETURN_ADDRESS:
6048 return expand_builtin_frame_address (fndecl, exp);
6049
6050 /* Returns the address of the area where the structure is returned.
6051 0 otherwise. */
6052 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6053 if (call_expr_nargs (exp) != 0
6054 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6055 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6056 return const0_rtx;
6057 else
6058 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6059
6060 case BUILT_IN_ALLOCA:
6061 case BUILT_IN_ALLOCA_WITH_ALIGN:
6062 /* If the allocation stems from the declaration of a variable-sized
6063 object, it cannot accumulate. */
6064 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6065 if (target)
6066 return target;
6067 break;
6068
6069 case BUILT_IN_STACK_SAVE:
6070 return expand_stack_save ();
6071
6072 case BUILT_IN_STACK_RESTORE:
6073 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6074 return const0_rtx;
6075
6076 case BUILT_IN_BSWAP16:
6077 case BUILT_IN_BSWAP32:
6078 case BUILT_IN_BSWAP64:
6079 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6080 if (target)
6081 return target;
6082 break;
6083
6084 CASE_INT_FN (BUILT_IN_FFS):
6085 target = expand_builtin_unop (target_mode, exp, target,
6086 subtarget, ffs_optab);
6087 if (target)
6088 return target;
6089 break;
6090
6091 CASE_INT_FN (BUILT_IN_CLZ):
6092 target = expand_builtin_unop (target_mode, exp, target,
6093 subtarget, clz_optab);
6094 if (target)
6095 return target;
6096 break;
6097
6098 CASE_INT_FN (BUILT_IN_CTZ):
6099 target = expand_builtin_unop (target_mode, exp, target,
6100 subtarget, ctz_optab);
6101 if (target)
6102 return target;
6103 break;
6104
6105 CASE_INT_FN (BUILT_IN_CLRSB):
6106 target = expand_builtin_unop (target_mode, exp, target,
6107 subtarget, clrsb_optab);
6108 if (target)
6109 return target;
6110 break;
6111
6112 CASE_INT_FN (BUILT_IN_POPCOUNT):
6113 target = expand_builtin_unop (target_mode, exp, target,
6114 subtarget, popcount_optab);
6115 if (target)
6116 return target;
6117 break;
6118
6119 CASE_INT_FN (BUILT_IN_PARITY):
6120 target = expand_builtin_unop (target_mode, exp, target,
6121 subtarget, parity_optab);
6122 if (target)
6123 return target;
6124 break;
6125
6126 case BUILT_IN_STRLEN:
6127 target = expand_builtin_strlen (exp, target, target_mode);
6128 if (target)
6129 return target;
6130 break;
6131
6132 case BUILT_IN_STRCPY:
6133 target = expand_builtin_strcpy (exp, target);
6134 if (target)
6135 return target;
6136 break;
6137
6138 case BUILT_IN_STRNCPY:
6139 target = expand_builtin_strncpy (exp, target);
6140 if (target)
6141 return target;
6142 break;
6143
6144 case BUILT_IN_STPCPY:
6145 target = expand_builtin_stpcpy (exp, target, mode);
6146 if (target)
6147 return target;
6148 break;
6149
6150 case BUILT_IN_MEMCPY:
6151 target = expand_builtin_memcpy (exp, target);
6152 if (target)
6153 return target;
6154 break;
6155
6156 case BUILT_IN_MEMPCPY:
6157 target = expand_builtin_mempcpy (exp, target, mode);
6158 if (target)
6159 return target;
6160 break;
6161
6162 case BUILT_IN_MEMSET:
6163 target = expand_builtin_memset (exp, target, mode);
6164 if (target)
6165 return target;
6166 break;
6167
6168 case BUILT_IN_BZERO:
6169 target = expand_builtin_bzero (exp);
6170 if (target)
6171 return target;
6172 break;
6173
6174 case BUILT_IN_STRCMP:
6175 target = expand_builtin_strcmp (exp, target);
6176 if (target)
6177 return target;
6178 break;
6179
6180 case BUILT_IN_STRNCMP:
6181 target = expand_builtin_strncmp (exp, target, mode);
6182 if (target)
6183 return target;
6184 break;
6185
6186 case BUILT_IN_BCMP:
6187 case BUILT_IN_MEMCMP:
6188 target = expand_builtin_memcmp (exp, target, mode);
6189 if (target)
6190 return target;
6191 break;
6192
6193 case BUILT_IN_SETJMP:
6194 /* This should have been lowered to the builtins below. */
6195 gcc_unreachable ();
6196
6197 case BUILT_IN_SETJMP_SETUP:
6198 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6199 and the receiver label. */
6200 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6201 {
6202 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6203 VOIDmode, EXPAND_NORMAL);
6204 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6205 rtx label_r = label_rtx (label);
6206
6207 /* This is copied from the handling of non-local gotos. */
6208 expand_builtin_setjmp_setup (buf_addr, label_r);
6209 nonlocal_goto_handler_labels
6210 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6211 nonlocal_goto_handler_labels);
6212 /* ??? Do not let expand_label treat us as such since we would
6213 not want to be both on the list of non-local labels and on
6214 the list of forced labels. */
6215 FORCED_LABEL (label) = 0;
6216 return const0_rtx;
6217 }
6218 break;
6219
6220 case BUILT_IN_SETJMP_DISPATCHER:
6221 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6222 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6223 {
6224 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6225 rtx label_r = label_rtx (label);
6226
6227 /* Remove the dispatcher label from the list of non-local labels
6228 since the receiver labels have been added to it above. */
6229 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6230 return const0_rtx;
6231 }
6232 break;
6233
6234 case BUILT_IN_SETJMP_RECEIVER:
6235 /* __builtin_setjmp_receiver is passed the receiver label. */
6236 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6237 {
6238 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6239 rtx label_r = label_rtx (label);
6240
6241 expand_builtin_setjmp_receiver (label_r);
6242 return const0_rtx;
6243 }
6244 break;
6245
6246 /* __builtin_longjmp is passed a pointer to an array of five words.
6247 It's similar to the C library longjmp function but works with
6248 __builtin_setjmp above. */
6249 case BUILT_IN_LONGJMP:
6250 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6251 {
6252 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6253 VOIDmode, EXPAND_NORMAL);
6254 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6255
6256 if (value != const1_rtx)
6257 {
6258 error ("%<__builtin_longjmp%> second argument must be 1");
6259 return const0_rtx;
6260 }
6261
6262 expand_builtin_longjmp (buf_addr, value);
6263 return const0_rtx;
6264 }
6265 break;
6266
6267 case BUILT_IN_NONLOCAL_GOTO:
6268 target = expand_builtin_nonlocal_goto (exp);
6269 if (target)
6270 return target;
6271 break;
6272
6273 /* This updates the setjmp buffer that is its argument with the value
6274 of the current stack pointer. */
6275 case BUILT_IN_UPDATE_SETJMP_BUF:
6276 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6277 {
6278 rtx buf_addr
6279 = expand_normal (CALL_EXPR_ARG (exp, 0));
6280
6281 expand_builtin_update_setjmp_buf (buf_addr);
6282 return const0_rtx;
6283 }
6284 break;
6285
6286 case BUILT_IN_TRAP:
6287 expand_builtin_trap ();
6288 return const0_rtx;
6289
6290 case BUILT_IN_UNREACHABLE:
6291 expand_builtin_unreachable ();
6292 return const0_rtx;
6293
6294 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6295 case BUILT_IN_SIGNBITD32:
6296 case BUILT_IN_SIGNBITD64:
6297 case BUILT_IN_SIGNBITD128:
6298 target = expand_builtin_signbit (exp, target);
6299 if (target)
6300 return target;
6301 break;
6302
6303 /* Various hooks for the DWARF 2 __throw routine. */
6304 case BUILT_IN_UNWIND_INIT:
6305 expand_builtin_unwind_init ();
6306 return const0_rtx;
6307 case BUILT_IN_DWARF_CFA:
6308 return virtual_cfa_rtx;
6309 #ifdef DWARF2_UNWIND_INFO
6310 case BUILT_IN_DWARF_SP_COLUMN:
6311 return expand_builtin_dwarf_sp_column ();
6312 case BUILT_IN_INIT_DWARF_REG_SIZES:
6313 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6314 return const0_rtx;
6315 #endif
6316 case BUILT_IN_FROB_RETURN_ADDR:
6317 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6318 case BUILT_IN_EXTRACT_RETURN_ADDR:
6319 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6320 case BUILT_IN_EH_RETURN:
6321 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6322 CALL_EXPR_ARG (exp, 1));
6323 return const0_rtx;
6324 #ifdef EH_RETURN_DATA_REGNO
6325 case BUILT_IN_EH_RETURN_DATA_REGNO:
6326 return expand_builtin_eh_return_data_regno (exp);
6327 #endif
6328 case BUILT_IN_EXTEND_POINTER:
6329 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6330 case BUILT_IN_EH_POINTER:
6331 return expand_builtin_eh_pointer (exp);
6332 case BUILT_IN_EH_FILTER:
6333 return expand_builtin_eh_filter (exp);
6334 case BUILT_IN_EH_COPY_VALUES:
6335 return expand_builtin_eh_copy_values (exp);
6336
6337 case BUILT_IN_VA_START:
6338 return expand_builtin_va_start (exp);
6339 case BUILT_IN_VA_END:
6340 return expand_builtin_va_end (exp);
6341 case BUILT_IN_VA_COPY:
6342 return expand_builtin_va_copy (exp);
6343 case BUILT_IN_EXPECT:
6344 return expand_builtin_expect (exp, target);
6345 case BUILT_IN_ASSUME_ALIGNED:
6346 return expand_builtin_assume_aligned (exp, target);
6347 case BUILT_IN_PREFETCH:
6348 expand_builtin_prefetch (exp);
6349 return const0_rtx;
6350
6351 case BUILT_IN_INIT_TRAMPOLINE:
6352 return expand_builtin_init_trampoline (exp, true);
6353 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6354 return expand_builtin_init_trampoline (exp, false);
6355 case BUILT_IN_ADJUST_TRAMPOLINE:
6356 return expand_builtin_adjust_trampoline (exp);
6357
6358 case BUILT_IN_FORK:
6359 case BUILT_IN_EXECL:
6360 case BUILT_IN_EXECV:
6361 case BUILT_IN_EXECLP:
6362 case BUILT_IN_EXECLE:
6363 case BUILT_IN_EXECVP:
6364 case BUILT_IN_EXECVE:
6365 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6366 if (target)
6367 return target;
6368 break;
6369
6370 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6371 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6372 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6373 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6374 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6375 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6376 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6377 if (target)
6378 return target;
6379 break;
6380
6381 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6382 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6383 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6384 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6385 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6386 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6387 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6388 if (target)
6389 return target;
6390 break;
6391
6392 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6393 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6394 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6395 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6396 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6397 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6398 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6399 if (target)
6400 return target;
6401 break;
6402
6403 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6404 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6405 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6406 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6407 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6408 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6409 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6410 if (target)
6411 return target;
6412 break;
6413
6414 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6415 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6416 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6417 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6418 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6419 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6420 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6421 if (target)
6422 return target;
6423 break;
6424
6425 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6426 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6427 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6428 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6429 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6430 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6431 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6432 if (target)
6433 return target;
6434 break;
6435
6436 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6437 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6438 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6439 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6440 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6441 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6442 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6443 if (target)
6444 return target;
6445 break;
6446
6447 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6448 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6449 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6450 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6451 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6452 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6453 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6454 if (target)
6455 return target;
6456 break;
6457
6458 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6459 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6460 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6461 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6462 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6463 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6464 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6465 if (target)
6466 return target;
6467 break;
6468
6469 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6470 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6471 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6472 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6473 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6474 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6475 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6476 if (target)
6477 return target;
6478 break;
6479
6480 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6481 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6482 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6483 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6484 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6485 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6486 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6487 if (target)
6488 return target;
6489 break;
6490
6491 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6492 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6493 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6494 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6495 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6496 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6497 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6498 if (target)
6499 return target;
6500 break;
6501
6502 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6503 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6504 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6505 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6506 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6507 if (mode == VOIDmode)
6508 mode = TYPE_MODE (boolean_type_node);
6509 if (!target || !register_operand (target, mode))
6510 target = gen_reg_rtx (mode);
6511
6512 mode = get_builtin_sync_mode
6513 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6514 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6515 if (target)
6516 return target;
6517 break;
6518
6519 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6520 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6521 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6522 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6523 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6524 mode = get_builtin_sync_mode
6525 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6526 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6527 if (target)
6528 return target;
6529 break;
6530
6531 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6532 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6533 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6534 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6535 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6536 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6537 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6538 if (target)
6539 return target;
6540 break;
6541
6542 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6543 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6544 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6545 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6546 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6547 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6548 expand_builtin_sync_lock_release (mode, exp);
6549 return const0_rtx;
6550
6551 case BUILT_IN_SYNC_SYNCHRONIZE:
6552 expand_builtin_sync_synchronize ();
6553 return const0_rtx;
6554
6555 case BUILT_IN_ATOMIC_EXCHANGE_1:
6556 case BUILT_IN_ATOMIC_EXCHANGE_2:
6557 case BUILT_IN_ATOMIC_EXCHANGE_4:
6558 case BUILT_IN_ATOMIC_EXCHANGE_8:
6559 case BUILT_IN_ATOMIC_EXCHANGE_16:
6560 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6561 target = expand_builtin_atomic_exchange (mode, exp, target);
6562 if (target)
6563 return target;
6564 break;
6565
6566 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6567 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6568 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6569 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6570 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6571 {
6572 unsigned int nargs, z;
6573 vec<tree, va_gc> *vec;
6574
6575 mode =
6576 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6577 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6578 if (target)
6579 return target;
6580
6581 /* If this is turned into an external library call, the weak parameter
6582 must be dropped to match the expected parameter list. */
6583 nargs = call_expr_nargs (exp);
6584 vec_alloc (vec, nargs - 1);
6585 for (z = 0; z < 3; z++)
6586 vec->quick_push (CALL_EXPR_ARG (exp, z));
6587 /* Skip the boolean weak parameter. */
6588 for (z = 4; z < 6; z++)
6589 vec->quick_push (CALL_EXPR_ARG (exp, z));
6590 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6591 break;
6592 }
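/* Schematically, the six-operand builtin call

     __atomic_compare_exchange_N (obj, expect, desired, weak, smodel, fmodel)

   is rebuilt above without its WEAK operand, matching an external
   library entry point of the shape

     bool __atomic_compare_exchange_N (T *, T *, T, int, int)

   that takes only the two memory models.  */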
6593
6594 case BUILT_IN_ATOMIC_LOAD_1:
6595 case BUILT_IN_ATOMIC_LOAD_2:
6596 case BUILT_IN_ATOMIC_LOAD_4:
6597 case BUILT_IN_ATOMIC_LOAD_8:
6598 case BUILT_IN_ATOMIC_LOAD_16:
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6600 target = expand_builtin_atomic_load (mode, exp, target);
6601 if (target)
6602 return target;
6603 break;
6604
6605 case BUILT_IN_ATOMIC_STORE_1:
6606 case BUILT_IN_ATOMIC_STORE_2:
6607 case BUILT_IN_ATOMIC_STORE_4:
6608 case BUILT_IN_ATOMIC_STORE_8:
6609 case BUILT_IN_ATOMIC_STORE_16:
6610 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6611 target = expand_builtin_atomic_store (mode, exp);
6612 if (target)
6613 return const0_rtx;
6614 break;
6615
6616 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6617 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6618 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6619 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6620 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6621 {
6622 enum built_in_function lib;
6623 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6624 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6625 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6626 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6627 ignore, lib);
6628 if (target)
6629 return target;
6630 break;
6631 }
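/* As a concrete instance of the mapping above: for the 4-byte variant,
   LIB becomes BUILT_IN_ATOMIC_FETCH_ADD_4, and expand_builtin_atomic_fetch_op
   adds VAL back to the fetched value so that __atomic_add_fetch_4 still
   returns the post-increment result when only the fetch_add libcall is
   available.  */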
6632 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6633 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6634 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6635 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6636 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6637 {
6638 enum built_in_function lib;
6639 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6640 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6641 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6642 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6643 ignore, lib);
6644 if (target)
6645 return target;
6646 break;
6647 }
6648 case BUILT_IN_ATOMIC_AND_FETCH_1:
6649 case BUILT_IN_ATOMIC_AND_FETCH_2:
6650 case BUILT_IN_ATOMIC_AND_FETCH_4:
6651 case BUILT_IN_ATOMIC_AND_FETCH_8:
6652 case BUILT_IN_ATOMIC_AND_FETCH_16:
6653 {
6654 enum built_in_function lib;
6655 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6656 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6657 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6658 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6659 ignore, lib);
6660 if (target)
6661 return target;
6662 break;
6663 }
6664 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6665 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6666 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6667 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6668 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6669 {
6670 enum built_in_function lib;
6671 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6672 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6673 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6674 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6675 ignore, lib);
6676 if (target)
6677 return target;
6678 break;
6679 }
6680 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6681 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6682 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6683 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6684 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6685 {
6686 enum built_in_function lib;
6687 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6688 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6689 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6690 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6691 ignore, lib);
6692 if (target)
6693 return target;
6694 break;
6695 }
6696 case BUILT_IN_ATOMIC_OR_FETCH_1:
6697 case BUILT_IN_ATOMIC_OR_FETCH_2:
6698 case BUILT_IN_ATOMIC_OR_FETCH_4:
6699 case BUILT_IN_ATOMIC_OR_FETCH_8:
6700 case BUILT_IN_ATOMIC_OR_FETCH_16:
6701 {
6702 enum built_in_function lib;
6703 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6704 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6705 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6706 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6707 ignore, lib);
6708 if (target)
6709 return target;
6710 break;
6711 }
6712 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6713 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6714 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6715 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6716 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6718 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6719 ignore, BUILT_IN_NONE);
6720 if (target)
6721 return target;
6722 break;
6723
6724 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6725 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6726 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6727 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6728 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6730 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6731 ignore, BUILT_IN_NONE);
6732 if (target)
6733 return target;
6734 break;
6735
6736 case BUILT_IN_ATOMIC_FETCH_AND_1:
6737 case BUILT_IN_ATOMIC_FETCH_AND_2:
6738 case BUILT_IN_ATOMIC_FETCH_AND_4:
6739 case BUILT_IN_ATOMIC_FETCH_AND_8:
6740 case BUILT_IN_ATOMIC_FETCH_AND_16:
6741 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6742 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6743 ignore, BUILT_IN_NONE);
6744 if (target)
6745 return target;
6746 break;
6747
6748 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6749 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6750 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6751 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6752 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6753 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6754 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6755 ignore, BUILT_IN_NONE);
6756 if (target)
6757 return target;
6758 break;
6759
6760 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6761 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6762 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6763 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6764 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6765 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6766 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6767 ignore, BUILT_IN_NONE);
6768 if (target)
6769 return target;
6770 break;
6771
6772 case BUILT_IN_ATOMIC_FETCH_OR_1:
6773 case BUILT_IN_ATOMIC_FETCH_OR_2:
6774 case BUILT_IN_ATOMIC_FETCH_OR_4:
6775 case BUILT_IN_ATOMIC_FETCH_OR_8:
6776 case BUILT_IN_ATOMIC_FETCH_OR_16:
6777 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6778 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6779 ignore, BUILT_IN_NONE);
6780 if (target)
6781 return target;
6782 break;
6783
6784 case BUILT_IN_ATOMIC_TEST_AND_SET:
6785 return expand_builtin_atomic_test_and_set (exp, target);
6786
6787 case BUILT_IN_ATOMIC_CLEAR:
6788 return expand_builtin_atomic_clear (exp);
6789
6790 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6791 return expand_builtin_atomic_always_lock_free (exp);
6792
6793 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6794 target = expand_builtin_atomic_is_lock_free (exp);
6795 if (target)
6796 return target;
6797 break;
6798
6799 case BUILT_IN_ATOMIC_THREAD_FENCE:
6800 expand_builtin_atomic_thread_fence (exp);
6801 return const0_rtx;
6802
6803 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6804 expand_builtin_atomic_signal_fence (exp);
6805 return const0_rtx;
6806
6807 case BUILT_IN_OBJECT_SIZE:
6808 return expand_builtin_object_size (exp);
6809
6810 case BUILT_IN_MEMCPY_CHK:
6811 case BUILT_IN_MEMPCPY_CHK:
6812 case BUILT_IN_MEMMOVE_CHK:
6813 case BUILT_IN_MEMSET_CHK:
6814 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6815 if (target)
6816 return target;
6817 break;
6818
6819 case BUILT_IN_STRCPY_CHK:
6820 case BUILT_IN_STPCPY_CHK:
6821 case BUILT_IN_STRNCPY_CHK:
6822 case BUILT_IN_STPNCPY_CHK:
6823 case BUILT_IN_STRCAT_CHK:
6824 case BUILT_IN_STRNCAT_CHK:
6825 case BUILT_IN_SNPRINTF_CHK:
6826 case BUILT_IN_VSNPRINTF_CHK:
6827 maybe_emit_chk_warning (exp, fcode);
6828 break;
6829
6830 case BUILT_IN_SPRINTF_CHK:
6831 case BUILT_IN_VSPRINTF_CHK:
6832 maybe_emit_sprintf_chk_warning (exp, fcode);
6833 break;
6834
6835 case BUILT_IN_FREE:
6836 if (warn_free_nonheap_object)
6837 maybe_emit_free_warning (exp);
6838 break;
6839
6840 case BUILT_IN_THREAD_POINTER:
6841 return expand_builtin_thread_pointer (exp, target);
6842
6843 case BUILT_IN_SET_THREAD_POINTER:
6844 expand_builtin_set_thread_pointer (exp);
6845 return const0_rtx;
6846
6847 case BUILT_IN_CILK_DETACH:
6848 expand_builtin_cilk_detach (exp);
6849 return const0_rtx;
6850
6851 case BUILT_IN_CILK_POP_FRAME:
6852 expand_builtin_cilk_pop_frame (exp);
6853 return const0_rtx;
6854
6855 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6856 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6857 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6858 return expand_normal (CALL_EXPR_ARG (exp, 0));
6859
6860 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6861 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6862 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6863 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6864 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6865 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6866 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6867 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6868 /* We allow user CHKP builtins if the Pointer Bounds
6869 Checker is off. */
6870 if (!flag_check_pointer_bounds)
6871 {
6872 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6873 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
6874 return expand_normal (CALL_EXPR_ARG (exp, 0));
6875 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6876 return expand_normal (size_zero_node);
6877 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6878 return expand_normal (size_int (-1));
6879 else
6880 return const0_rtx;
6881 }
6882 /* FALLTHROUGH */
6883
6884 case BUILT_IN_CHKP_BNDMK:
6885 case BUILT_IN_CHKP_BNDSTX:
6886 case BUILT_IN_CHKP_BNDCL:
6887 case BUILT_IN_CHKP_BNDCU:
6888 case BUILT_IN_CHKP_BNDLDX:
6889 case BUILT_IN_CHKP_BNDRET:
6890 case BUILT_IN_CHKP_INTERSECT:
6891 case BUILT_IN_CHKP_ARG_BND:
6892 case BUILT_IN_CHKP_NARROW:
6893 case BUILT_IN_CHKP_EXTRACT_LOWER:
6894 case BUILT_IN_CHKP_EXTRACT_UPPER:
6895 /* A software implementation of the pointer checker is not yet
6896 implemented; target support is required. */
6897 error ("your target platform does not support -fcheck-pointers");
6898 break;
6899
6900 default: /* Just do a library call, if unknown builtin. */
6901 break;
6902 }
6903
6904 /* The switch statement above can drop through to cause the function
6905 to be called normally. */
6906 return expand_call (exp, target, ignore);
6907 }
6908
6909 /* Determine whether a tree node represents a call to a built-in
6910 function. If the tree T is a call to a built-in function with
6911 the right number of arguments of the appropriate types, return
6912 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6913 Otherwise the return value is END_BUILTINS. */
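/* Illustrative sketch (an assumed C caller, not part of GCC): for the
   call tree built from

     double d = sqrt (x);       (x of type double)

   the callee is the normal builtin and the argument type matches the
   scalar-float parameter type, so this returns BUILT_IN_SQRT; a call
   through a function pointer, or one whose argument types do not match
   the parameter list, yields END_BUILTINS instead.  */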
6914
6915 enum built_in_function
6916 builtin_mathfn_code (const_tree t)
6917 {
6918 const_tree fndecl, arg, parmlist;
6919 const_tree argtype, parmtype;
6920 const_call_expr_arg_iterator iter;
6921
6922 if (TREE_CODE (t) != CALL_EXPR
6923 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6924 return END_BUILTINS;
6925
6926 fndecl = get_callee_fndecl (t);
6927 if (fndecl == NULL_TREE
6928 || TREE_CODE (fndecl) != FUNCTION_DECL
6929 || ! DECL_BUILT_IN (fndecl)
6930 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6931 return END_BUILTINS;
6932
6933 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6934 init_const_call_expr_arg_iterator (t, &iter);
6935 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6936 {
6937 /* If a function doesn't take a variable number of arguments,
6938 the last element in the list will have type `void'. */
6939 parmtype = TREE_VALUE (parmlist);
6940 if (VOID_TYPE_P (parmtype))
6941 {
6942 if (more_const_call_expr_args_p (&iter))
6943 return END_BUILTINS;
6944 return DECL_FUNCTION_CODE (fndecl);
6945 }
6946
6947 if (! more_const_call_expr_args_p (&iter))
6948 return END_BUILTINS;
6949
6950 arg = next_const_call_expr_arg (&iter);
6951 argtype = TREE_TYPE (arg);
6952
6953 if (SCALAR_FLOAT_TYPE_P (parmtype))
6954 {
6955 if (! SCALAR_FLOAT_TYPE_P (argtype))
6956 return END_BUILTINS;
6957 }
6958 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6959 {
6960 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6961 return END_BUILTINS;
6962 }
6963 else if (POINTER_TYPE_P (parmtype))
6964 {
6965 if (! POINTER_TYPE_P (argtype))
6966 return END_BUILTINS;
6967 }
6968 else if (INTEGRAL_TYPE_P (parmtype))
6969 {
6970 if (! INTEGRAL_TYPE_P (argtype))
6971 return END_BUILTINS;
6972 }
6973 else
6974 return END_BUILTINS;
6975 }
6976
6977 /* Variable-length argument list. */
6978 return DECL_FUNCTION_CODE (fndecl);
6979 }
6980
6981 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6982 evaluate to a constant. */
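/* A sketch of the effect (illustration only):

     __builtin_constant_p (42)      folds to 1
     __builtin_constant_p ("abc")   folds to 1 (address of a string literal)
     __builtin_constant_p (p)       folds to 0 for a pointer-typed P, since
                                    only literal pointers are accepted  */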
6983
6984 static tree
6985 fold_builtin_constant_p (tree arg)
6986 {
6987 /* We return 1 for a numeric type that's known to be a constant
6988 value at compile-time or for an aggregate type that's a
6989 literal constant. */
6990 STRIP_NOPS (arg);
6991
6992 /* If we know this is a constant, return the constant one. */
6993 if (CONSTANT_CLASS_P (arg)
6994 || (TREE_CODE (arg) == CONSTRUCTOR
6995 && TREE_CONSTANT (arg)))
6996 return integer_one_node;
6997 if (TREE_CODE (arg) == ADDR_EXPR)
6998 {
6999 tree op = TREE_OPERAND (arg, 0);
7000 if (TREE_CODE (op) == STRING_CST
7001 || (TREE_CODE (op) == ARRAY_REF
7002 && integer_zerop (TREE_OPERAND (op, 1))
7003 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7004 return integer_one_node;
7005 }
7006
7007 /* If this expression has side effects, show we don't know it to be a
7008 constant. Likewise if it's a pointer or aggregate type since in
7009 those cases we only want literals, as those are only optimized
7010 when generating RTL, not later.
7011 And finally, if we are compiling an initializer, not code, we
7012 need to return a definite result now; there's not going to be any
7013 more optimization done. */
7014 if (TREE_SIDE_EFFECTS (arg)
7015 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7016 || POINTER_TYPE_P (TREE_TYPE (arg))
7017 || cfun == 0
7018 || folding_initializer
7019 || force_folding_builtin_constant_p)
7020 return integer_zero_node;
7021
7022 return NULL_TREE;
7023 }
7024
7025 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7026 return it as a truthvalue. */
7027
7028 static tree
7029 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7030 {
7031 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7032
7033 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7034 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7035 ret_type = TREE_TYPE (TREE_TYPE (fn));
7036 pred_type = TREE_VALUE (arg_types);
7037 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7038
7039 pred = fold_convert_loc (loc, pred_type, pred);
7040 expected = fold_convert_loc (loc, expected_type, expected);
7041 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7042
7043 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7044 build_int_cst (ret_type, 0));
7045 }
7046
7047 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7048 NULL_TREE if no simplification is possible. */
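/* Sketch of the distribution performed below (illustration only):

     __builtin_expect (a && b, 1)
   is rewritten, roughly, as
     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so the expected value reaches both short-circuited operands.  */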
7049
7050 static tree
7051 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7052 {
7053 tree inner, fndecl, inner_arg0;
7054 enum tree_code code;
7055
7056 /* Distribute the expected value over short-circuiting operators.
7057 See through the cast from truthvalue_type_node to long. */
7058 inner_arg0 = arg0;
7059 while (TREE_CODE (inner_arg0) == NOP_EXPR
7060 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7061 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7062 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7063
7064 /* If this is a builtin_expect within a builtin_expect keep the
7065 inner one. See through a comparison against a constant. It
7066 might have been added to create a truthvalue. */
7067 inner = inner_arg0;
7068
7069 if (COMPARISON_CLASS_P (inner)
7070 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7071 inner = TREE_OPERAND (inner, 0);
7072
7073 if (TREE_CODE (inner) == CALL_EXPR
7074 && (fndecl = get_callee_fndecl (inner))
7075 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7076 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7077 return arg0;
7078
7079 inner = inner_arg0;
7080 code = TREE_CODE (inner);
7081 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7082 {
7083 tree op0 = TREE_OPERAND (inner, 0);
7084 tree op1 = TREE_OPERAND (inner, 1);
7085
7086 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7087 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7088 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7089
7090 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7091 }
7092
7093 /* If the argument isn't invariant then there's nothing else we can do. */
7094 if (!TREE_CONSTANT (inner_arg0))
7095 return NULL_TREE;
7096
7097 /* If we expect that a comparison against the argument will fold to
7098 a constant, return the constant. In practice, this means a true
7099 constant or the address of a non-weak symbol. */
7100 inner = inner_arg0;
7101 STRIP_NOPS (inner);
7102 if (TREE_CODE (inner) == ADDR_EXPR)
7103 {
7104 do
7105 {
7106 inner = TREE_OPERAND (inner, 0);
7107 }
7108 while (TREE_CODE (inner) == COMPONENT_REF
7109 || TREE_CODE (inner) == ARRAY_REF);
7110 if ((TREE_CODE (inner) == VAR_DECL
7111 || TREE_CODE (inner) == FUNCTION_DECL)
7112 && DECL_WEAK (inner))
7113 return NULL_TREE;
7114 }
7115
7116 /* Otherwise, ARG0 already has the proper type for the return value. */
7117 return arg0;
7118 }
7119
7120 /* Fold a call to __builtin_classify_type with argument ARG. */
7121
7122 static tree
7123 fold_builtin_classify_type (tree arg)
7124 {
7125 if (arg == 0)
7126 return build_int_cst (integer_type_node, no_type_class);
7127
7128 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7129 }
7130
7131 /* Fold a call to __builtin_strlen with argument ARG. */
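/* For example (illustration only), strlen ("hello") folds to the
   constant 5 converted to TYPE, because c_strlen can compute the
   length of a string literal at compile time.  */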
7132
7133 static tree
7134 fold_builtin_strlen (location_t loc, tree type, tree arg)
7135 {
7136 if (!validate_arg (arg, POINTER_TYPE))
7137 return NULL_TREE;
7138 else
7139 {
7140 tree len = c_strlen (arg, 0);
7141
7142 if (len)
7143 return fold_convert_loc (loc, type, len);
7144
7145 return NULL_TREE;
7146 }
7147 }
7148
7149 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7150
7151 static tree
7152 fold_builtin_inf (location_t loc, tree type, int warn)
7153 {
7154 REAL_VALUE_TYPE real;
7155
7156 /* __builtin_inff is intended to be usable to define INFINITY on all
7157 targets. If an infinity is not available, INFINITY expands "to a
7158 positive constant of type float that overflows at translation
7159 time", footnote "In this case, using INFINITY will violate the
7160 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7161 Thus we pedwarn to ensure this constraint violation is
7162 diagnosed. */
7163 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7164 pedwarn (loc, 0, "target format does not support infinity");
7165
7166 real_inf (&real);
7167 return build_real (type, real);
7168 }
7169
7170 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7171
7172 static tree
7173 fold_builtin_nan (tree arg, tree type, int quiet)
7174 {
7175 REAL_VALUE_TYPE real;
7176 const char *str;
7177
7178 if (!validate_arg (arg, POINTER_TYPE))
7179 return NULL_TREE;
7180 str = c_getstr (arg);
7181 if (!str)
7182 return NULL_TREE;
7183
7184 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7185 return NULL_TREE;
7186
7187 return build_real (type, real);
7188 }
7189
7190 /* Return true if the floating point expression T has an integer value.
7191 We also allow +Inf, -Inf and NaN to be considered integer values. */
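/* Illustrative cases (not exhaustive): (double) i for an integer I,
   floor (x), and fmin (trunc (a), ceil (b)) are all recognized as
   integer valued; the plain constant 2.5 is not.  */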
7192
7193 static bool
7194 integer_valued_real_p (tree t)
7195 {
7196 switch (TREE_CODE (t))
7197 {
7198 case FLOAT_EXPR:
7199 return true;
7200
7201 case ABS_EXPR:
7202 case SAVE_EXPR:
7203 return integer_valued_real_p (TREE_OPERAND (t, 0));
7204
7205 case COMPOUND_EXPR:
7206 case MODIFY_EXPR:
7207 case BIND_EXPR:
7208 return integer_valued_real_p (TREE_OPERAND (t, 1));
7209
7210 case PLUS_EXPR:
7211 case MINUS_EXPR:
7212 case MULT_EXPR:
7213 case MIN_EXPR:
7214 case MAX_EXPR:
7215 return integer_valued_real_p (TREE_OPERAND (t, 0))
7216 && integer_valued_real_p (TREE_OPERAND (t, 1));
7217
7218 case COND_EXPR:
7219 return integer_valued_real_p (TREE_OPERAND (t, 1))
7220 && integer_valued_real_p (TREE_OPERAND (t, 2));
7221
7222 case REAL_CST:
7223 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7224
7225 case NOP_EXPR:
7226 {
7227 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7228 if (TREE_CODE (type) == INTEGER_TYPE)
7229 return true;
7230 if (TREE_CODE (type) == REAL_TYPE)
7231 return integer_valued_real_p (TREE_OPERAND (t, 0));
7232 break;
7233 }
7234
7235 case CALL_EXPR:
7236 switch (builtin_mathfn_code (t))
7237 {
7238 CASE_FLT_FN (BUILT_IN_CEIL):
7239 CASE_FLT_FN (BUILT_IN_FLOOR):
7240 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7241 CASE_FLT_FN (BUILT_IN_RINT):
7242 CASE_FLT_FN (BUILT_IN_ROUND):
7243 CASE_FLT_FN (BUILT_IN_TRUNC):
7244 return true;
7245
7246 CASE_FLT_FN (BUILT_IN_FMIN):
7247 CASE_FLT_FN (BUILT_IN_FMAX):
7248 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7249 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7250
7251 default:
7252 break;
7253 }
7254 break;
7255
7256 default:
7257 break;
7258 }
7259 return false;
7260 }
7261
7262 /* FNDECL is assumed to be a builtin where truncation can be propagated
7263 across (for instance floor((double)f) == (double)floorf (f)).
7264 Do the transformation for a call with argument ARG. */
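/* Concrete instance (sketch): when optimizing, floor ((double) f) with
   a float F is narrowed to (double) floorf (f), since rounding the
   widened value gives the same result as rounding the narrow one.  */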
7265
7266 static tree
7267 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7268 {
7269 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7270
7271 if (!validate_arg (arg, REAL_TYPE))
7272 return NULL_TREE;
7273
7274 /* Integer rounding functions are idempotent. */
7275 if (fcode == builtin_mathfn_code (arg))
7276 return arg;
7277
7278 /* If argument is already integer valued, and we don't need to worry
7279 about setting errno, there's no need to perform rounding. */
7280 if (! flag_errno_math && integer_valued_real_p (arg))
7281 return arg;
7282
7283 if (optimize)
7284 {
7285 tree arg0 = strip_float_extensions (arg);
7286 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7287 tree newtype = TREE_TYPE (arg0);
7288 tree decl;
7289
7290 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7291 && (decl = mathfn_built_in (newtype, fcode)))
7292 return fold_convert_loc (loc, ftype,
7293 build_call_expr_loc (loc, decl, 1,
7294 fold_convert_loc (loc,
7295 newtype,
7296 arg0)));
7297 }
7298 return NULL_TREE;
7299 }
7300
7301 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7302 the argument, for instance lround((double)f) -> lroundf (f).
7303 Do the transformation for a call with argument ARG. */
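/* Concrete instances (sketch): lround ((double) f) with a float F
   narrows to lroundf (f); on LP64 targets llround (x) canonicalizes
   to lround (x) because long and long long have the same precision.  */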
7304
7305 static tree
7306 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7307 {
7308 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7309
7310 if (!validate_arg (arg, REAL_TYPE))
7311 return NULL_TREE;
7312
7313 /* If argument is already integer valued, and we don't need to worry
7314 about setting errno, there's no need to perform rounding. */
7315 if (! flag_errno_math && integer_valued_real_p (arg))
7316 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7317 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7318
7319 if (optimize)
7320 {
7321 tree ftype = TREE_TYPE (arg);
7322 tree arg0 = strip_float_extensions (arg);
7323 tree newtype = TREE_TYPE (arg0);
7324 tree decl;
7325
7326 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7327 && (decl = mathfn_built_in (newtype, fcode)))
7328 return build_call_expr_loc (loc, decl, 1,
7329 fold_convert_loc (loc, newtype, arg0));
7330 }
7331
7332 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7333 sizeof (int) == sizeof (long). */
7334 if (TYPE_PRECISION (integer_type_node)
7335 == TYPE_PRECISION (long_integer_type_node))
7336 {
7337 tree newfn = NULL_TREE;
7338 switch (fcode)
7339 {
7340 CASE_FLT_FN (BUILT_IN_ICEIL):
7341 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7342 break;
7343
7344 CASE_FLT_FN (BUILT_IN_IFLOOR):
7345 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7346 break;
7347
7348 CASE_FLT_FN (BUILT_IN_IROUND):
7349 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7350 break;
7351
7352 CASE_FLT_FN (BUILT_IN_IRINT):
7353 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7354 break;
7355
7356 default:
7357 break;
7358 }
7359
7360 if (newfn)
7361 {
7362 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7363 return fold_convert_loc (loc,
7364 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7365 }
7366 }
7367
7368 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7369 sizeof (long long) == sizeof (long). */
7370 if (TYPE_PRECISION (long_long_integer_type_node)
7371 == TYPE_PRECISION (long_integer_type_node))
7372 {
7373 tree newfn = NULL_TREE;
7374 switch (fcode)
7375 {
7376 CASE_FLT_FN (BUILT_IN_LLCEIL):
7377 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7378 break;
7379
7380 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7381 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7382 break;
7383
7384 CASE_FLT_FN (BUILT_IN_LLROUND):
7385 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7386 break;
7387
7388 CASE_FLT_FN (BUILT_IN_LLRINT):
7389 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7390 break;
7391
7392 default:
7393 break;
7394 }
7395
7396 if (newfn)
7397 {
7398 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7399 return fold_convert_loc (loc,
7400 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7401 }
7402 }
7403
7404 return NULL_TREE;
7405 }
7406
7407 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7408 return type. Return NULL_TREE if no simplification can be made. */
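/* Sketches of the folds below (illustration only):

     cabs (x + 0i)  ->  fabs (x)
     cabs (-z)      ->  cabs (z)
     cabs (z)       ->  sqrt (r*r + i*i)  under -funsafe-math-optimizations
                        when optimizing for speed, with R and I the parts
                        of Z.  */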
7409
7410 static tree
7411 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7412 {
7413 tree res;
7414
7415 if (!validate_arg (arg, COMPLEX_TYPE)
7416 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7417 return NULL_TREE;
7418
7419 /* Calculate the result when the argument is a constant. */
7420 if (TREE_CODE (arg) == COMPLEX_CST
7421 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7422 type, mpfr_hypot)))
7423 return res;
7424
7425 if (TREE_CODE (arg) == COMPLEX_EXPR)
7426 {
7427 tree real = TREE_OPERAND (arg, 0);
7428 tree imag = TREE_OPERAND (arg, 1);
7429
7430 /* If either part is zero, cabs is fabs of the other. */
7431 if (real_zerop (real))
7432 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7433 if (real_zerop (imag))
7434 return fold_build1_loc (loc, ABS_EXPR, type, real);
7435
7436 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7437 if (flag_unsafe_math_optimizations
7438 && operand_equal_p (real, imag, OEP_PURE_SAME))
7439 {
7440 const REAL_VALUE_TYPE sqrt2_trunc
7441 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7442 STRIP_NOPS (real);
7443 return fold_build2_loc (loc, MULT_EXPR, type,
7444 fold_build1_loc (loc, ABS_EXPR, type, real),
7445 build_real (type, sqrt2_trunc));
7446 }
7447 }
7448
7449 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7450 if (TREE_CODE (arg) == NEGATE_EXPR
7451 || TREE_CODE (arg) == CONJ_EXPR)
7452 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7453
7454 /* Don't do this when optimizing for size. */
7455 if (flag_unsafe_math_optimizations
7456 && optimize && optimize_function_for_speed_p (cfun))
7457 {
7458 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7459
7460 if (sqrtfn != NULL_TREE)
7461 {
7462 tree rpart, ipart, result;
7463
7464 arg = builtin_save_expr (arg);
7465
7466 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7467 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7468
7469 rpart = builtin_save_expr (rpart);
7470 ipart = builtin_save_expr (ipart);
7471
7472 result = fold_build2_loc (loc, PLUS_EXPR, type,
7473 fold_build2_loc (loc, MULT_EXPR, type,
7474 rpart, rpart),
7475 fold_build2_loc (loc, MULT_EXPR, type,
7476 ipart, ipart));
7477
7478 return build_call_expr_loc (loc, sqrtfn, 1, result);
7479 }
7480 }
7481
7482 return NULL_TREE;
7483 }
7484
7485 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7486 complex tree type of the result. If NEG is true, the imaginary
7487 zero is negative. */
7488
7489 static tree
7490 build_complex_cproj (tree type, bool neg)
7491 {
7492 REAL_VALUE_TYPE rinf, rzero = dconst0;
7493
7494 real_inf (&rinf);
7495 rzero.sign = neg;
7496 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7497 build_real (TREE_TYPE (type), rzero));
7498 }
7499
7500 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7501 return type. Return NULL_TREE if no simplification can be made. */
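/* For example (sketch): a constant argument with an infinite part, say
   INFINITY - 1.0i, folds to the constant INFINITY - 0i built by
   build_complex_cproj (the imaginary zero keeps the sign of the
   imaginary part); a finite constant folds to itself.  */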
7502
7503 static tree
7504 fold_builtin_cproj (location_t loc, tree arg, tree type)
7505 {
7506 if (!validate_arg (arg, COMPLEX_TYPE)
7507 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7508 return NULL_TREE;
7509
7510 /* If there are no infinities, return arg. */
7511 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7512 return non_lvalue_loc (loc, arg);
7513
7514 /* Calculate the result when the argument is a constant. */
7515 if (TREE_CODE (arg) == COMPLEX_CST)
7516 {
7517 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7518 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7519
7520 if (real_isinf (real) || real_isinf (imag))
7521 return build_complex_cproj (type, imag->sign);
7522 else
7523 return arg;
7524 }
7525 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7526 {
7527 tree real = TREE_OPERAND (arg, 0);
7528 tree imag = TREE_OPERAND (arg, 1);
7529
7530 STRIP_NOPS (real);
7531 STRIP_NOPS (imag);
7532
7533 /* If the real part is inf and the imag part is known to be
7534 nonnegative, return (inf + 0i). Remember side-effects are
7535 possible in the imag part. */
7536 if (TREE_CODE (real) == REAL_CST
7537 && real_isinf (TREE_REAL_CST_PTR (real))
7538 && tree_expr_nonnegative_p (imag))
7539 return omit_one_operand_loc (loc, type,
7540 build_complex_cproj (type, false),
7541 arg);
7542
7543 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7544 Remember side-effects are possible in the real part. */
7545 if (TREE_CODE (imag) == REAL_CST
7546 && real_isinf (TREE_REAL_CST_PTR (imag)))
7547 return
7548 omit_one_operand_loc (loc, type,
7549 build_complex_cproj (type, TREE_REAL_CST_PTR
7550 (imag)->sign), arg);
7551 }
7552
7553 return NULL_TREE;
7554 }
7555
7556 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7557 Return NULL_TREE if no simplification can be made. */
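/* Sketches of the unsafe-math folds below (illustration only):

     sqrt (exp (x))     ->  exp (x * 0.5)
     sqrt (sqrt (x))    ->  pow (x, 0.25)
     sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)  */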
7558
7559 static tree
7560 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7561 {
7562
7563 enum built_in_function fcode;
7564 tree res;
7565
7566 if (!validate_arg (arg, REAL_TYPE))
7567 return NULL_TREE;
7568
7569 /* Calculate the result when the argument is a constant. */
7570 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7571 return res;
7572
7573 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7574 fcode = builtin_mathfn_code (arg);
7575 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7576 {
7577 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7578 arg = fold_build2_loc (loc, MULT_EXPR, type,
7579 CALL_EXPR_ARG (arg, 0),
7580 build_real (type, dconsthalf));
7581 return build_call_expr_loc (loc, expfn, 1, arg);
7582 }
7583
7584 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7585 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7586 {
7587 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7588
7589 if (powfn)
7590 {
7591 tree arg0 = CALL_EXPR_ARG (arg, 0);
7592 tree tree_root;
7593 /* The inner root was either sqrt or cbrt. */
7594 /* This was a conditional expression but it triggered a bug
7595 in Sun C 5.5. */
7596 REAL_VALUE_TYPE dconstroot;
7597 if (BUILTIN_SQRT_P (fcode))
7598 dconstroot = dconsthalf;
7599 else
7600 dconstroot = dconst_third ();
7601
7602 /* Adjust for the outer root. */
7603 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7604 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7605 tree_root = build_real (type, dconstroot);
7606 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7607 }
7608 }
7609
7610 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7611 if (flag_unsafe_math_optimizations
7612 && (fcode == BUILT_IN_POW
7613 || fcode == BUILT_IN_POWF
7614 || fcode == BUILT_IN_POWL))
7615 {
7616 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7617 tree arg0 = CALL_EXPR_ARG (arg, 0);
7618 tree arg1 = CALL_EXPR_ARG (arg, 1);
7619 tree narg1;
7620 if (!tree_expr_nonnegative_p (arg0))
7621 arg0 = build1 (ABS_EXPR, type, arg0);
7622 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7623 build_real (type, dconsthalf));
7624 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7625 }
7626
7627 return NULL_TREE;
7628 }
7629
7630 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7631 Return NULL_TREE if no simplification can be made. */
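/* Sketches (illustration only): cbrt (exp (x)) -> exp (x / 3),
   cbrt (sqrt (x)) -> pow (x, 1/6), and cbrt (cbrt (x)) -> pow (x, 1/9)
   when X is known to be nonnegative.  */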
7632
7633 static tree
7634 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7635 {
7636 const enum built_in_function fcode = builtin_mathfn_code (arg);
7637 tree res;
7638
7639 if (!validate_arg (arg, REAL_TYPE))
7640 return NULL_TREE;
7641
7642 /* Calculate the result when the argument is a constant. */
7643 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7644 return res;
7645
7646 if (flag_unsafe_math_optimizations)
7647 {
7648 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7649 if (BUILTIN_EXPONENT_P (fcode))
7650 {
7651 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7652 const REAL_VALUE_TYPE third_trunc =
7653 real_value_truncate (TYPE_MODE (type), dconst_third ());
7654 arg = fold_build2_loc (loc, MULT_EXPR, type,
7655 CALL_EXPR_ARG (arg, 0),
7656 build_real (type, third_trunc));
7657 return build_call_expr_loc (loc, expfn, 1, arg);
7658 }
7659
7660 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7661 if (BUILTIN_SQRT_P (fcode))
7662 {
7663 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7664
7665 if (powfn)
7666 {
7667 tree arg0 = CALL_EXPR_ARG (arg, 0);
7668 tree tree_root;
7669 REAL_VALUE_TYPE dconstroot = dconst_third ();
7670
7671 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7672 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7673 tree_root = build_real (type, dconstroot);
7674 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7675 }
7676 }
7677
7678 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7679 if (BUILTIN_CBRT_P (fcode))
7680 {
7681 tree arg0 = CALL_EXPR_ARG (arg, 0);
7682 if (tree_expr_nonnegative_p (arg0))
7683 {
7684 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7685
7686 if (powfn)
7687 {
7688 tree tree_root;
7689 REAL_VALUE_TYPE dconstroot;
7690
7691 real_arithmetic (&dconstroot, MULT_EXPR,
7692 dconst_third_ptr (), dconst_third_ptr ());
7693 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7694 tree_root = build_real (type, dconstroot);
7695 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7696 }
7697 }
7698 }
7699
7700 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7701 if (fcode == BUILT_IN_POW
7702 || fcode == BUILT_IN_POWF
7703 || fcode == BUILT_IN_POWL)
7704 {
7705 tree arg00 = CALL_EXPR_ARG (arg, 0);
7706 tree arg01 = CALL_EXPR_ARG (arg, 1);
7707 if (tree_expr_nonnegative_p (arg00))
7708 {
7709 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7710 const REAL_VALUE_TYPE dconstroot
7711 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7712 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7713 build_real (type, dconstroot));
7714 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7715 }
7716 }
7717 }
7718 return NULL_TREE;
7719 }
7720
7721 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7722 TYPE is the type of the return value. Return NULL_TREE if no
7723 simplification can be made. */
7724
7725 static tree
7726 fold_builtin_cos (location_t loc,
7727 tree arg, tree type, tree fndecl)
7728 {
7729 tree res, narg;
7730
7731 if (!validate_arg (arg, REAL_TYPE))
7732 return NULL_TREE;
7733
7734 /* Calculate the result when the argument is a constant. */
7735 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7736 return res;
7737
7738 /* Optimize cos(-x) into cos (x). */
7739 if ((narg = fold_strip_sign_ops (arg)))
7740 return build_call_expr_loc (loc, fndecl, 1, narg);
7741
7742 return NULL_TREE;
7743 }
7744
7745 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7746 Return NULL_TREE if no simplification can be made. */
7747
7748 static tree
7749 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7750 {
7751 if (validate_arg (arg, REAL_TYPE))
7752 {
7753 tree res, narg;
7754
7755 /* Calculate the result when the argument is a constant. */
7756 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7757 return res;
7758
7759 /* Optimize cosh(-x) into cosh (x). */
7760 if ((narg = fold_strip_sign_ops (arg)))
7761 return build_call_expr_loc (loc, fndecl, 1, narg);
7762 }
7763
7764 return NULL_TREE;
7765 }
7766
7767 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7768 argument ARG. TYPE is the type of the return value. Return
7769 NULL_TREE if no simplification can be made. */
7770
7771 static tree
7772 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7773 bool hyper)
7774 {
7775 if (validate_arg (arg, COMPLEX_TYPE)
7776 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7777 {
7778 tree tmp;
7779
7780 /* Calculate the result when the argument is a constant. */
7781 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7782 return tmp;
7783
7784 /* Optimize fn(-x) into fn(x). */
7785 if ((tmp = fold_strip_sign_ops (arg)))
7786 return build_call_expr_loc (loc, fndecl, 1, tmp);
7787 }
7788
7789 return NULL_TREE;
7790 }
7791
7792 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7793 Return NULL_TREE if no simplification can be made. */
7794
7795 static tree
7796 fold_builtin_tan (tree arg, tree type)
7797 {
7798 enum built_in_function fcode;
7799 tree res;
7800
7801 if (!validate_arg (arg, REAL_TYPE))
7802 return NULL_TREE;
7803
7804 /* Calculate the result when the argument is a constant. */
7805 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7806 return res;
7807
7808 /* Optimize tan(atan(x)) = x. */
7809 fcode = builtin_mathfn_code (arg);
7810 if (flag_unsafe_math_optimizations
7811 && (fcode == BUILT_IN_ATAN
7812 || fcode == BUILT_IN_ATANF
7813 || fcode == BUILT_IN_ATANL))
7814 return CALL_EXPR_ARG (arg, 0);
7815
7816 return NULL_TREE;
7817 }
7818
7819 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7820 NULL_TREE if no simplification can be made. */
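/* Sketch of the canonicalization below (illustration only): when the
   target C library provides the C99 complex functions,

     sincos (x, &s, &c);
   becomes roughly
     tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;

   since cexpi computes cos (x) + i*sin (x).  */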
7821
7822 static tree
7823 fold_builtin_sincos (location_t loc,
7824 tree arg0, tree arg1, tree arg2)
7825 {
7826 tree type;
7827 tree res, fn, call;
7828
7829 if (!validate_arg (arg0, REAL_TYPE)
7830 || !validate_arg (arg1, POINTER_TYPE)
7831 || !validate_arg (arg2, POINTER_TYPE))
7832 return NULL_TREE;
7833
7834 type = TREE_TYPE (arg0);
7835
7836 /* Calculate the result when the argument is a constant. */
7837 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7838 return res;
7839
7840 /* Canonicalize sincos to cexpi. */
7841 if (!targetm.libc_has_function (function_c99_math_complex))
7842 return NULL_TREE;
7843 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7844 if (!fn)
7845 return NULL_TREE;
7846
7847 call = build_call_expr_loc (loc, fn, 1, arg0);
7848 call = builtin_save_expr (call);
7849
7850 return build2 (COMPOUND_EXPR, void_type_node,
7851 build2 (MODIFY_EXPR, void_type_node,
7852 build_fold_indirect_ref_loc (loc, arg1),
7853 build1 (IMAGPART_EXPR, type, call)),
7854 build2 (MODIFY_EXPR, void_type_node,
7855 build_fold_indirect_ref_loc (loc, arg2),
7856 build1 (REALPART_EXPR, type, call)));
7857 }
7858
7859 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7860 NULL_TREE if no simplification can be made. */
7861
7862 static tree
7863 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7864 {
7865 tree rtype;
7866 tree realp, imagp, ifn;
7867 tree res;
7868
7869 if (!validate_arg (arg0, COMPLEX_TYPE)
7870 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7871 return NULL_TREE;
7872
7873 /* Calculate the result when the argument is a constant. */
7874 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7875 return res;
7876
7877 rtype = TREE_TYPE (TREE_TYPE (arg0));
7878
7879 /* If we can figure out the real part of arg0 and it is constant zero,
7880 fold to cexpi. */
7881 if (!targetm.libc_has_function (function_c99_math_complex))
7882 return NULL_TREE;
7883 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7884 if (!ifn)
7885 return NULL_TREE;
7886
7887 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7888 && real_zerop (realp))
7889 {
7890 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7891 return build_call_expr_loc (loc, ifn, 1, narg);
7892 }
7893
7894 /* If we can easily decompose the real and imaginary parts, split cexp
7895 into exp (r) * cexpi (i). */
7896 if (flag_unsafe_math_optimizations
7897 && realp)
7898 {
7899 tree rfn, rcall, icall;
7900
7901 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7902 if (!rfn)
7903 return NULL_TREE;
7904
7905 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7906 if (!imagp)
7907 return NULL_TREE;
7908
7909 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7910 icall = builtin_save_expr (icall);
7911 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7912 rcall = builtin_save_expr (rcall);
7913 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7914 fold_build2_loc (loc, MULT_EXPR, rtype,
7915 rcall,
7916 fold_build1_loc (loc, REALPART_EXPR,
7917 rtype, icall)),
7918 fold_build2_loc (loc, MULT_EXPR, rtype,
7919 rcall,
7920 fold_build1_loc (loc, IMAGPART_EXPR,
7921 rtype, icall)));
7922 }
7923
7924 return NULL_TREE;
7925 }
7926
7927 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7928 Return NULL_TREE if no simplification can be made. */
7929
7930 static tree
7931 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7932 {
7933 if (!validate_arg (arg, REAL_TYPE))
7934 return NULL_TREE;
7935
7936 /* Optimize trunc of constant value. */
7937 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7938 {
7939 REAL_VALUE_TYPE r, x;
7940 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7941
7942 x = TREE_REAL_CST (arg);
7943 real_trunc (&r, TYPE_MODE (type), &x);
7944 return build_real (type, r);
7945 }
7946
7947 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7948 }
7949
7950 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7951 Return NULL_TREE if no simplification can be made. */
7952
7953 static tree
7954 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7955 {
7956 if (!validate_arg (arg, REAL_TYPE))
7957 return NULL_TREE;
7958
7959 /* Optimize floor of constant value. */
7960 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7961 {
7962 REAL_VALUE_TYPE x;
7963
7964 x = TREE_REAL_CST (arg);
7965 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7966 {
7967 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7968 REAL_VALUE_TYPE r;
7969
7970 real_floor (&r, TYPE_MODE (type), &x);
7971 return build_real (type, r);
7972 }
7973 }
7974
7975 /* Fold floor (x) where x is nonnegative to trunc (x). */
7976 if (tree_expr_nonnegative_p (arg))
7977 {
7978 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7979 if (truncfn)
7980 return build_call_expr_loc (loc, truncfn, 1, arg);
7981 }
7982
7983 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7984 }
7985
7986 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7987 Return NULL_TREE if no simplification can be made. */
7988
7989 static tree
7990 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7991 {
7992 if (!validate_arg (arg, REAL_TYPE))
7993 return NULL_TREE;
7994
7995 /* Optimize ceil of constant value. */
7996 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7997 {
7998 REAL_VALUE_TYPE x;
7999
8000 x = TREE_REAL_CST (arg);
8001 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8002 {
8003 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8004 REAL_VALUE_TYPE r;
8005
8006 real_ceil (&r, TYPE_MODE (type), &x);
8007 return build_real (type, r);
8008 }
8009 }
8010
8011 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8012 }
8013
8014 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8015 Return NULL_TREE if no simplification can be made. */
8016
8017 static tree
8018 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8019 {
8020 if (!validate_arg (arg, REAL_TYPE))
8021 return NULL_TREE;
8022
8023 /* Optimize round of constant value. */
8024 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8025 {
8026 REAL_VALUE_TYPE x;
8027
8028 x = TREE_REAL_CST (arg);
8029 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8030 {
8031 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8032 REAL_VALUE_TYPE r;
8033
8034 real_round (&r, TYPE_MODE (type), &x);
8035 return build_real (type, r);
8036 }
8037 }
8038
8039 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8040 }
8041
8042 /* Fold function call to builtin lround, lroundf or lroundl (or the
8043 corresponding long long versions) and other rounding functions. ARG
8044 is the argument to the call. Return NULL_TREE if no simplification
8045 can be made. */
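/* For example (illustration only): lround (2.5) folds to the integer
   constant 3, while lround (1e30) is left alone because the rounded
   value does not fit in long and real_to_integer reports failure.  */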
8046
8047 static tree
8048 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8049 {
8050 if (!validate_arg (arg, REAL_TYPE))
8051 return NULL_TREE;
8052
8053 /* Optimize lround of constant value. */
8054 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8055 {
8056 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8057
8058 if (real_isfinite (&x))
8059 {
8060 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8061 tree ftype = TREE_TYPE (arg);
8062 REAL_VALUE_TYPE r;
8063 bool fail = false;
8064
8065 switch (DECL_FUNCTION_CODE (fndecl))
8066 {
8067 CASE_FLT_FN (BUILT_IN_IFLOOR):
8068 CASE_FLT_FN (BUILT_IN_LFLOOR):
8069 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8070 real_floor (&r, TYPE_MODE (ftype), &x);
8071 break;
8072
8073 CASE_FLT_FN (BUILT_IN_ICEIL):
8074 CASE_FLT_FN (BUILT_IN_LCEIL):
8075 CASE_FLT_FN (BUILT_IN_LLCEIL):
8076 real_ceil (&r, TYPE_MODE (ftype), &x);
8077 break;
8078
8079 CASE_FLT_FN (BUILT_IN_IROUND):
8080 CASE_FLT_FN (BUILT_IN_LROUND):
8081 CASE_FLT_FN (BUILT_IN_LLROUND):
8082 real_round (&r, TYPE_MODE (ftype), &x);
8083 break;
8084
8085 default:
8086 gcc_unreachable ();
8087 }
8088
8089 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8090 if (!fail)
8091 return wide_int_to_tree (itype, val);
8092 }
8093 }
8094
8095 switch (DECL_FUNCTION_CODE (fndecl))
8096 {
8097 CASE_FLT_FN (BUILT_IN_LFLOOR):
8098 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8099 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8100 if (tree_expr_nonnegative_p (arg))
8101 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8102 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8103 break;
8104 default:;
8105 }
8106
8107 return fold_fixed_mathfn (loc, fndecl, arg);
8108 }
8109
8110 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8111 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8112 the argument to the call. Return NULL_TREE if no simplification can
8113 be made. */
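/* Constant-folding examples (illustration only):

     __builtin_popcount (0xff)  ->  8
     __builtin_ffs (0)          ->  0
     __builtin_ctz (8)          ->  3  */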
8114
8115 static tree
8116 fold_builtin_bitop (tree fndecl, tree arg)
8117 {
8118 if (!validate_arg (arg, INTEGER_TYPE))
8119 return NULL_TREE;
8120
8121 /* Optimize for constant argument. */
8122 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8123 {
8124 int result;
8125
8126 switch (DECL_FUNCTION_CODE (fndecl))
8127 {
8128 CASE_INT_FN (BUILT_IN_FFS):
8129 result = wi::ffs (arg);
8130 break;
8131
8132 CASE_INT_FN (BUILT_IN_CLZ):
8133 result = wi::clz (arg);
8134 break;
8135
8136 CASE_INT_FN (BUILT_IN_CTZ):
8137 result = wi::ctz (arg);
8138 break;
8139
8140 CASE_INT_FN (BUILT_IN_CLRSB):
8141 result = wi::clrsb (arg);
8142 break;
8143
8144 CASE_INT_FN (BUILT_IN_POPCOUNT):
8145 result = wi::popcount (arg);
8146 break;
8147
8148 CASE_INT_FN (BUILT_IN_PARITY):
8149 result = wi::parity (arg);
8150 break;
8151
8152 default:
8153 gcc_unreachable ();
8154 }
8155
8156 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8157 }
8158
8159 return NULL_TREE;
8160 }
8161
8162 /* Fold function call to builtin_bswap and the short, long and long long
8163 variants. Return NULL_TREE if no simplification can be made. */
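/* For example (illustration only), __builtin_bswap32 (0x12345678)
   folds to the constant 0x78563412.  */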
8164 static tree
8165 fold_builtin_bswap (tree fndecl, tree arg)
8166 {
8167 if (! validate_arg (arg, INTEGER_TYPE))
8168 return NULL_TREE;
8169
8170 /* Optimize constant value. */
8171 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8172 {
8173 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8174
8175 switch (DECL_FUNCTION_CODE (fndecl))
8176 {
8177 case BUILT_IN_BSWAP16:
8178 case BUILT_IN_BSWAP32:
8179 case BUILT_IN_BSWAP64:
8180 {
8181 signop sgn = TYPE_SIGN (type);
8182 tree result =
8183 wide_int_to_tree (type,
8184 wide_int::from (arg, TYPE_PRECISION (type),
8185 sgn).bswap ());
8186 return result;
8187 }
8188 default:
8189 gcc_unreachable ();
8190 }
8191 }
8192
8193 return NULL_TREE;
8194 }
8195
8196 /* A subroutine of fold_builtin to fold the various logarithmic
8197 functions. Return NULL_TREE if no simplification can be made.
8198 FUNC is the corresponding MPFR logarithm function. */
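/* Sketches of the unsafe-math folds below (illustration only):

     log (exp (x))     ->  x
     log2 (exp2 (x))   ->  x
     log (pow (x, y))  ->  y * log (x)
     log (sqrt (x))    ->  0.5 * log (x)  */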
8199
8200 static tree
8201 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8202 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8203 {
8204 if (validate_arg (arg, REAL_TYPE))
8205 {
8206 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8207 tree res;
8208 const enum built_in_function fcode = builtin_mathfn_code (arg);
8209
8210 /* Calculate the result when the argument is a constant. */
8211 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8212 return res;
8213
8214 /* Special case, optimize logN(expN(x)) = x. */
8215 if (flag_unsafe_math_optimizations
8216 && ((func == mpfr_log
8217 && (fcode == BUILT_IN_EXP
8218 || fcode == BUILT_IN_EXPF
8219 || fcode == BUILT_IN_EXPL))
8220 || (func == mpfr_log2
8221 && (fcode == BUILT_IN_EXP2
8222 || fcode == BUILT_IN_EXP2F
8223 || fcode == BUILT_IN_EXP2L))
8224 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8225 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8226
8227 /* Optimize logN(func()) for various exponential functions. We
8228 want to determine the value "x" and the power "exponent" in
8229 order to transform logN(x**exponent) into exponent*logN(x). */
8230 if (flag_unsafe_math_optimizations)
8231 {
8232 tree exponent = 0, x = 0;
8233
8234 switch (fcode)
8235 {
8236 CASE_FLT_FN (BUILT_IN_EXP):
8237 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8238 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8239 dconst_e ()));
8240 exponent = CALL_EXPR_ARG (arg, 0);
8241 break;
8242 CASE_FLT_FN (BUILT_IN_EXP2):
8243 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8244 x = build_real (type, dconst2);
8245 exponent = CALL_EXPR_ARG (arg, 0);
8246 break;
8247 CASE_FLT_FN (BUILT_IN_EXP10):
8248 CASE_FLT_FN (BUILT_IN_POW10):
8249 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8250 {
8251 REAL_VALUE_TYPE dconst10;
8252 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8253 x = build_real (type, dconst10);
8254 }
8255 exponent = CALL_EXPR_ARG (arg, 0);
8256 break;
8257 CASE_FLT_FN (BUILT_IN_SQRT):
8258 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8259 x = CALL_EXPR_ARG (arg, 0);
8260 exponent = build_real (type, dconsthalf);
8261 break;
8262 CASE_FLT_FN (BUILT_IN_CBRT):
8263 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8264 x = CALL_EXPR_ARG (arg, 0);
8265 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8266 dconst_third ()));
8267 break;
8268 CASE_FLT_FN (BUILT_IN_POW):
8269 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8270 x = CALL_EXPR_ARG (arg, 0);
8271 exponent = CALL_EXPR_ARG (arg, 1);
8272 break;
8273 default:
8274 break;
8275 }
8276
8277 /* Now perform the optimization. */
8278 if (x && exponent)
8279 {
8280 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8281 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8282 }
8283 }
8284 }
8285
8286 return NULL_TREE;
8287 }
8288
8289 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8290 NULL_TREE if no simplification can be made. */
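/* Sketches of the folds below (illustration only):

     hypot (-x, fabs (y))  ->  hypot (x, y)
     hypot (x, 0.0)        ->  fabs (x)
     hypot (x, x)          ->  fabs (x) * sqrt (2)   (unsafe math only)  */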
8291
8292 static tree
8293 fold_builtin_hypot (location_t loc, tree fndecl,
8294 tree arg0, tree arg1, tree type)
8295 {
8296 tree res, narg0, narg1;
8297
8298 if (!validate_arg (arg0, REAL_TYPE)
8299 || !validate_arg (arg1, REAL_TYPE))
8300 return NULL_TREE;
8301
8302 /* Calculate the result when the argument is a constant. */
8303 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8304 return res;
8305
8306 /* If either argument to hypot has a negate or abs, strip that off.
8307 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8308 narg0 = fold_strip_sign_ops (arg0);
8309 narg1 = fold_strip_sign_ops (arg1);
8310 if (narg0 || narg1)
8311 {
8312 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8313 narg1 ? narg1 : arg1);
8314 }
8315
8316 /* If either argument is zero, hypot is fabs of the other. */
8317 if (real_zerop (arg0))
8318 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8319 else if (real_zerop (arg1))
8320 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8321
8322 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8323 if (flag_unsafe_math_optimizations
8324 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8325 {
8326 const REAL_VALUE_TYPE sqrt2_trunc
8327 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8328 return fold_build2_loc (loc, MULT_EXPR, type,
8329 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8330 build_real (type, sqrt2_trunc));
8331 }
8332
8333 return NULL_TREE;
8334 }
8335
8336
8337 /* Fold a builtin function call to pow, powf, or powl. Return
8338 NULL_TREE if no simplification can be made. */
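/* A few of the identities handled below (illustration only):

     pow (x, 0.0)       ->  1.0
     pow (x, -1.0)      ->  1.0 / x
     pow (x, 0.5)       ->  sqrt (x)            (unsafe math only)
     pow (sqrt (x), y)  ->  pow (x, y * 0.5)    (unsafe math only)  */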
8339 static tree
8340 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8341 {
8342 tree res;
8343
8344 if (!validate_arg (arg0, REAL_TYPE)
8345 || !validate_arg (arg1, REAL_TYPE))
8346 return NULL_TREE;
8347
8348 /* Calculate the result when the argument is a constant. */
8349 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8350 return res;
8351
8352 /* Optimize pow(1.0,y) = 1.0. */
8353 if (real_onep (arg0))
8354 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8355
8356 if (TREE_CODE (arg1) == REAL_CST
8357 && !TREE_OVERFLOW (arg1))
8358 {
8359 REAL_VALUE_TYPE cint;
8360 REAL_VALUE_TYPE c;
8361 HOST_WIDE_INT n;
8362
8363 c = TREE_REAL_CST (arg1);
8364
8365 /* Optimize pow(x,0.0) = 1.0. */
8366 if (REAL_VALUES_EQUAL (c, dconst0))
8367 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8368 arg0);
8369
8370 /* Optimize pow(x,1.0) = x. */
8371 if (REAL_VALUES_EQUAL (c, dconst1))
8372 return arg0;
8373
8374 /* Optimize pow(x,-1.0) = 1.0/x. */
8375 if (REAL_VALUES_EQUAL (c, dconstm1))
8376 return fold_build2_loc (loc, RDIV_EXPR, type,
8377 build_real (type, dconst1), arg0);
8378
8379 /* Optimize pow(x,0.5) = sqrt(x). */
8380 if (flag_unsafe_math_optimizations
8381 && REAL_VALUES_EQUAL (c, dconsthalf))
8382 {
8383 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8384
8385 if (sqrtfn != NULL_TREE)
8386 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8387 }
8388
8389 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8390 if (flag_unsafe_math_optimizations)
8391 {
8392 const REAL_VALUE_TYPE dconstroot
8393 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8394
8395 if (REAL_VALUES_EQUAL (c, dconstroot))
8396 {
8397 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8398 if (cbrtfn != NULL_TREE)
8399 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8400 }
8401 }
8402
8403 /* Check for an integer exponent. */
8404 n = real_to_integer (&c);
8405 real_from_integer (&cint, VOIDmode, n, SIGNED);
8406 if (real_identical (&c, &cint))
8407 {
8408 /* Attempt to evaluate pow at compile-time, unless this should
8409 raise an exception. */
8410 if (TREE_CODE (arg0) == REAL_CST
8411 && !TREE_OVERFLOW (arg0)
8412 && (n > 0
8413 || (!flag_trapping_math && !flag_errno_math)
8414 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8415 {
8416 REAL_VALUE_TYPE x;
8417 bool inexact;
8418
8419 x = TREE_REAL_CST (arg0);
8420 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8421 if (flag_unsafe_math_optimizations || !inexact)
8422 return build_real (type, x);
8423 }
8424
8425 /* Strip sign ops from even integer powers. */
8426 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8427 {
8428 tree narg0 = fold_strip_sign_ops (arg0);
8429 if (narg0)
8430 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8431 }
8432 }
8433 }
8434
8435 if (flag_unsafe_math_optimizations)
8436 {
8437 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8438
8439 /* Optimize pow(expN(x),y) = expN(x*y). */
8440 if (BUILTIN_EXPONENT_P (fcode))
8441 {
8442 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8443 tree arg = CALL_EXPR_ARG (arg0, 0);
8444 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8445 return build_call_expr_loc (loc, expfn, 1, arg);
8446 }
8447
8448 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8449 if (BUILTIN_SQRT_P (fcode))
8450 {
8451 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8452 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8453 build_real (type, dconsthalf));
8454 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8455 }
8456
8457 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8458 if (BUILTIN_CBRT_P (fcode))
8459 {
8460 tree arg = CALL_EXPR_ARG (arg0, 0);
8461 if (tree_expr_nonnegative_p (arg))
8462 {
8463 const REAL_VALUE_TYPE dconstroot
8464 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8465 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8466 build_real (type, dconstroot));
8467 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8468 }
8469 }
8470
8471 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8472 if (fcode == BUILT_IN_POW
8473 || fcode == BUILT_IN_POWF
8474 || fcode == BUILT_IN_POWL)
8475 {
8476 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8477 if (tree_expr_nonnegative_p (arg00))
8478 {
8479 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8480 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8481 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8482 }
8483 }
8484 }
8485
8486 return NULL_TREE;
8487 }
8488
8489 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8490 Return NULL_TREE if no simplification can be made. */
8491 static tree
8492 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8493 tree arg0, tree arg1, tree type)
8494 {
8495 if (!validate_arg (arg0, REAL_TYPE)
8496 || !validate_arg (arg1, INTEGER_TYPE))
8497 return NULL_TREE;
8498
8499 /* Optimize powi(1.0,y) = 1.0. */
8500 if (real_onep (arg0))
8501 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8502
8503 if (tree_fits_shwi_p (arg1))
8504 {
8505 HOST_WIDE_INT c = tree_to_shwi (arg1);
8506
8507 /* Evaluate powi at compile-time. */
8508 if (TREE_CODE (arg0) == REAL_CST
8509 && !TREE_OVERFLOW (arg0))
8510 {
8511 REAL_VALUE_TYPE x;
8512 x = TREE_REAL_CST (arg0);
8513 real_powi (&x, TYPE_MODE (type), &x, c);
8514 return build_real (type, x);
8515 }
8516
8517 /* Optimize powi(x,0) = 1.0. */
8518 if (c == 0)
8519 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8520 arg0);
8521
8522 /* Optimize powi(x,1) = x. */
8523 if (c == 1)
8524 return arg0;
8525
8526 /* Optimize powi(x,-1) = 1.0/x. */
8527 if (c == -1)
8528 return fold_build2_loc (loc, RDIV_EXPR, type,
8529 build_real (type, dconst1), arg0);
8530 }
8531
8532 return NULL_TREE;
8533 }
8534
8535 /* A subroutine of fold_builtin to fold the various exponent
8536 functions. Return NULL_TREE if no simplification can be made.
8537 FUNC is the corresponding MPFR exponent function. */
8538
8539 static tree
8540 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8541 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8542 {
8543 if (validate_arg (arg, REAL_TYPE))
8544 {
8545 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8546 tree res;
8547
8548 /* Calculate the result when the argument is a constant. */
8549 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8550 return res;
8551
8552 /* Optimize expN(logN(x)) = x. */
8553 if (flag_unsafe_math_optimizations)
8554 {
8555 const enum built_in_function fcode = builtin_mathfn_code (arg);
8556
8557 if ((func == mpfr_exp
8558 && (fcode == BUILT_IN_LOG
8559 || fcode == BUILT_IN_LOGF
8560 || fcode == BUILT_IN_LOGL))
8561 || (func == mpfr_exp2
8562 && (fcode == BUILT_IN_LOG2
8563 || fcode == BUILT_IN_LOG2F
8564 || fcode == BUILT_IN_LOG2L))
8565 || (func == mpfr_exp10
8566 && (fcode == BUILT_IN_LOG10
8567 || fcode == BUILT_IN_LOG10F
8568 || fcode == BUILT_IN_LOG10L)))
8569 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8570 }
8571 }
8572
8573 return NULL_TREE;
8574 }
8575
8576 /* Return true if VAR is a VAR_DECL or a component thereof. */
8577
8578 static bool
8579 var_decl_component_p (tree var)
8580 {
8581 tree inner = var;
8582 while (handled_component_p (inner))
8583 inner = TREE_OPERAND (inner, 0);
8584 return SSA_VAR_P (inner);
8585 }
8586
8587 /* Fold function call to builtin memset. Return
8588 NULL_TREE if no simplification can be made. */
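/* Sketch of the single-store fold below (illustration only): for a
   suitably aligned 4-byte int object I,

     memset (&i, 0, sizeof (int));
   folds to the plain store
     i = 0;  */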
8589
8590 static tree
8591 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8592 tree type, bool ignore)
8593 {
8594 tree var, ret, etype;
8595 unsigned HOST_WIDE_INT length, cval;
8596
8597 if (! validate_arg (dest, POINTER_TYPE)
8598 || ! validate_arg (c, INTEGER_TYPE)
8599 || ! validate_arg (len, INTEGER_TYPE))
8600 return NULL_TREE;
8601
8602 if (! tree_fits_uhwi_p (len))
8603 return NULL_TREE;
8604
8605 /* If the LEN parameter is zero, return DEST. */
8606 if (integer_zerop (len))
8607 return omit_one_operand_loc (loc, type, dest, c);
8608
8609 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8610 return NULL_TREE;
8611
8612 var = dest;
8613 STRIP_NOPS (var);
8614 if (TREE_CODE (var) != ADDR_EXPR)
8615 return NULL_TREE;
8616
8617 var = TREE_OPERAND (var, 0);
8618 if (TREE_THIS_VOLATILE (var))
8619 return NULL_TREE;
8620
8621 etype = TREE_TYPE (var);
8622 if (TREE_CODE (etype) == ARRAY_TYPE)
8623 etype = TREE_TYPE (etype);
8624
8625 if (!INTEGRAL_TYPE_P (etype)
8626 && !POINTER_TYPE_P (etype))
8627 return NULL_TREE;
8628
8629 if (! var_decl_component_p (var))
8630 return NULL_TREE;
8631
8632 length = tree_to_uhwi (len);
8633 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8634 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8635 return NULL_TREE;
8636
8637 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8638 return NULL_TREE;
8639
8640 if (integer_zerop (c))
8641 cval = 0;
8642 else
8643 {
8644 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8645 return NULL_TREE;
8646
8647 cval = TREE_INT_CST_LOW (c);
8648 cval &= 0xff;
8649 cval |= cval << 8;
8650 cval |= cval << 16;
8651 cval |= (cval << 31) << 1;
8652 }
8653
8654 ret = build_int_cst_type (etype, cval);
8655 var = build_fold_indirect_ref_loc (loc,
8656 fold_convert_loc (loc,
8657 build_pointer_type (etype),
8658 dest));
8659 ret = build2 (MODIFY_EXPR, etype, var, ret);
8660 if (ignore)
8661 return ret;
8662
8663 return omit_one_operand_loc (loc, type, dest, ret);
8664 }
8665
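/* Editor's note: an illustrative sketch of the memset fold above, for a
   hypothetical target with CHAR_BIT == 8 and a 4-byte int:

     int i;
     memset (&i, 0xab, sizeof i);

   folds to the single store i = 0xabababab, because LENGTH matches the
   mode size of int, CVAL starts as 0xab, and the shift-or sequence
   replicates it into every byte before build_int_cst_type truncates it
   to the element type.  */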
8666 /* Fold function call to builtin bzero. Return
8667 NULL_TREE if no simplification can be made. */
8668
8669 static tree
8670 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8671 {
8672 if (! validate_arg (dest, POINTER_TYPE)
8673 || ! validate_arg (size, INTEGER_TYPE))
8674 return NULL_TREE;
8675
8676 if (!ignore)
8677 return NULL_TREE;
8678
8679 /* New argument list transforming bzero(ptr x, int y) to
8680 memset(ptr x, int 0, size_t y). This is done this way
8681 so that if it isn't expanded inline, we fall back to
8682 calling bzero instead of memset. */
8683
8684 return fold_builtin_memset (loc, dest, integer_zero_node,
8685 fold_convert_loc (loc, size_type_node, size),
8686 void_type_node, ignore);
8687 }
8688
8689 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8690 NULL_TREE if no simplification can be made.
8691 If ENDP is 0, return DEST (like memcpy).
8692 If ENDP is 1, return DEST+LEN (like mempcpy).
8693 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8694 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8695 (memmove). */
8696
8697 static tree
8698 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8699 tree len, tree type, bool ignore, int endp)
8700 {
8701 tree destvar, srcvar, expr;
8702
8703 if (! validate_arg (dest, POINTER_TYPE)
8704 || ! validate_arg (src, POINTER_TYPE)
8705 || ! validate_arg (len, INTEGER_TYPE))
8706 return NULL_TREE;
8707
8708 /* If the LEN parameter is zero, return DEST. */
8709 if (integer_zerop (len))
8710 return omit_one_operand_loc (loc, type, dest, src);
8711
8712 /* If SRC and DEST are the same (and not volatile), return
8713 DEST{,+LEN,+LEN-1}. */
8714 if (operand_equal_p (src, dest, 0))
8715 expr = len;
8716 else
8717 {
8718 tree srctype, desttype;
8719 unsigned int src_align, dest_align;
8720 tree off0;
8721
8722 if (endp == 3)
8723 {
8724 src_align = get_pointer_alignment (src);
8725 dest_align = get_pointer_alignment (dest);
8726
8727 /* Both DEST and SRC must be pointer types.
8728 ??? This is what old code did. Is the testing for pointer types
8729 really mandatory?
8730
8731 If either SRC is readonly or length is 1, we can use memcpy. */
8732 if (!dest_align || !src_align)
8733 return NULL_TREE;
8734 if (readonly_data_expr (src)
8735 || (tree_fits_uhwi_p (len)
8736 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8737 >= tree_to_uhwi (len))))
8738 {
8739 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8740 if (!fn)
8741 return NULL_TREE;
8742 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8743 }
8744
8745 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8746 if (TREE_CODE (src) == ADDR_EXPR
8747 && TREE_CODE (dest) == ADDR_EXPR)
8748 {
8749 tree src_base, dest_base, fn;
8750 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8751 HOST_WIDE_INT size = -1;
8752 HOST_WIDE_INT maxsize = -1;
8753
8754 srcvar = TREE_OPERAND (src, 0);
8755 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8756 &size, &maxsize);
8757 destvar = TREE_OPERAND (dest, 0);
8758 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8759 &size, &maxsize);
8760 if (tree_fits_uhwi_p (len))
8761 maxsize = tree_to_uhwi (len);
8762 else
8763 maxsize = -1;
8764 src_offset /= BITS_PER_UNIT;
8765 dest_offset /= BITS_PER_UNIT;
8766 if (SSA_VAR_P (src_base)
8767 && SSA_VAR_P (dest_base))
8768 {
8769 if (operand_equal_p (src_base, dest_base, 0)
8770 && ranges_overlap_p (src_offset, maxsize,
8771 dest_offset, maxsize))
8772 return NULL_TREE;
8773 }
8774 else if (TREE_CODE (src_base) == MEM_REF
8775 && TREE_CODE (dest_base) == MEM_REF)
8776 {
8777 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8778 TREE_OPERAND (dest_base, 0), 0))
8779 return NULL_TREE;
8780 offset_int off = mem_ref_offset (src_base) + src_offset;
8781 if (!wi::fits_shwi_p (off))
8782 return NULL_TREE;
8783 src_offset = off.to_shwi ();
8784
8785 off = mem_ref_offset (dest_base) + dest_offset;
8786 if (!wi::fits_shwi_p (off))
8787 return NULL_TREE;
8788 dest_offset = off.to_shwi ();
8789 if (ranges_overlap_p (src_offset, maxsize,
8790 dest_offset, maxsize))
8791 return NULL_TREE;
8792 }
8793 else
8794 return NULL_TREE;
8795
8796 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8797 if (!fn)
8798 return NULL_TREE;
8799 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8800 }
8801
8802 /* If the destination and source do not alias, optimize into
8803 memcpy as well. */
8804 if ((is_gimple_min_invariant (dest)
8805 || TREE_CODE (dest) == SSA_NAME)
8806 && (is_gimple_min_invariant (src)
8807 || TREE_CODE (src) == SSA_NAME))
8808 {
8809 ao_ref destr, srcr;
8810 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8811 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8812 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8813 {
8814 tree fn;
8815 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8816 if (!fn)
8817 return NULL_TREE;
8818 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8819 }
8820 }
8821
8822 return NULL_TREE;
8823 }
8824
8825 if (!tree_fits_shwi_p (len))
8826 return NULL_TREE;
8827 /* FIXME:
8828 This logic loses for arguments like (type *)malloc (sizeof (type)),
8829 since we strip the casts up to the VOID return value of malloc.
8830 Perhaps we ought to inherit the type from a non-VOID argument here? */
8831 STRIP_NOPS (src);
8832 STRIP_NOPS (dest);
8833 if (!POINTER_TYPE_P (TREE_TYPE (src))
8834 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8835 return NULL_TREE;
8836 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8837 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8838 {
8839 tree tem = TREE_OPERAND (src, 0);
8840 STRIP_NOPS (tem);
8841 if (tem != TREE_OPERAND (src, 0))
8842 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8843 }
8844 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8845 {
8846 tree tem = TREE_OPERAND (dest, 0);
8847 STRIP_NOPS (tem);
8848 if (tem != TREE_OPERAND (dest, 0))
8849 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8850 }
8851 srctype = TREE_TYPE (TREE_TYPE (src));
8852 if (TREE_CODE (srctype) == ARRAY_TYPE
8853 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8854 {
8855 srctype = TREE_TYPE (srctype);
8856 STRIP_NOPS (src);
8857 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8858 }
8859 desttype = TREE_TYPE (TREE_TYPE (dest));
8860 if (TREE_CODE (desttype) == ARRAY_TYPE
8861 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8862 {
8863 desttype = TREE_TYPE (desttype);
8864 STRIP_NOPS (dest);
8865 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8866 }
8867 if (TREE_ADDRESSABLE (srctype)
8868 || TREE_ADDRESSABLE (desttype))
8869 return NULL_TREE;
8870
8871 src_align = get_pointer_alignment (src);
8872 dest_align = get_pointer_alignment (dest);
8873 if (dest_align < TYPE_ALIGN (desttype)
8874 || src_align < TYPE_ALIGN (srctype))
8875 return NULL_TREE;
8876
8877 if (!ignore)
8878 dest = builtin_save_expr (dest);
8879
8880 /* Build accesses at offset zero with a ref-all character type. */
8881 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8882 ptr_mode, true), 0);
8883
8884 destvar = dest;
8885 STRIP_NOPS (destvar);
8886 if (TREE_CODE (destvar) == ADDR_EXPR
8887 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8888 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8889 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8890 else
8891 destvar = NULL_TREE;
8892
8893 srcvar = src;
8894 STRIP_NOPS (srcvar);
8895 if (TREE_CODE (srcvar) == ADDR_EXPR
8896 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8897 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8898 {
8899 if (!destvar
8900 || src_align >= TYPE_ALIGN (desttype))
8901 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8902 srcvar, off0);
8903 else if (!STRICT_ALIGNMENT)
8904 {
8905 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8906 src_align);
8907 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8908 }
8909 else
8910 srcvar = NULL_TREE;
8911 }
8912 else
8913 srcvar = NULL_TREE;
8914
8915 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8916 return NULL_TREE;
8917
8918 if (srcvar == NULL_TREE)
8919 {
8920 STRIP_NOPS (src);
8921 if (src_align >= TYPE_ALIGN (desttype))
8922 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8923 else
8924 {
8925 if (STRICT_ALIGNMENT)
8926 return NULL_TREE;
8927 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8928 src_align);
8929 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8930 }
8931 }
8932 else if (destvar == NULL_TREE)
8933 {
8934 STRIP_NOPS (dest);
8935 if (dest_align >= TYPE_ALIGN (srctype))
8936 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8937 else
8938 {
8939 if (STRICT_ALIGNMENT)
8940 return NULL_TREE;
8941 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8942 dest_align);
8943 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8944 }
8945 }
8946
8947 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8948 }
8949
8950 if (ignore)
8951 return expr;
8952
8953 if (endp == 0 || endp == 3)
8954 return omit_one_operand_loc (loc, type, dest, expr);
8955
8956 if (expr == len)
8957 expr = NULL_TREE;
8958
8959 if (endp == 2)
8960 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8961 ssize_int (1));
8962
8963 dest = fold_build_pointer_plus_loc (loc, dest, len);
8964 dest = fold_convert_loc (loc, type, dest);
8965 if (expr)
8966 dest = omit_one_operand_loc (loc, type, dest, expr);
8967 return dest;
8968 }
8969
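/* Editor's note: the ENDP encoding used above, spelled out on sketch
   calls (names illustrative only):

     memcpy (d, s, n)       ENDP == 0   folds to (..., d)
     mempcpy (d, s, n)      ENDP == 1   folds to (..., d + n)
     stpcpy-style callers   ENDP == 2   folds to (..., d + n - 1)
     memmove (d, s, n)      ENDP == 3   folds to (..., d), overlap OK

   where "..." is the MODIFY_EXPR built from DESTVAR and SRCVAR when a
   single aggregate copy is possible.  */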
8970 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8971 If LEN is not NULL, it represents the length of the string to be
8972 copied. Return NULL_TREE if no simplification can be made. */
8973
8974 tree
8975 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8976 {
8977 tree fn;
8978
8979 if (!validate_arg (dest, POINTER_TYPE)
8980 || !validate_arg (src, POINTER_TYPE))
8981 return NULL_TREE;
8982
8983 /* If SRC and DEST are the same (and not volatile), return DEST. */
8984 if (operand_equal_p (src, dest, 0))
8985 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8986
8987 if (optimize_function_for_size_p (cfun))
8988 return NULL_TREE;
8989
8990 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8991 if (!fn)
8992 return NULL_TREE;
8993
8994 if (!len)
8995 {
8996 len = c_strlen (src, 1);
8997 if (! len || TREE_SIDE_EFFECTS (len))
8998 return NULL_TREE;
8999 }
9000
9001 len = fold_convert_loc (loc, size_type_node, len);
9002 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9003 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9004 build_call_expr_loc (loc, fn, 3, dest, src, len));
9005 }
9006
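/* Editor's note: a minimal sketch of the strcpy fold, assuming a
   constant source and no size optimization:

     strcpy (buf, "hi");

   has c_strlen == 2 and therefore folds to

     memcpy (buf, "hi", 3);

   i.e. the length plus one for the terminating NUL, with the result
   converted back to the declared return type.  */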
9007 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9008 Return NULL_TREE if no simplification can be made. */
9009
9010 static tree
9011 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9012 {
9013 tree fn, len, lenp1, call, type;
9014
9015 if (!validate_arg (dest, POINTER_TYPE)
9016 || !validate_arg (src, POINTER_TYPE))
9017 return NULL_TREE;
9018
9019 len = c_strlen (src, 1);
9020 if (!len
9021 || TREE_CODE (len) != INTEGER_CST)
9022 return NULL_TREE;
9023
9024 if (optimize_function_for_size_p (cfun)
9025 /* If the length is zero, it's small enough. */
9026 && !integer_zerop (len))
9027 return NULL_TREE;
9028
9029 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9030 if (!fn)
9031 return NULL_TREE;
9032
9033 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9034 fold_convert_loc (loc, size_type_node, len),
9035 build_int_cst (size_type_node, 1));
9036 /* We use dest twice in building our expression. Save it from
9037 multiple expansions. */
9038 dest = builtin_save_expr (dest);
9039 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9040
9041 type = TREE_TYPE (TREE_TYPE (fndecl));
9042 dest = fold_build_pointer_plus_loc (loc, dest, len);
9043 dest = fold_convert_loc (loc, type, dest);
9044 dest = omit_one_operand_loc (loc, type, dest, call);
9045 return dest;
9046 }
9047
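/* Editor's note: the corresponding stpcpy sketch.  With a constant
   source, stpcpy (buf, "hi") folds to roughly

     (memcpy (buf, "hi", 3), buf + 2)

   since stpcpy must return a pointer to the terminating NUL; DEST is
   wrapped in a SAVE_EXPR because it is used twice.  */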
9048 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9049 If SLEN is not NULL, it represents the length of the source string.
9050 Return NULL_TREE if no simplification can be made. */
9051
9052 tree
9053 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9054 tree src, tree len, tree slen)
9055 {
9056 tree fn;
9057
9058 if (!validate_arg (dest, POINTER_TYPE)
9059 || !validate_arg (src, POINTER_TYPE)
9060 || !validate_arg (len, INTEGER_TYPE))
9061 return NULL_TREE;
9062
9063 /* If the LEN parameter is zero, return DEST. */
9064 if (integer_zerop (len))
9065 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9066
9067 /* We can't compare SLEN with LEN below as constants if LEN is not
9068 itself a constant. */
9069 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9070 return NULL_TREE;
9071
9072 if (!slen)
9073 slen = c_strlen (src, 1);
9074
9075 /* Now, we must be passed a constant src ptr parameter. */
9076 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9077 return NULL_TREE;
9078
9079 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9080
9081 /* We do not support simplification of this case, though we do
9082 support it when expanding trees into RTL. */
9083 /* FIXME: generate a call to __builtin_memset. */
9084 if (tree_int_cst_lt (slen, len))
9085 return NULL_TREE;
9086
9087 /* OK, transform into builtin memcpy. */
9088 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9089 if (!fn)
9090 return NULL_TREE;
9091
9092 len = fold_convert_loc (loc, size_type_node, len);
9093 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9094 build_call_expr_loc (loc, fn, 3, dest, src, len));
9095 }
9096
9097 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9098 arguments to the call, and TYPE is its return type.
9099 Return NULL_TREE if no simplification can be made. */
9100
9101 static tree
9102 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9103 {
9104 if (!validate_arg (arg1, POINTER_TYPE)
9105 || !validate_arg (arg2, INTEGER_TYPE)
9106 || !validate_arg (len, INTEGER_TYPE))
9107 return NULL_TREE;
9108 else
9109 {
9110 const char *p1;
9111
9112 if (TREE_CODE (arg2) != INTEGER_CST
9113 || !tree_fits_uhwi_p (len))
9114 return NULL_TREE;
9115
9116 p1 = c_getstr (arg1);
9117 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9118 {
9119 char c;
9120 const char *r;
9121 tree tem;
9122
9123 if (target_char_cast (arg2, &c))
9124 return NULL_TREE;
9125
9126 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
9127
9128 if (r == NULL)
9129 return build_int_cst (TREE_TYPE (arg1), 0);
9130
9131 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9132 return fold_convert_loc (loc, type, tem);
9133 }
9134 return NULL_TREE;
9135 }
9136 }
9137
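/* Editor's note: an illustrative constant fold for the code above.
   memchr ("abc", 'b', 4) has all three operands known, so the host
   memchr runs at compile time and the call folds to the pointer
   expression ARG1 + 1; a miss folds to a null pointer of ARG1's type.  */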
9138 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9139 Return NULL_TREE if no simplification can be made. */
9140
9141 static tree
9142 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9143 {
9144 const char *p1, *p2;
9145
9146 if (!validate_arg (arg1, POINTER_TYPE)
9147 || !validate_arg (arg2, POINTER_TYPE)
9148 || !validate_arg (len, INTEGER_TYPE))
9149 return NULL_TREE;
9150
9151 /* If the LEN parameter is zero, return zero. */
9152 if (integer_zerop (len))
9153 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9154 arg1, arg2);
9155
9156 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9157 if (operand_equal_p (arg1, arg2, 0))
9158 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9159
9160 p1 = c_getstr (arg1);
9161 p2 = c_getstr (arg2);
9162
9163 /* If all arguments are constant, and the value of len is not greater
9164 than the lengths of arg1 and arg2, evaluate at compile-time. */
9165 if (tree_fits_uhwi_p (len) && p1 && p2
9166 && compare_tree_int (len, strlen (p1) + 1) <= 0
9167 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9168 {
9169 const int r = memcmp (p1, p2, tree_to_uhwi (len));
9170
9171 if (r > 0)
9172 return integer_one_node;
9173 else if (r < 0)
9174 return integer_minus_one_node;
9175 else
9176 return integer_zero_node;
9177 }
9178
9179 /* If the LEN parameter is one, return an expression corresponding to
9180 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9181 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9182 {
9183 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9184 tree cst_uchar_ptr_node
9185 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9186
9187 tree ind1
9188 = fold_convert_loc (loc, integer_type_node,
9189 build1 (INDIRECT_REF, cst_uchar_node,
9190 fold_convert_loc (loc,
9191 cst_uchar_ptr_node,
9192 arg1)));
9193 tree ind2
9194 = fold_convert_loc (loc, integer_type_node,
9195 build1 (INDIRECT_REF, cst_uchar_node,
9196 fold_convert_loc (loc,
9197 cst_uchar_ptr_node,
9198 arg2)));
9199 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9200 }
9201
9202 return NULL_TREE;
9203 }
9204
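/* Editor's note: the LEN == 1 case above, written out.  memcmp (p, q, 1)
   folds to

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   which matches memcmp's contract of comparing unsigned characters.  */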
9205 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9206 Return NULL_TREE if no simplification can be made. */
9207
9208 static tree
9209 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9210 {
9211 const char *p1, *p2;
9212
9213 if (!validate_arg (arg1, POINTER_TYPE)
9214 || !validate_arg (arg2, POINTER_TYPE))
9215 return NULL_TREE;
9216
9217 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9218 if (operand_equal_p (arg1, arg2, 0))
9219 return integer_zero_node;
9220
9221 p1 = c_getstr (arg1);
9222 p2 = c_getstr (arg2);
9223
9224 if (p1 && p2)
9225 {
9226 const int i = strcmp (p1, p2);
9227 if (i < 0)
9228 return integer_minus_one_node;
9229 else if (i > 0)
9230 return integer_one_node;
9231 else
9232 return integer_zero_node;
9233 }
9234
9235 /* If the second arg is "", return *(const unsigned char*)arg1. */
9236 if (p2 && *p2 == '\0')
9237 {
9238 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9239 tree cst_uchar_ptr_node
9240 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9241
9242 return fold_convert_loc (loc, integer_type_node,
9243 build1 (INDIRECT_REF, cst_uchar_node,
9244 fold_convert_loc (loc,
9245 cst_uchar_ptr_node,
9246 arg1)));
9247 }
9248
9249 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9250 if (p1 && *p1 == '\0')
9251 {
9252 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9253 tree cst_uchar_ptr_node
9254 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9255
9256 tree temp
9257 = fold_convert_loc (loc, integer_type_node,
9258 build1 (INDIRECT_REF, cst_uchar_node,
9259 fold_convert_loc (loc,
9260 cst_uchar_ptr_node,
9261 arg2)));
9262 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9263 }
9264
9265 return NULL_TREE;
9266 }
9267
9268 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9269 Return NULL_TREE if no simplification can be made. */
9270
9271 static tree
9272 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9273 {
9274 const char *p1, *p2;
9275
9276 if (!validate_arg (arg1, POINTER_TYPE)
9277 || !validate_arg (arg2, POINTER_TYPE)
9278 || !validate_arg (len, INTEGER_TYPE))
9279 return NULL_TREE;
9280
9281 /* If the LEN parameter is zero, return zero. */
9282 if (integer_zerop (len))
9283 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9284 arg1, arg2);
9285
9286 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9287 if (operand_equal_p (arg1, arg2, 0))
9288 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9289
9290 p1 = c_getstr (arg1);
9291 p2 = c_getstr (arg2);
9292
9293 if (tree_fits_uhwi_p (len) && p1 && p2)
9294 {
9295 const int i = strncmp (p1, p2, tree_to_uhwi (len));
9296 if (i > 0)
9297 return integer_one_node;
9298 else if (i < 0)
9299 return integer_minus_one_node;
9300 else
9301 return integer_zero_node;
9302 }
9303
9304 /* If the second arg is "", and the length is greater than zero,
9305 return *(const unsigned char*)arg1. */
9306 if (p2 && *p2 == '\0'
9307 && TREE_CODE (len) == INTEGER_CST
9308 && tree_int_cst_sgn (len) == 1)
9309 {
9310 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9311 tree cst_uchar_ptr_node
9312 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9313
9314 return fold_convert_loc (loc, integer_type_node,
9315 build1 (INDIRECT_REF, cst_uchar_node,
9316 fold_convert_loc (loc,
9317 cst_uchar_ptr_node,
9318 arg1)));
9319 }
9320
9321 /* If the first arg is "", and the length is greater than zero,
9322 return -*(const unsigned char*)arg2. */
9323 if (p1 && *p1 == '\0'
9324 && TREE_CODE (len) == INTEGER_CST
9325 && tree_int_cst_sgn (len) == 1)
9326 {
9327 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9328 tree cst_uchar_ptr_node
9329 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9330
9331 tree temp = fold_convert_loc (loc, integer_type_node,
9332 build1 (INDIRECT_REF, cst_uchar_node,
9333 fold_convert_loc (loc,
9334 cst_uchar_ptr_node,
9335 arg2)));
9336 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9337 }
9338
9339 /* If the LEN parameter is one, return an expression corresponding to
9340 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9341 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9342 {
9343 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9344 tree cst_uchar_ptr_node
9345 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9346
9347 tree ind1 = fold_convert_loc (loc, integer_type_node,
9348 build1 (INDIRECT_REF, cst_uchar_node,
9349 fold_convert_loc (loc,
9350 cst_uchar_ptr_node,
9351 arg1)));
9352 tree ind2 = fold_convert_loc (loc, integer_type_node,
9353 build1 (INDIRECT_REF, cst_uchar_node,
9354 fold_convert_loc (loc,
9355 cst_uchar_ptr_node,
9356 arg2)));
9357 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9358 }
9359
9360 return NULL_TREE;
9361 }
9362
9363 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9364 ARG. Return NULL_TREE if no simplification can be made. */
9365
9366 static tree
9367 fold_builtin_signbit (location_t loc, tree arg, tree type)
9368 {
9369 if (!validate_arg (arg, REAL_TYPE))
9370 return NULL_TREE;
9371
9372 /* If ARG is a compile-time constant, determine the result. */
9373 if (TREE_CODE (arg) == REAL_CST
9374 && !TREE_OVERFLOW (arg))
9375 {
9376 REAL_VALUE_TYPE c;
9377
9378 c = TREE_REAL_CST (arg);
9379 return (REAL_VALUE_NEGATIVE (c)
9380 ? build_one_cst (type)
9381 : build_zero_cst (type));
9382 }
9383
9384 /* If ARG is non-negative, the result is always zero. */
9385 if (tree_expr_nonnegative_p (arg))
9386 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9387
9388 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9389 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9390 return fold_convert (type,
9391 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9392 build_real (TREE_TYPE (arg), dconst0)));
9393
9394 return NULL_TREE;
9395 }
9396
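/* Editor's note: a sketch of the final transform above.  When the
   format has no signed zeros, signbit (x) folds to the comparison
   x < 0.0 converted to the result type; with signed zeros this would
   be wrong for x == -0.0, hence the HONOR_SIGNED_ZEROS guard.  */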
9397 /* Fold function call to builtin copysign, copysignf or copysignl with
9398 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9399 be made. */
9400
9401 static tree
9402 fold_builtin_copysign (location_t loc, tree fndecl,
9403 tree arg1, tree arg2, tree type)
9404 {
9405 tree tem;
9406
9407 if (!validate_arg (arg1, REAL_TYPE)
9408 || !validate_arg (arg2, REAL_TYPE))
9409 return NULL_TREE;
9410
9411 /* copysign(X,X) is X. */
9412 if (operand_equal_p (arg1, arg2, 0))
9413 return fold_convert_loc (loc, type, arg1);
9414
9415 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9416 if (TREE_CODE (arg1) == REAL_CST
9417 && TREE_CODE (arg2) == REAL_CST
9418 && !TREE_OVERFLOW (arg1)
9419 && !TREE_OVERFLOW (arg2))
9420 {
9421 REAL_VALUE_TYPE c1, c2;
9422
9423 c1 = TREE_REAL_CST (arg1);
9424 c2 = TREE_REAL_CST (arg2);
9425 /* c1.sign := c2.sign. */
9426 real_copysign (&c1, &c2);
9427 return build_real (type, c1);
9428 }
9429
9430 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9431 Remember to evaluate Y for side-effects. */
9432 if (tree_expr_nonnegative_p (arg2))
9433 return omit_one_operand_loc (loc, type,
9434 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9435 arg2);
9436
9437 /* Strip sign changing operations for the first argument. */
9438 tem = fold_strip_sign_ops (arg1);
9439 if (tem)
9440 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9441
9442 return NULL_TREE;
9443 }
9444
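/* Editor's note: illustrative instances of the copysign folds above:

     copysign (x, x)        -> x
     copysign (3.0, -0.5)   -> -3.0   (both constants; sign copied)
     copysign (x, fabs (y)) -> fabs (x), keeping y for side effects

   the last because a provably non-negative second argument pins the
   sign bit to zero.  */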
9445 /* Fold a call to builtin isascii with argument ARG. */
9446
9447 static tree
9448 fold_builtin_isascii (location_t loc, tree arg)
9449 {
9450 if (!validate_arg (arg, INTEGER_TYPE))
9451 return NULL_TREE;
9452 else
9453 {
9454 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9455 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9456 build_int_cst (integer_type_node,
9457 ~ (unsigned HOST_WIDE_INT) 0x7f));
9458 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9459 arg, integer_zero_node);
9460 }
9461 }
9462
9463 /* Fold a call to builtin toascii with argument ARG. */
9464
9465 static tree
9466 fold_builtin_toascii (location_t loc, tree arg)
9467 {
9468 if (!validate_arg (arg, INTEGER_TYPE))
9469 return NULL_TREE;
9470
9471 /* Transform toascii(c) -> (c & 0x7f). */
9472 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9473 build_int_cst (integer_type_node, 0x7f));
9474 }
9475
9476 /* Fold a call to builtin isdigit with argument ARG. */
9477
9478 static tree
9479 fold_builtin_isdigit (location_t loc, tree arg)
9480 {
9481 if (!validate_arg (arg, INTEGER_TYPE))
9482 return NULL_TREE;
9483 else
9484 {
9485 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9486 /* According to the C standard, isdigit is unaffected by locale.
9487 However, it definitely is affected by the target character set. */
9488 unsigned HOST_WIDE_INT target_digit0
9489 = lang_hooks.to_target_charset ('0');
9490
9491 if (target_digit0 == 0)
9492 return NULL_TREE;
9493
9494 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9495 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9496 build_int_cst (unsigned_type_node, target_digit0));
9497 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9498 build_int_cst (unsigned_type_node, 9));
9499 }
9500 }
9501
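/* Editor's note: the isdigit transform spelled out for an ASCII-like
   target where '0' maps to 48:

     isdigit (c)   ->   (unsigned) c - 48 <= 9

   a single unsigned comparison; values below '0' wrap around to large
   unsigned numbers and are rejected by the same test.  */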
9502 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9503
9504 static tree
9505 fold_builtin_fabs (location_t loc, tree arg, tree type)
9506 {
9507 if (!validate_arg (arg, REAL_TYPE))
9508 return NULL_TREE;
9509
9510 arg = fold_convert_loc (loc, type, arg);
9511 if (TREE_CODE (arg) == REAL_CST)
9512 return fold_abs_const (arg, type);
9513 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9514 }
9515
9516 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9517
9518 static tree
9519 fold_builtin_abs (location_t loc, tree arg, tree type)
9520 {
9521 if (!validate_arg (arg, INTEGER_TYPE))
9522 return NULL_TREE;
9523
9524 arg = fold_convert_loc (loc, type, arg);
9525 if (TREE_CODE (arg) == INTEGER_CST)
9526 return fold_abs_const (arg, type);
9527 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9528 }
9529
9530 /* Fold a fma operation with arguments ARG[012]. */
9531
9532 tree
9533 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9534 tree type, tree arg0, tree arg1, tree arg2)
9535 {
9536 if (TREE_CODE (arg0) == REAL_CST
9537 && TREE_CODE (arg1) == REAL_CST
9538 && TREE_CODE (arg2) == REAL_CST)
9539 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9540
9541 return NULL_TREE;
9542 }
9543
9544 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9545
9546 static tree
9547 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9548 {
9549 if (validate_arg (arg0, REAL_TYPE)
9550 && validate_arg (arg1, REAL_TYPE)
9551 && validate_arg (arg2, REAL_TYPE))
9552 {
9553 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9554 if (tem)
9555 return tem;
9556
9557 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9558 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9559 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9560 }
9561 return NULL_TREE;
9562 }
9563
9564 /* Fold a call to builtin fmin or fmax. */
9565
9566 static tree
9567 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9568 tree type, bool max)
9569 {
9570 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9571 {
9572 /* Calculate the result when the argument is a constant. */
9573 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9574
9575 if (res)
9576 return res;
9577
9578 /* If either argument is NaN, return the other one. Avoid the
9579 transformation if we get (and honor) a signalling NaN. Using
9580 omit_one_operand() ensures we create a non-lvalue. */
9581 if (TREE_CODE (arg0) == REAL_CST
9582 && real_isnan (&TREE_REAL_CST (arg0))
9583 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9584 || ! TREE_REAL_CST (arg0).signalling))
9585 return omit_one_operand_loc (loc, type, arg1, arg0);
9586 if (TREE_CODE (arg1) == REAL_CST
9587 && real_isnan (&TREE_REAL_CST (arg1))
9588 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9589 || ! TREE_REAL_CST (arg1).signalling))
9590 return omit_one_operand_loc (loc, type, arg0, arg1);
9591
9592 /* Transform fmin/fmax(x,x) -> x. */
9593 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9594 return omit_one_operand_loc (loc, type, arg0, arg1);
9595
9596 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9597 functions to return the numeric arg if the other one is NaN.
9598 These tree codes don't honor that, so only transform if
9599 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9600 handled, so we don't have to worry about it either. */
9601 if (flag_finite_math_only)
9602 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9603 fold_convert_loc (loc, type, arg0),
9604 fold_convert_loc (loc, type, arg1));
9605 }
9606 return NULL_TREE;
9607 }
9608
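/* Editor's note: sketches of the fmin/fmax folds above.  fmax (x, NAN)
   folds to x (the quiet-NaN rule), fmax (x, x) folds to x, and
   fmax (x, y) becomes MAX_EXPR (x, y) only under -ffinite-math-only,
   because MAX_EXPR does not implement the C99 return-the-numeric-arg
   rule when one operand is NaN.  */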
9609 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9610
9611 static tree
9612 fold_builtin_carg (location_t loc, tree arg, tree type)
9613 {
9614 if (validate_arg (arg, COMPLEX_TYPE)
9615 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9616 {
9617 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9618
9619 if (atan2_fn)
9620 {
9621 tree new_arg = builtin_save_expr (arg);
9622 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9623 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9624 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9625 }
9626 }
9627
9628 return NULL_TREE;
9629 }
9630
9631 /* Fold a call to builtin logb/ilogb. */
9632
9633 static tree
9634 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9635 {
9636 if (! validate_arg (arg, REAL_TYPE))
9637 return NULL_TREE;
9638
9639 STRIP_NOPS (arg);
9640
9641 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9642 {
9643 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9644
9645 switch (value->cl)
9646 {
9647 case rvc_nan:
9648 case rvc_inf:
9649 /* If arg is Inf or NaN and we're logb, return it. */
9650 if (TREE_CODE (rettype) == REAL_TYPE)
9651 {
9652 /* For logb(-Inf) we have to return +Inf. */
9653 if (real_isinf (value) && real_isneg (value))
9654 {
9655 REAL_VALUE_TYPE tem;
9656 real_inf (&tem);
9657 return build_real (rettype, tem);
9658 }
9659 return fold_convert_loc (loc, rettype, arg);
9660 }
9661 /* Fall through... */
9662 case rvc_zero:
9663 /* Zero may set errno and/or raise an exception for logb; also,
9664 for ilogb we don't know FP_ILOGB0. */
9665 return NULL_TREE;
9666 case rvc_normal:
9667 /* For normal numbers, proceed iff radix == 2. In GCC,
9668 normalized significands are in the range [0.5, 1.0). We
9669 want the exponent as if they were [1.0, 2.0) so get the
9670 exponent and subtract 1. */
9671 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9672 return fold_convert_loc (loc, rettype,
9673 build_int_cst (integer_type_node,
9674 REAL_EXP (value)-1));
9675 break;
9676 }
9677 }
9678
9679 return NULL_TREE;
9680 }
9681
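/* Editor's note: a worked constant for the rvc_normal case above,
   assuming radix 2.  For ARG == 8.0 the normalized significand is 0.5
   with REAL_EXP == 4, so logb (8.0) folds to 4 - 1 == 3, matching the
   [1.0, 2.0) convention the comment describes.  */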
9682 /* Fold a call to builtin significand, if radix == 2. */
9683
9684 static tree
9685 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9686 {
9687 if (! validate_arg (arg, REAL_TYPE))
9688 return NULL_TREE;
9689
9690 STRIP_NOPS (arg);
9691
9692 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9693 {
9694 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9695
9696 switch (value->cl)
9697 {
9698 case rvc_zero:
9699 case rvc_nan:
9700 case rvc_inf:
9701 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9702 return fold_convert_loc (loc, rettype, arg);
9703 case rvc_normal:
9704 /* For normal numbers, proceed iff radix == 2. */
9705 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9706 {
9707 REAL_VALUE_TYPE result = *value;
9708 /* In GCC, normalized significands are in the range [0.5,
9709 1.0). We want them to be [1.0, 2.0) so set the
9710 exponent to 1. */
9711 SET_REAL_EXP (&result, 1);
9712 return build_real (rettype, result);
9713 }
9714 break;
9715 }
9716 }
9717
9718 return NULL_TREE;
9719 }
9720
9721 /* Fold a call to builtin frexp; we can assume the base is 2. */
9722
9723 static tree
9724 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9725 {
9726 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9727 return NULL_TREE;
9728
9729 STRIP_NOPS (arg0);
9730
9731 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9732 return NULL_TREE;
9733
9734 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9735
9736 /* Proceed if a valid pointer type was passed in. */
9737 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9738 {
9739 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9740 tree frac, exp;
9741
9742 switch (value->cl)
9743 {
9744 case rvc_zero:
9745 /* For +-0, return (*exp = 0, +-0). */
9746 exp = integer_zero_node;
9747 frac = arg0;
9748 break;
9749 case rvc_nan:
9750 case rvc_inf:
9751 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9752 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9753 case rvc_normal:
9754 {
9755 /* Since the frexp function always expects base 2, and in
9756 GCC normalized significands are already in the range
9757 [0.5, 1.0), we have exactly what frexp wants. */
9758 REAL_VALUE_TYPE frac_rvt = *value;
9759 SET_REAL_EXP (&frac_rvt, 0);
9760 frac = build_real (rettype, frac_rvt);
9761 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9762 }
9763 break;
9764 default:
9765 gcc_unreachable ();
9766 }
9767
9768 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9769 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9770 TREE_SIDE_EFFECTS (arg1) = 1;
9771 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9772 }
9773
9774 return NULL_TREE;
9775 }
9776
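/* Editor's note: a worked constant for the code above.  frexp (8.0, &e)
   folds to the compound expression (*e = 4, 0.5): GCC's normalized
   significand 0.5 and exponent 4 are already in frexp's [0.5, 1.0)
   form, so no renormalization is needed.  */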
9777 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9778 then we can assume the base is two. If it's false, then we have to
9779 check the mode of the TYPE parameter in certain cases. */
9780
9781 static tree
9782 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9783 tree type, bool ldexp)
9784 {
9785 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9786 {
9787 STRIP_NOPS (arg0);
9788 STRIP_NOPS (arg1);
9789
9790 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9791 if (real_zerop (arg0) || integer_zerop (arg1)
9792 || (TREE_CODE (arg0) == REAL_CST
9793 && !real_isfinite (&TREE_REAL_CST (arg0))))
9794 return omit_one_operand_loc (loc, type, arg0, arg1);
9795
9796 /* If both arguments are constant, then try to evaluate it. */
9797 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9798 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9799 && tree_fits_shwi_p (arg1))
9800 {
9801 /* Bound the maximum adjustment to twice the range of the
9802 mode's valid exponents. Use abs to ensure the range is
9803 positive as a sanity check. */
9804 const long max_exp_adj = 2 *
9805 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9806 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9807
9808 /* Get the user-requested adjustment. */
9809 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9810
9811 /* The requested adjustment must be inside this range. This
9812 is a preliminary cap to avoid things like overflow, we
9813 may still fail to compute the result for other reasons. */
9814 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9815 {
9816 REAL_VALUE_TYPE initial_result;
9817
9818 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9819
9820 /* Ensure we didn't overflow. */
9821 if (! real_isinf (&initial_result))
9822 {
9823 const REAL_VALUE_TYPE trunc_result
9824 = real_value_truncate (TYPE_MODE (type), initial_result);
9825
9826 /* Only proceed if the target mode can hold the
9827 resulting value. */
9828 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9829 return build_real (type, trunc_result);
9830 }
9831 }
9832 }
9833 }
9834
9835 return NULL_TREE;
9836 }
9837
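/* Editor's note: a constant sketch for the fold above.  ldexp (1.5, 3)
   passes the max_exp_adj bound (about 4090 for double), real_ldexp
   produces 12.0, the truncation round-trip confirms the mode can hold
   it exactly, and the call folds to the constant 12.0.  */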
9838 /* Fold a call to builtin modf. */
9839
9840 static tree
9841 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9842 {
9843 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9844 return NULL_TREE;
9845
9846 STRIP_NOPS (arg0);
9847
9848 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9849 return NULL_TREE;
9850
9851 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9852
9853 /* Proceed if a valid pointer type was passed in. */
9854 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9855 {
9856 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9857 REAL_VALUE_TYPE trunc, frac;
9858
9859 switch (value->cl)
9860 {
9861 case rvc_nan:
9862 case rvc_zero:
9863 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9864 trunc = frac = *value;
9865 break;
9866 case rvc_inf:
9867 /* For +-Inf, return (*arg1 = arg0, +-0). */
9868 frac = dconst0;
9869 frac.sign = value->sign;
9870 trunc = *value;
9871 break;
9872 case rvc_normal:
9873 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9874 real_trunc (&trunc, VOIDmode, value);
9875 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9876 /* If the original number was negative and already
9877 integral, then the fractional part is -0.0. */
9878 if (value->sign && frac.cl == rvc_zero)
9879 frac.sign = value->sign;
9880 break;
9881 }
9882
9883 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9884 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9885 build_real (rettype, trunc));
9886 TREE_SIDE_EFFECTS (arg1) = 1;
9887 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9888 build_real (rettype, frac));
9889 }
9890
9891 return NULL_TREE;
9892 }
9893
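/* Editor's note: worked constants for the code above.  modf (-2.5, &ip)
   folds to (*ip = -2.0, -0.5); for an already-integral negative input,
   modf (-2.0, &ip) folds to (*ip = -2.0, -0.0), the sign fix-up at the
   end of the rvc_normal case supplying the -0.0.  */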
9894 /* Given a location LOC, an interclass builtin function decl FNDECL
9895 and its single argument ARG, return a folded expression computing
9896 the same, or NULL_TREE if we either couldn't or didn't want to fold
9897 (the latter happens when an RTL instruction is available). */
9898
9899 static tree
9900 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9901 {
9902 enum machine_mode mode;
9903
9904 if (!validate_arg (arg, REAL_TYPE))
9905 return NULL_TREE;
9906
9907 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9908 return NULL_TREE;
9909
9910 mode = TYPE_MODE (TREE_TYPE (arg));
9911
9912 /* If there is no optab, try generic code. */
9913 switch (DECL_FUNCTION_CODE (fndecl))
9914 {
9915 tree result;
9916
9917 CASE_FLT_FN (BUILT_IN_ISINF):
9918 {
9919 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9920 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9921 tree const type = TREE_TYPE (arg);
9922 REAL_VALUE_TYPE r;
9923 char buf[128];
9924
9925 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9926 real_from_string (&r, buf);
9927 result = build_call_expr (isgr_fn, 2,
9928 fold_build1_loc (loc, ABS_EXPR, type, arg),
9929 build_real (type, r));
9930 return result;
9931 }
9932 CASE_FLT_FN (BUILT_IN_FINITE):
9933 case BUILT_IN_ISFINITE:
9934 {
9935 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9936 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9937 tree const type = TREE_TYPE (arg);
9938 REAL_VALUE_TYPE r;
9939 char buf[128];
9940
9941 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9942 real_from_string (&r, buf);
9943 result = build_call_expr (isle_fn, 2,
9944 fold_build1_loc (loc, ABS_EXPR, type, arg),
9945 build_real (type, r));
9946 /*result = fold_build2_loc (loc, UNGT_EXPR,
9947 TREE_TYPE (TREE_TYPE (fndecl)),
9948 fold_build1_loc (loc, ABS_EXPR, type, arg),
9949 build_real (type, r));
9950 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9951 TREE_TYPE (TREE_TYPE (fndecl)),
9952 result);*/
9953 return result;
9954 }
9955 case BUILT_IN_ISNORMAL:
9956 {
9957 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9958 islessequal(fabs(x),DBL_MAX). */
9959 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9960 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9961 tree const type = TREE_TYPE (arg);
9962 REAL_VALUE_TYPE rmax, rmin;
9963 char buf[128];
9964
9965 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9966 real_from_string (&rmax, buf);
9967 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9968 real_from_string (&rmin, buf);
9969 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9970 result = build_call_expr (isle_fn, 2, arg,
9971 build_real (type, rmax));
9972 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9973 build_call_expr (isge_fn, 2, arg,
9974 build_real (type, rmin)));
9975 return result;
9976 }
9977 default:
9978 break;
9979 }
9980
9981 return NULL_TREE;
9982 }
9983
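/* Editor's note: the generic expansions above, written out for double
   (the string built by get_max_float spells DBL_MAX, and 0x1p-1022 is
   DBL_MIN):

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  islessequal (fabs (x), DBL_MAX)
                       & isgreaterequal (fabs (x), DBL_MIN)

   These apply only when no machine-specific icode handled the call.  */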
9984 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9985 ARG is the argument for the call. */
9986
9987 static tree
9988 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9989 {
9990 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9991 REAL_VALUE_TYPE r;
9992
9993 if (!validate_arg (arg, REAL_TYPE))
9994 return NULL_TREE;
9995
9996 switch (builtin_index)
9997 {
9998 case BUILT_IN_ISINF:
9999 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10000 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10001
10002 if (TREE_CODE (arg) == REAL_CST)
10003 {
10004 r = TREE_REAL_CST (arg);
10005 if (real_isinf (&r))
10006 return real_compare (GT_EXPR, &r, &dconst0)
10007 ? integer_one_node : integer_minus_one_node;
10008 else
10009 return integer_zero_node;
10010 }
10011
10012 return NULL_TREE;
10013
10014 case BUILT_IN_ISINF_SIGN:
10015 {
10016 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10017 /* In a boolean context, GCC will fold the inner COND_EXPR to
10018 1. So e.g. "if (isinf_sign(x))" would be folded to just
10019 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10020 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10021 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10022 tree tmp = NULL_TREE;
10023
10024 arg = builtin_save_expr (arg);
10025
10026 if (signbit_fn && isinf_fn)
10027 {
10028 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10029 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10030
10031 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10032 signbit_call, integer_zero_node);
10033 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10034 isinf_call, integer_zero_node);
10035
10036 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10037 integer_minus_one_node, integer_one_node);
10038 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10039 isinf_call, tmp,
10040 integer_zero_node);
10041 }
10042
10043 return tmp;
10044 }
10045
10046 case BUILT_IN_ISFINITE:
10047 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10048 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10049 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10050
10051 if (TREE_CODE (arg) == REAL_CST)
10052 {
10053 r = TREE_REAL_CST (arg);
10054 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10055 }
10056
10057 return NULL_TREE;
10058
10059 case BUILT_IN_ISNAN:
10060 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10061 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10062
10063 if (TREE_CODE (arg) == REAL_CST)
10064 {
10065 r = TREE_REAL_CST (arg);
10066 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10067 }
10068
10069 arg = builtin_save_expr (arg);
10070 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10071
10072 default:
10073 gcc_unreachable ();
10074 }
10075 }
10076
10077 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10078 This builtin will generate code to return the appropriate floating
10079 point classification depending on the value of the floating point
10080 number passed in. The possible return values must be supplied as
10081 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10082 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10083 one floating-point argument, which is "type generic". */
10084
10085 static tree
10086 fold_builtin_fpclassify (location_t loc, tree exp)
10087 {
10088 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10089 arg, type, res, tmp;
10090 enum machine_mode mode;
10091 REAL_VALUE_TYPE r;
10092 char buf[128];
10093
10094 /* Verify the required arguments in the original call. */
10095 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10096 INTEGER_TYPE, INTEGER_TYPE,
10097 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10098 return NULL_TREE;
10099
10100 fp_nan = CALL_EXPR_ARG (exp, 0);
10101 fp_infinite = CALL_EXPR_ARG (exp, 1);
10102 fp_normal = CALL_EXPR_ARG (exp, 2);
10103 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10104 fp_zero = CALL_EXPR_ARG (exp, 4);
10105 arg = CALL_EXPR_ARG (exp, 5);
10106 type = TREE_TYPE (arg);
10107 mode = TYPE_MODE (type);
10108 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10109
10110 /* fpclassify(x) ->
10111 isnan(x) ? FP_NAN :
10112 (fabs(x) == Inf ? FP_INFINITE :
10113 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10114 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10115
10116 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10117 build_real (type, dconst0));
10118 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10119 tmp, fp_zero, fp_subnormal);
10120
10121 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10122 real_from_string (&r, buf);
10123 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10124 arg, build_real (type, r));
10125 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10126
10127 if (HONOR_INFINITIES (mode))
10128 {
10129 real_inf (&r);
10130 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10131 build_real (type, r));
10132 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10133 fp_infinite, res);
10134 }
10135
10136 if (HONOR_NANS (mode))
10137 {
10138 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10139 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10140 }
10141
10142 return res;
10143 }
10144
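/* Editor's note: the folded form above, as a C sketch for double with
   t = fabs (x) (the COND_EXPR chain is built inside-out):

     !(t == t)        ? FP_NAN          (only if NaNs are honored)
     : t == INFINITY  ? FP_INFINITE     (only if infinities are honored)
     : t >= 0x1p-1022 ? FP_NORMAL
     : t == 0.0       ? FP_ZERO
     :                  FP_SUBNORMAL  */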
10145 /* Fold a call to an unordered comparison function such as
10146 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10147 being called and ARG0 and ARG1 are the arguments for the call.
10148 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10149 the opposite of the desired result. UNORDERED_CODE is used
10150 for modes that can hold NaNs and ORDERED_CODE is used for
10151 the rest. */
10152
10153 static tree
10154 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10155 enum tree_code unordered_code,
10156 enum tree_code ordered_code)
10157 {
10158 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10159 enum tree_code code;
10160 tree type0, type1;
10161 enum tree_code code0, code1;
10162 tree cmp_type = NULL_TREE;
10163
10164 type0 = TREE_TYPE (arg0);
10165 type1 = TREE_TYPE (arg1);
10166
10167 code0 = TREE_CODE (type0);
10168 code1 = TREE_CODE (type1);
10169
10170 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10171 /* Choose the wider of two real types. */
10172 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10173 ? type0 : type1;
10174 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10175 cmp_type = type0;
10176 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10177 cmp_type = type1;
10178
10179 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10180 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10181
10182 if (unordered_code == UNORDERED_EXPR)
10183 {
10184 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10185 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10186 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10187 }
10188
10189 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10190 : ordered_code;
10191 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10192 fold_build2_loc (loc, code, type, arg0, arg1));
10193 }
10194
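/* Editor's note: a sketch of the inversion trick above.  For
   isgreater (x, y) the caller passes UNLE_EXPR/LE_EXPR, so the fold
   emits !(x UNLE y) when NaNs are honored -- true exactly when x > y
   and the operands are ordered -- and plain !(x <= y) otherwise.  */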
10195 /* Fold a call to built-in function FNDECL with 0 arguments.
10196 IGNORE is true if the result of the function call is ignored. This
10197 function returns NULL_TREE if no simplification was possible. */
10198
10199 static tree
10200 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10201 {
10202 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10203 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10204 switch (fcode)
10205 {
10206 CASE_FLT_FN (BUILT_IN_INF):
10207 case BUILT_IN_INFD32:
10208 case BUILT_IN_INFD64:
10209 case BUILT_IN_INFD128:
10210 return fold_builtin_inf (loc, type, true);
10211
10212 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10213 return fold_builtin_inf (loc, type, false);
10214
10215 case BUILT_IN_CLASSIFY_TYPE:
10216 return fold_builtin_classify_type (NULL_TREE);
10217
10218 case BUILT_IN_UNREACHABLE:
10219 if (flag_sanitize & SANITIZE_UNREACHABLE
10220 && (current_function_decl == NULL
10221 || !lookup_attribute ("no_sanitize_undefined",
10222 DECL_ATTRIBUTES (current_function_decl))))
10223 return ubsan_instrument_unreachable (loc);
10224 break;
10225
10226 default:
10227 break;
10228 }
10229 return NULL_TREE;
10230 }
10231
10232 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10233 IGNORE is true if the result of the function call is ignored. This
10234 function returns NULL_TREE if no simplification was possible. */
10235
10236 static tree
10237 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10238 {
10239 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10240 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10241 switch (fcode)
10242 {
10243 case BUILT_IN_CONSTANT_P:
10244 {
10245 tree val = fold_builtin_constant_p (arg0);
10246
10247 /* Gimplification will pull the CALL_EXPR for the builtin out of
10248 an if condition. When not optimizing, we'll not CSE it back.
10249 To avoid link-error regressions, return false now.
10250 if (!val && !optimize)
10251 val = integer_zero_node;
10252
10253 return val;
10254 }
10255
10256 case BUILT_IN_CLASSIFY_TYPE:
10257 return fold_builtin_classify_type (arg0);
10258
10259 case BUILT_IN_STRLEN:
10260 return fold_builtin_strlen (loc, type, arg0);
10261
10262 CASE_FLT_FN (BUILT_IN_FABS):
10263 case BUILT_IN_FABSD32:
10264 case BUILT_IN_FABSD64:
10265 case BUILT_IN_FABSD128:
10266 return fold_builtin_fabs (loc, arg0, type);
10267
10268 case BUILT_IN_ABS:
10269 case BUILT_IN_LABS:
10270 case BUILT_IN_LLABS:
10271 case BUILT_IN_IMAXABS:
10272 return fold_builtin_abs (loc, arg0, type);
10273
10274 CASE_FLT_FN (BUILT_IN_CONJ):
10275 if (validate_arg (arg0, COMPLEX_TYPE)
10276 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10277 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10278 break;
10279
10280 CASE_FLT_FN (BUILT_IN_CREAL):
10281 if (validate_arg (arg0, COMPLEX_TYPE)
10282 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10283 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10284 break;
10285
10286 CASE_FLT_FN (BUILT_IN_CIMAG):
10287 if (validate_arg (arg0, COMPLEX_TYPE)
10288 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10289 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10290 break;
10291
10292 CASE_FLT_FN (BUILT_IN_CCOS):
10293 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10294
10295 CASE_FLT_FN (BUILT_IN_CCOSH):
10296 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10297
10298 CASE_FLT_FN (BUILT_IN_CPROJ):
10299 return fold_builtin_cproj (loc, arg0, type);
10300
10301 CASE_FLT_FN (BUILT_IN_CSIN):
10302 if (validate_arg (arg0, COMPLEX_TYPE)
10303 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10304 return do_mpc_arg1 (arg0, type, mpc_sin);
10305 break;
10306
10307 CASE_FLT_FN (BUILT_IN_CSINH):
10308 if (validate_arg (arg0, COMPLEX_TYPE)
10309 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10310 return do_mpc_arg1 (arg0, type, mpc_sinh);
10311 break;
10312
10313 CASE_FLT_FN (BUILT_IN_CTAN):
10314 if (validate_arg (arg0, COMPLEX_TYPE)
10315 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10316 return do_mpc_arg1 (arg0, type, mpc_tan);
10317 break;
10318
10319 CASE_FLT_FN (BUILT_IN_CTANH):
10320 if (validate_arg (arg0, COMPLEX_TYPE)
10321 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10322 return do_mpc_arg1 (arg0, type, mpc_tanh);
10323 break;
10324
10325 CASE_FLT_FN (BUILT_IN_CLOG):
10326 if (validate_arg (arg0, COMPLEX_TYPE)
10327 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10328 return do_mpc_arg1 (arg0, type, mpc_log);
10329 break;
10330
10331 CASE_FLT_FN (BUILT_IN_CSQRT):
10332 if (validate_arg (arg0, COMPLEX_TYPE)
10333 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10334 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10335 break;
10336
10337 CASE_FLT_FN (BUILT_IN_CASIN):
10338 if (validate_arg (arg0, COMPLEX_TYPE)
10339 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10340 return do_mpc_arg1 (arg0, type, mpc_asin);
10341 break;
10342
10343 CASE_FLT_FN (BUILT_IN_CACOS):
10344 if (validate_arg (arg0, COMPLEX_TYPE)
10345 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10346 return do_mpc_arg1 (arg0, type, mpc_acos);
10347 break;
10348
10349 CASE_FLT_FN (BUILT_IN_CATAN):
10350 if (validate_arg (arg0, COMPLEX_TYPE)
10351 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10352 return do_mpc_arg1 (arg0, type, mpc_atan);
10353 break;
10354
10355 CASE_FLT_FN (BUILT_IN_CASINH):
10356 if (validate_arg (arg0, COMPLEX_TYPE)
10357 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10358 return do_mpc_arg1 (arg0, type, mpc_asinh);
10359 break;
10360
10361 CASE_FLT_FN (BUILT_IN_CACOSH):
10362 if (validate_arg (arg0, COMPLEX_TYPE)
10363 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10364 return do_mpc_arg1 (arg0, type, mpc_acosh);
10365 break;
10366
10367 CASE_FLT_FN (BUILT_IN_CATANH):
10368 if (validate_arg (arg0, COMPLEX_TYPE)
10369 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10370 return do_mpc_arg1 (arg0, type, mpc_atanh);
10371 break;
10372
10373 CASE_FLT_FN (BUILT_IN_CABS):
10374 return fold_builtin_cabs (loc, arg0, type, fndecl);
10375
10376 CASE_FLT_FN (BUILT_IN_CARG):
10377 return fold_builtin_carg (loc, arg0, type);
10378
10379 CASE_FLT_FN (BUILT_IN_SQRT):
10380 return fold_builtin_sqrt (loc, arg0, type);
10381
10382 CASE_FLT_FN (BUILT_IN_CBRT):
10383 return fold_builtin_cbrt (loc, arg0, type);
10384
10385 CASE_FLT_FN (BUILT_IN_ASIN):
10386 if (validate_arg (arg0, REAL_TYPE))
10387 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10388 &dconstm1, &dconst1, true);
10389 break;
10390
10391 CASE_FLT_FN (BUILT_IN_ACOS):
10392 if (validate_arg (arg0, REAL_TYPE))
10393 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10394 &dconstm1, &dconst1, true);
10395 break;
10396
10397 CASE_FLT_FN (BUILT_IN_ATAN):
10398 if (validate_arg (arg0, REAL_TYPE))
10399 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10400 break;
10401
10402 CASE_FLT_FN (BUILT_IN_ASINH):
10403 if (validate_arg (arg0, REAL_TYPE))
10404 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10405 break;
10406
10407 CASE_FLT_FN (BUILT_IN_ACOSH):
10408 if (validate_arg (arg0, REAL_TYPE))
10409 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10410 &dconst1, NULL, true);
10411 break;
10412
10413 CASE_FLT_FN (BUILT_IN_ATANH):
10414 if (validate_arg (arg0, REAL_TYPE))
10415 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10416 &dconstm1, &dconst1, false);
10417 break;
10418
10419 CASE_FLT_FN (BUILT_IN_SIN):
10420 if (validate_arg (arg0, REAL_TYPE))
10421 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10422 break;
10423
10424 CASE_FLT_FN (BUILT_IN_COS):
10425 return fold_builtin_cos (loc, arg0, type, fndecl);
10426
10427 CASE_FLT_FN (BUILT_IN_TAN):
10428 return fold_builtin_tan (arg0, type);
10429
10430 CASE_FLT_FN (BUILT_IN_CEXP):
10431 return fold_builtin_cexp (loc, arg0, type);
10432
10433 CASE_FLT_FN (BUILT_IN_CEXPI):
10434 if (validate_arg (arg0, REAL_TYPE))
10435 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10436 break;
10437
10438 CASE_FLT_FN (BUILT_IN_SINH):
10439 if (validate_arg (arg0, REAL_TYPE))
10440 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10441 break;
10442
10443 CASE_FLT_FN (BUILT_IN_COSH):
10444 return fold_builtin_cosh (loc, arg0, type, fndecl);
10445
10446 CASE_FLT_FN (BUILT_IN_TANH):
10447 if (validate_arg (arg0, REAL_TYPE))
10448 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10449 break;
10450
10451 CASE_FLT_FN (BUILT_IN_ERF):
10452 if (validate_arg (arg0, REAL_TYPE))
10453 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10454 break;
10455
10456 CASE_FLT_FN (BUILT_IN_ERFC):
10457 if (validate_arg (arg0, REAL_TYPE))
10458 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10459 break;
10460
10461 CASE_FLT_FN (BUILT_IN_TGAMMA):
10462 if (validate_arg (arg0, REAL_TYPE))
10463 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10464 break;
10465
10466 CASE_FLT_FN (BUILT_IN_EXP):
10467 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10468
10469 CASE_FLT_FN (BUILT_IN_EXP2):
10470 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10471
10472 CASE_FLT_FN (BUILT_IN_EXP10):
10473 CASE_FLT_FN (BUILT_IN_POW10):
10474 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10475
10476 CASE_FLT_FN (BUILT_IN_EXPM1):
10477 if (validate_arg (arg0, REAL_TYPE))
10478 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10479 break;
10480
10481 CASE_FLT_FN (BUILT_IN_LOG):
10482 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10483
10484 CASE_FLT_FN (BUILT_IN_LOG2):
10485 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10486
10487 CASE_FLT_FN (BUILT_IN_LOG10):
10488 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10489
10490 CASE_FLT_FN (BUILT_IN_LOG1P):
10491 if (validate_arg (arg0, REAL_TYPE))
10492 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10493 &dconstm1, NULL, false);
10494 break;
10495
10496 CASE_FLT_FN (BUILT_IN_J0):
10497 if (validate_arg (arg0, REAL_TYPE))
10498 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10499 NULL, NULL, 0);
10500 break;
10501
10502 CASE_FLT_FN (BUILT_IN_J1):
10503 if (validate_arg (arg0, REAL_TYPE))
10504 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10505 NULL, NULL, 0);
10506 break;
10507
10508 CASE_FLT_FN (BUILT_IN_Y0):
10509 if (validate_arg (arg0, REAL_TYPE))
10510 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10511 &dconst0, NULL, false);
10512 break;
10513
10514 CASE_FLT_FN (BUILT_IN_Y1):
10515 if (validate_arg (arg0, REAL_TYPE))
10516 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10517 &dconst0, NULL, false);
10518 break;
10519
10520 CASE_FLT_FN (BUILT_IN_NAN):
10521 case BUILT_IN_NAND32:
10522 case BUILT_IN_NAND64:
10523 case BUILT_IN_NAND128:
10524 return fold_builtin_nan (arg0, type, true);
10525
10526 CASE_FLT_FN (BUILT_IN_NANS):
10527 return fold_builtin_nan (arg0, type, false);
10528
10529 CASE_FLT_FN (BUILT_IN_FLOOR):
10530 return fold_builtin_floor (loc, fndecl, arg0);
10531
10532 CASE_FLT_FN (BUILT_IN_CEIL):
10533 return fold_builtin_ceil (loc, fndecl, arg0);
10534
10535 CASE_FLT_FN (BUILT_IN_TRUNC):
10536 return fold_builtin_trunc (loc, fndecl, arg0);
10537
10538 CASE_FLT_FN (BUILT_IN_ROUND):
10539 return fold_builtin_round (loc, fndecl, arg0);
10540
10541 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10542 CASE_FLT_FN (BUILT_IN_RINT):
10543 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10544
10545 CASE_FLT_FN (BUILT_IN_ICEIL):
10546 CASE_FLT_FN (BUILT_IN_LCEIL):
10547 CASE_FLT_FN (BUILT_IN_LLCEIL):
10548 CASE_FLT_FN (BUILT_IN_LFLOOR):
10549 CASE_FLT_FN (BUILT_IN_IFLOOR):
10550 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10551 CASE_FLT_FN (BUILT_IN_IROUND):
10552 CASE_FLT_FN (BUILT_IN_LROUND):
10553 CASE_FLT_FN (BUILT_IN_LLROUND):
10554 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10555
10556 CASE_FLT_FN (BUILT_IN_IRINT):
10557 CASE_FLT_FN (BUILT_IN_LRINT):
10558 CASE_FLT_FN (BUILT_IN_LLRINT):
10559 return fold_fixed_mathfn (loc, fndecl, arg0);
10560
10561 case BUILT_IN_BSWAP16:
10562 case BUILT_IN_BSWAP32:
10563 case BUILT_IN_BSWAP64:
10564 return fold_builtin_bswap (fndecl, arg0);
10565
10566 CASE_INT_FN (BUILT_IN_FFS):
10567 CASE_INT_FN (BUILT_IN_CLZ):
10568 CASE_INT_FN (BUILT_IN_CTZ):
10569 CASE_INT_FN (BUILT_IN_CLRSB):
10570 CASE_INT_FN (BUILT_IN_POPCOUNT):
10571 CASE_INT_FN (BUILT_IN_PARITY):
10572 return fold_builtin_bitop (fndecl, arg0);
10573
10574 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10575 return fold_builtin_signbit (loc, arg0, type);
10576
10577 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10578 return fold_builtin_significand (loc, arg0, type);
10579
10580 CASE_FLT_FN (BUILT_IN_ILOGB):
10581 CASE_FLT_FN (BUILT_IN_LOGB):
10582 return fold_builtin_logb (loc, arg0, type);
10583
10584 case BUILT_IN_ISASCII:
10585 return fold_builtin_isascii (loc, arg0);
10586
10587 case BUILT_IN_TOASCII:
10588 return fold_builtin_toascii (loc, arg0);
10589
10590 case BUILT_IN_ISDIGIT:
10591 return fold_builtin_isdigit (loc, arg0);
10592
10593 CASE_FLT_FN (BUILT_IN_FINITE):
10594 case BUILT_IN_FINITED32:
10595 case BUILT_IN_FINITED64:
10596 case BUILT_IN_FINITED128:
10597 case BUILT_IN_ISFINITE:
10598 {
10599 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10600 if (ret)
10601 return ret;
10602 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10603 }
10604
10605 CASE_FLT_FN (BUILT_IN_ISINF):
10606 case BUILT_IN_ISINFD32:
10607 case BUILT_IN_ISINFD64:
10608 case BUILT_IN_ISINFD128:
10609 {
10610 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10611 if (ret)
10612 return ret;
10613 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10614 }
10615
10616 case BUILT_IN_ISNORMAL:
10617 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10618
10619 case BUILT_IN_ISINF_SIGN:
10620 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10621
10622 CASE_FLT_FN (BUILT_IN_ISNAN):
10623 case BUILT_IN_ISNAND32:
10624 case BUILT_IN_ISNAND64:
10625 case BUILT_IN_ISNAND128:
10626 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10627
10628 case BUILT_IN_PRINTF:
10629 case BUILT_IN_PRINTF_UNLOCKED:
10630 case BUILT_IN_VPRINTF:
10631 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10632
10633 case BUILT_IN_FREE:
10634 if (integer_zerop (arg0))
10635 return build_empty_stmt (loc);
10636 break;
10637
10638 default:
10639 break;
10640 }
10641
10642 return NULL_TREE;
10643
10644 }
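
/* An illustrative note: CASE_FLT_FN (BUILT_IN_SIN), for instance,
   expands to the case labels for BUILT_IN_SIN, BUILT_IN_SINF and
   BUILT_IN_SINL, so each CASE_FLT_FN entry above handles the double,
   float and long double variants of the builtin at once.  */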
10645
10646 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10647 IGNORE is true if the result of the function call is ignored. This
10648 function returns NULL_TREE if no simplification was possible. */
10649
10650 static tree
10651 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10652 {
10653 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10654 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10655
10656 switch (fcode)
10657 {
10658 CASE_FLT_FN (BUILT_IN_JN):
10659 if (validate_arg (arg0, INTEGER_TYPE)
10660 && validate_arg (arg1, REAL_TYPE))
10661 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10662 break;
10663
10664 CASE_FLT_FN (BUILT_IN_YN):
10665 if (validate_arg (arg0, INTEGER_TYPE)
10666 && validate_arg (arg1, REAL_TYPE))
10667 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10668 &dconst0, false);
10669 break;
10670
10671 CASE_FLT_FN (BUILT_IN_DREM):
10672 CASE_FLT_FN (BUILT_IN_REMAINDER):
10673 if (validate_arg (arg0, REAL_TYPE)
10674 && validate_arg (arg1, REAL_TYPE))
10675 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10676 break;
10677
10678 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10679 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10680 if (validate_arg (arg0, REAL_TYPE)
10681 && validate_arg (arg1, POINTER_TYPE))
10682 return do_mpfr_lgamma_r (arg0, arg1, type);
10683 break;
10684
10685 CASE_FLT_FN (BUILT_IN_ATAN2):
10686 if (validate_arg (arg0, REAL_TYPE)
10687 && validate_arg (arg1, REAL_TYPE))
10688 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10689 break;
10690
10691 CASE_FLT_FN (BUILT_IN_FDIM):
10692 if (validate_arg (arg0, REAL_TYPE)
10693 && validate_arg (arg1, REAL_TYPE))
10694 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10695 break;
10696
10697 CASE_FLT_FN (BUILT_IN_HYPOT):
10698 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10699
10700 CASE_FLT_FN (BUILT_IN_CPOW):
10701 if (validate_arg (arg0, COMPLEX_TYPE)
10702 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10703 && validate_arg (arg1, COMPLEX_TYPE)
10704 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10705 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10706 break;
10707
10708 CASE_FLT_FN (BUILT_IN_LDEXP):
10709 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10710 CASE_FLT_FN (BUILT_IN_SCALBN):
10711 CASE_FLT_FN (BUILT_IN_SCALBLN):
10712 return fold_builtin_load_exponent (loc, arg0, arg1,
10713 type, /*ldexp=*/false);
10714
10715 CASE_FLT_FN (BUILT_IN_FREXP):
10716 return fold_builtin_frexp (loc, arg0, arg1, type);
10717
10718 CASE_FLT_FN (BUILT_IN_MODF):
10719 return fold_builtin_modf (loc, arg0, arg1, type);
10720
10721 case BUILT_IN_BZERO:
10722 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10723
10724 case BUILT_IN_FPUTS:
10725 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10726
10727 case BUILT_IN_FPUTS_UNLOCKED:
10728 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10729
10730 case BUILT_IN_STRSTR:
10731 return fold_builtin_strstr (loc, arg0, arg1, type);
10732
10733 case BUILT_IN_STRCAT:
10734 return fold_builtin_strcat (loc, arg0, arg1);
10735
10736 case BUILT_IN_STRSPN:
10737 return fold_builtin_strspn (loc, arg0, arg1);
10738
10739 case BUILT_IN_STRCSPN:
10740 return fold_builtin_strcspn (loc, arg0, arg1);
10741
10742 case BUILT_IN_STRCHR:
10743 case BUILT_IN_INDEX:
10744 return fold_builtin_strchr (loc, arg0, arg1, type);
10745
10746 case BUILT_IN_STRRCHR:
10747 case BUILT_IN_RINDEX:
10748 return fold_builtin_strrchr (loc, arg0, arg1, type);
10749
10750 case BUILT_IN_STRCPY:
10751 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10752
10753 case BUILT_IN_STPCPY:
10754 if (ignore)
10755 {
10756 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10757 if (!fn)
10758 break;
10759
10760 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10761 }
10762 else
10763 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10764 break;
10765
10766 case BUILT_IN_STRCMP:
10767 return fold_builtin_strcmp (loc, arg0, arg1);
10768
10769 case BUILT_IN_STRPBRK:
10770 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10771
10772 case BUILT_IN_EXPECT:
10773 return fold_builtin_expect (loc, arg0, arg1);
10774
10775 CASE_FLT_FN (BUILT_IN_POW):
10776 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10777
10778 CASE_FLT_FN (BUILT_IN_POWI):
10779 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10780
10781 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10782 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10783
10784 CASE_FLT_FN (BUILT_IN_FMIN):
10785 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10786
10787 CASE_FLT_FN (BUILT_IN_FMAX):
10788 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10789
10790 case BUILT_IN_ISGREATER:
10791 return fold_builtin_unordered_cmp (loc, fndecl,
10792 arg0, arg1, UNLE_EXPR, LE_EXPR);
10793 case BUILT_IN_ISGREATEREQUAL:
10794 return fold_builtin_unordered_cmp (loc, fndecl,
10795 arg0, arg1, UNLT_EXPR, LT_EXPR);
10796 case BUILT_IN_ISLESS:
10797 return fold_builtin_unordered_cmp (loc, fndecl,
10798 arg0, arg1, UNGE_EXPR, GE_EXPR);
10799 case BUILT_IN_ISLESSEQUAL:
10800 return fold_builtin_unordered_cmp (loc, fndecl,
10801 arg0, arg1, UNGT_EXPR, GT_EXPR);
10802 case BUILT_IN_ISLESSGREATER:
10803 return fold_builtin_unordered_cmp (loc, fndecl,
10804 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10805 case BUILT_IN_ISUNORDERED:
10806 return fold_builtin_unordered_cmp (loc, fndecl,
10807 arg0, arg1, UNORDERED_EXPR,
10808 NOP_EXPR);
10809
10810 /* We do the folding for va_start in the expander. */
10811 case BUILT_IN_VA_START:
10812 break;
10813
10814 case BUILT_IN_SPRINTF:
10815 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10816
10817 case BUILT_IN_OBJECT_SIZE:
10818 return fold_builtin_object_size (arg0, arg1);
10819
10820 case BUILT_IN_PRINTF:
10821 case BUILT_IN_PRINTF_UNLOCKED:
10822 case BUILT_IN_VPRINTF:
10823 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10824
10825 case BUILT_IN_PRINTF_CHK:
10826 case BUILT_IN_VPRINTF_CHK:
10827 if (!validate_arg (arg0, INTEGER_TYPE)
10828 || TREE_SIDE_EFFECTS (arg0))
10829 return NULL_TREE;
10830 else
10831 return fold_builtin_printf (loc, fndecl,
10832 arg1, NULL_TREE, ignore, fcode);
10833 break;
10834
10835 case BUILT_IN_FPRINTF:
10836 case BUILT_IN_FPRINTF_UNLOCKED:
10837 case BUILT_IN_VFPRINTF:
10838 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10839 ignore, fcode);
10840
10841 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10842 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10843
10844 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10845 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10846
10847 default:
10848 break;
10849 }
10850 return NULL_TREE;
10851 }
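
/* Illustrative note on the unordered comparisons above: e.g.
   BUILT_IN_ISGREATER is folded to the negation of an UNLE_EXPR
   comparison (or of a plain LE_EXPR when NaNs need not be honored),
   so isgreater (x, y) behaves like x > y except that it is quiet on
   NaN operands.  */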
10852
10853 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10854 and ARG2. IGNORE is true if the result of the function call is ignored.
10855 This function returns NULL_TREE if no simplification was possible. */
10856
10857 static tree
10858 fold_builtin_3 (location_t loc, tree fndecl,
10859 tree arg0, tree arg1, tree arg2, bool ignore)
10860 {
10861 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10862 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10863 switch (fcode)
10864 {
10865
10866 CASE_FLT_FN (BUILT_IN_SINCOS):
10867 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10868
10869 CASE_FLT_FN (BUILT_IN_FMA):
10870 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10872
10873 CASE_FLT_FN (BUILT_IN_REMQUO):
10874 if (validate_arg (arg0, REAL_TYPE)
10875 && validate_arg (arg1, REAL_TYPE)
10876 && validate_arg (arg2, POINTER_TYPE))
10877 return do_mpfr_remquo (arg0, arg1, arg2);
10878 break;
10879
10880 case BUILT_IN_MEMSET:
10881 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10882
10883 case BUILT_IN_BCOPY:
10884 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10885 void_type_node, true, /*endp=*/3);
10886
10887 case BUILT_IN_MEMCPY:
10888 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10889 type, ignore, /*endp=*/0);
10890
10891 case BUILT_IN_MEMPCPY:
10892 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10893 type, ignore, /*endp=*/1);
10894
10895 case BUILT_IN_MEMMOVE:
10896 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10897 type, ignore, /*endp=*/3);
10898
10899 case BUILT_IN_STRNCAT:
10900 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10901
10902 case BUILT_IN_STRNCPY:
10903 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10904
10905 case BUILT_IN_STRNCMP:
10906 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10907
10908 case BUILT_IN_MEMCHR:
10909 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10910
10911 case BUILT_IN_BCMP:
10912 case BUILT_IN_MEMCMP:
10913 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10914
10915 case BUILT_IN_SPRINTF:
10916 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10917
10918 case BUILT_IN_SNPRINTF:
10919 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10920
10921 case BUILT_IN_STRCPY_CHK:
10922 case BUILT_IN_STPCPY_CHK:
10923 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10924 ignore, fcode);
10925
10926 case BUILT_IN_STRCAT_CHK:
10927 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10928
10929 case BUILT_IN_PRINTF_CHK:
10930 case BUILT_IN_VPRINTF_CHK:
10931 if (!validate_arg (arg0, INTEGER_TYPE)
10932 || TREE_SIDE_EFFECTS (arg0))
10933 return NULL_TREE;
10934 else
10935 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10936 break;
10937
10938 case BUILT_IN_FPRINTF:
10939 case BUILT_IN_FPRINTF_UNLOCKED:
10940 case BUILT_IN_VFPRINTF:
10941 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10942 ignore, fcode);
10943
10944 case BUILT_IN_FPRINTF_CHK:
10945 case BUILT_IN_VFPRINTF_CHK:
10946 if (!validate_arg (arg1, INTEGER_TYPE)
10947 || TREE_SIDE_EFFECTS (arg1))
10948 return NULL_TREE;
10949 else
10950 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10951 ignore, fcode);
10952
10953 default:
10954 break;
10955 }
10956 return NULL_TREE;
10957 }
10958
10959 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10960 ARG2, and ARG3. IGNORE is true if the result of the function call is
10961 ignored. This function returns NULL_TREE if no simplification was
10962 possible. */
10963
10964 static tree
10965 fold_builtin_4 (location_t loc, tree fndecl,
10966 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10967 {
10968 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10969
10970 switch (fcode)
10971 {
10972 case BUILT_IN_MEMCPY_CHK:
10973 case BUILT_IN_MEMPCPY_CHK:
10974 case BUILT_IN_MEMMOVE_CHK:
10975 case BUILT_IN_MEMSET_CHK:
10976 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10977 NULL_TREE, ignore,
10978 DECL_FUNCTION_CODE (fndecl));
10979
10980 case BUILT_IN_STRNCPY_CHK:
10981 case BUILT_IN_STPNCPY_CHK:
10982 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10983 ignore, fcode);
10984
10985 case BUILT_IN_STRNCAT_CHK:
10986 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10987
10988 case BUILT_IN_SNPRINTF:
10989 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10990
10991 case BUILT_IN_FPRINTF_CHK:
10992 case BUILT_IN_VFPRINTF_CHK:
10993 if (!validate_arg (arg1, INTEGER_TYPE)
10994 || TREE_SIDE_EFFECTS (arg1))
10995 return NULL_TREE;
10996 else
10997 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10998 ignore, fcode);
10999 break;
11000
11001 default:
11002 break;
11003 }
11004 return NULL_TREE;
11005 }
11006
11007 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11008 arguments, where NARGS <= 4. IGNORE is true if the result of the
11009 function call is ignored. This function returns NULL_TREE if no
11010 simplification was possible. Note that this only folds builtins with
11011 fixed argument patterns. Foldings that do varargs-to-varargs
11012 transformations, or that match calls with more than 4 arguments,
11013 need to be handled with fold_builtin_varargs instead. */
11014
11015 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11016
11017 static tree
11018 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11019 {
11020 tree ret = NULL_TREE;
11021
11022 switch (nargs)
11023 {
11024 case 0:
11025 ret = fold_builtin_0 (loc, fndecl, ignore);
11026 break;
11027 case 1:
11028 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11029 break;
11030 case 2:
11031 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11032 break;
11033 case 3:
11034 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11035 break;
11036 case 4:
11037 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11038 ignore);
11039 break;
11040 default:
11041 break;
11042 }
11043 if (ret)
11044 {
11045 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11046 SET_EXPR_LOCATION (ret, loc);
11047 TREE_NO_WARNING (ret) = 1;
11048 return ret;
11049 }
11050 return NULL_TREE;
11051 }
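
/* Note that a successful fold above is wrapped in a NOP_EXPR with
   TREE_NO_WARNING set, so that e.g. a folded call whose value is
   discarded does not later produce a "statement with no effect"
   style warning for the replacement expression.  */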
11052
11053 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11054 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11055 of arguments in ARGS to be omitted. OLDNARGS is the number of
11056 elements in ARGS. */
11057
11058 static tree
11059 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11060 int skip, tree fndecl, int n, va_list newargs)
11061 {
11062 int nargs = oldnargs - skip + n;
11063 tree *buffer;
11064
11065 if (n > 0)
11066 {
11067 int i, j;
11068
11069 buffer = XALLOCAVEC (tree, nargs);
11070 for (i = 0; i < n; i++)
11071 buffer[i] = va_arg (newargs, tree);
11072 for (j = skip; j < oldnargs; j++, i++)
11073 buffer[i] = args[j];
11074 }
11075 else
11076 buffer = args + skip;
11077
11078 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11079 }
11080
11081 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11082 list ARGS along with N new arguments specified as the "..."
11083 parameters. SKIP is the number of arguments in ARGS to be omitted.
11084 OLDNARGS is the number of elements in ARGS. */
11085
11086 static tree
11087 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11088 int skip, tree fndecl, int n, ...)
11089 {
11090 va_list ap;
11091 tree t;
11092
11093 va_start (ap, n);
11094 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11095 va_end (ap);
11096
11097 return t;
11098 }
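
/* E.g. (illustrative): with OLDNARGS == 3, ARGS == {a, b, c},
   SKIP == 1 and two new arguments x and y, the rewritten call is
   FNDECL (x, y, b, c): the new arguments come first, followed by the
   tail of ARGS that survives the skip.  */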
11099
11100 /* Return true if FNDECL shouldn't be folded right now.
11101 If a built-in function has an always_inline wrapper, defer
11102 folding it until after always_inline functions have been
11103 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
11104 might not be performed. */
11105
11106 bool
11107 avoid_folding_inline_builtin (tree fndecl)
11108 {
11109 return (DECL_DECLARED_INLINE_P (fndecl)
11110 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11111 && cfun
11112 && !cfun->always_inline_functions_inlined
11113 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11114 }
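
/* A sketch of the situation this guards against (illustrative, based
   on the glibc fortify wrappers):

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				      __builtin_object_size (__dest, 1));
     }

   Folding a strcpy call before this wrapper is inlined would bypass
   the __builtin___strcpy_chk object-size checking.  */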
11115
11116 /* A wrapper function for builtin folding that prevents warnings for
11117 "statement without effect" and the like, caused by removing the
11118 call node earlier than the warning is generated. */
11119
11120 tree
11121 fold_call_expr (location_t loc, tree exp, bool ignore)
11122 {
11123 tree ret = NULL_TREE;
11124 tree fndecl = get_callee_fndecl (exp);
11125 if (fndecl
11126 && TREE_CODE (fndecl) == FUNCTION_DECL
11127 && DECL_BUILT_IN (fndecl)
11128 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11129 yet. Defer folding until we see all the arguments
11130 (after inlining). */
11131 && !CALL_EXPR_VA_ARG_PACK (exp))
11132 {
11133 int nargs = call_expr_nargs (exp);
11134
11135 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11136 instead the last argument is __builtin_va_arg_pack (). Defer folding
11137 even in that case, until arguments are finalized. */
11138 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11139 {
11140 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11141 if (fndecl2
11142 && TREE_CODE (fndecl2) == FUNCTION_DECL
11143 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11144 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11145 return NULL_TREE;
11146 }
11147
11148 if (avoid_folding_inline_builtin (fndecl))
11149 return NULL_TREE;
11150
11151 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11152 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11153 CALL_EXPR_ARGP (exp), ignore);
11154 else
11155 {
11156 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11157 {
11158 tree *args = CALL_EXPR_ARGP (exp);
11159 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11160 }
11161 if (!ret)
11162 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11163 if (ret)
11164 return ret;
11165 }
11166 }
11167 return NULL_TREE;
11168 }
11169
11170 /* Conveniently construct a function call expression. FNDECL names the
11171 function to be called and N arguments are passed in the array
11172 ARGARRAY. */
11173
11174 tree
11175 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11176 {
11177 tree fntype = TREE_TYPE (fndecl);
11178 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11179
11180 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11181 }
11182
11183 /* Conveniently construct a function call expression. FNDECL names the
11184 function to be called and the arguments are passed in the vector
11185 VEC. */
11186
11187 tree
11188 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11189 {
11190 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11191 vec_safe_address (vec));
11192 }
11193
11194
11195 /* Conveniently construct a function call expression. FNDECL names the
11196 function to be called, N is the number of arguments, and the "..."
11197 parameters are the argument expressions. */
11198
11199 tree
11200 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11201 {
11202 va_list ap;
11203 tree *argarray = XALLOCAVEC (tree, n);
11204 int i;
11205
11206 va_start (ap, n);
11207 for (i = 0; i < n; i++)
11208 argarray[i] = va_arg (ap, tree);
11209 va_end (ap);
11210 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11211 }
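
/* E.g. (illustrative):

     tree call = build_call_expr_loc (loc, strlen_fn, 1, dst);

   builds a CALL_EXPR equivalent to strlen (dst) at source location
   LOC, assuming strlen_fn is the strlen FUNCTION_DECL.  */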
11212
11213 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11214 varargs macros aren't supported by all bootstrap compilers. */
11215
11216 tree
11217 build_call_expr (tree fndecl, int n, ...)
11218 {
11219 va_list ap;
11220 tree *argarray = XALLOCAVEC (tree, n);
11221 int i;
11222
11223 va_start (ap, n);
11224 for (i = 0; i < n; i++)
11225 argarray[i] = va_arg (ap, tree);
11226 va_end (ap);
11227 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11228 }
11229
11230 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11231 N arguments are passed in the array ARGARRAY. */
11232
11233 tree
11234 fold_builtin_call_array (location_t loc, tree type,
11235 tree fn,
11236 int n,
11237 tree *argarray)
11238 {
11239 tree ret = NULL_TREE;
11240 tree exp;
11241
11242 if (TREE_CODE (fn) == ADDR_EXPR)
11243 {
11244 tree fndecl = TREE_OPERAND (fn, 0);
11245 if (TREE_CODE (fndecl) == FUNCTION_DECL
11246 && DECL_BUILT_IN (fndecl))
11247 {
11248 /* If last argument is __builtin_va_arg_pack (), arguments to this
11249 function are not finalized yet. Defer folding until they are. */
11250 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11251 {
11252 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11253 if (fndecl2
11254 && TREE_CODE (fndecl2) == FUNCTION_DECL
11255 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11256 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11257 return build_call_array_loc (loc, type, fn, n, argarray);
11258 }
11259 if (avoid_folding_inline_builtin (fndecl))
11260 return build_call_array_loc (loc, type, fn, n, argarray);
11261 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11262 {
11263 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11264 if (ret)
11265 return ret;
11266
11267 return build_call_array_loc (loc, type, fn, n, argarray);
11268 }
11269 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11270 {
11271 /* First try the transformations that don't require consing up
11272 an exp. */
11273 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11274 if (ret)
11275 return ret;
11276 }
11277
11278 /* If we got this far, we need to build an exp. */
11279 exp = build_call_array_loc (loc, type, fn, n, argarray);
11280 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11281 return ret ? ret : exp;
11282 }
11283 }
11284
11285 return build_call_array_loc (loc, type, fn, n, argarray);
11286 }
11287
11288 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11289 along with N new arguments specified as the "..." parameters. SKIP
11290 is the number of arguments in EXP to be omitted. This function is used
11291 to do varargs-to-varargs transformations. */
11292
11293 static tree
11294 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11295 {
11296 va_list ap;
11297 tree t;
11298
11299 va_start (ap, n);
11300 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11301 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11302 va_end (ap);
11303
11304 return t;
11305 }
11306
11307 /* Validate a single argument ARG against a tree code CODE representing
11308 a type. */
11309
11310 static bool
11311 validate_arg (const_tree arg, enum tree_code code)
11312 {
11313 if (!arg)
11314 return false;
11315 else if (code == POINTER_TYPE)
11316 return POINTER_TYPE_P (TREE_TYPE (arg));
11317 else if (code == INTEGER_TYPE)
11318 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11319 return code == TREE_CODE (TREE_TYPE (arg));
11320 }
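
/* E.g. validate_arg (arg, POINTER_TYPE) accepts an argument of any
   pointer type (char *, void *, ...), and validate_arg (arg,
   INTEGER_TYPE) accepts any integral type, including enumeral and
   boolean types, since INTEGRAL_TYPE_P covers those as well.  */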
11321
11322 /* This function validates the types of a function call argument list
11323 against a specified list of tree_codes. If the last specifier is a 0,
11324 that represents an ellipses, otherwise the last specifier must be a
11325 VOID_TYPE.
11326
11327 This is the GIMPLE version of validate_arglist. Eventually we want to
11328 completely convert builtins.c to work from GIMPLEs and the tree based
11329 validate_arglist will then be removed. */
11330
11331 bool
11332 validate_gimple_arglist (const_gimple call, ...)
11333 {
11334 enum tree_code code;
11335 bool res = false;
11336 va_list ap;
11337 const_tree arg;
11338 size_t i;
11339
11340 va_start (ap, call);
11341 i = 0;
11342
11343 do
11344 {
11345 code = (enum tree_code) va_arg (ap, int);
11346 switch (code)
11347 {
11348 case 0:
11349 /* This signifies an ellipsis; any further arguments are all OK. */
11350 res = true;
11351 goto end;
11352 case VOID_TYPE:
11353 /* This signifies an endlink; if no arguments remain, return
11354 true, otherwise return false. */
11355 res = (i == gimple_call_num_args (call));
11356 goto end;
11357 default:
11358 /* If no parameters remain or the parameter's code does not
11359 match the specified code, return false. Otherwise continue
11360 checking any remaining arguments. */
11361 arg = gimple_call_arg (call, i++);
11362 if (!validate_arg (arg, code))
11363 goto end;
11364 break;
11365 }
11366 }
11367 while (1);
11368
11369 /* We need gotos here so that every exit path falls through to
11370 the single va_end call below. */
11371 end: ;
11372 va_end (ap);
11373
11374 return res;
11375 }
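
/* A typical use (illustrative), checking a memcpy-like signature of
   exactly pointer, pointer, integer:

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
				   INTEGER_TYPE, VOID_TYPE))
       return NULL_TREE;

   A trailing 0 in place of VOID_TYPE would instead accept any number
   of additional arguments, as with an ellipsis.  */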
11376
11377 /* Default target-specific builtin expander that does nothing. */
11378
11379 rtx
11380 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11381 rtx target ATTRIBUTE_UNUSED,
11382 rtx subtarget ATTRIBUTE_UNUSED,
11383 enum machine_mode mode ATTRIBUTE_UNUSED,
11384 int ignore ATTRIBUTE_UNUSED)
11385 {
11386 return NULL_RTX;
11387 }
11388
11389 /* Returns true if EXP represents data that would potentially reside
11390 in a readonly section. */
11391
11392 static bool
11393 readonly_data_expr (tree exp)
11394 {
11395 STRIP_NOPS (exp);
11396
11397 if (TREE_CODE (exp) != ADDR_EXPR)
11398 return false;
11399
11400 exp = get_base_address (TREE_OPERAND (exp, 0));
11401 if (!exp)
11402 return false;
11403
11404 /* Make sure we call decl_readonly_section only for trees it
11405 can handle (since it returns true for everything it doesn't
11406 understand). */
11407 if (TREE_CODE (exp) == STRING_CST
11408 || TREE_CODE (exp) == CONSTRUCTOR
11409 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11410 return decl_readonly_section (exp, 0);
11411 else
11412 return false;
11413 }
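
/* For instance (illustrative), given

     static const char msg[] = "hi";

   readonly_data_expr is true for &msg when the target places msg in
   a read-only section, and false for the address of an automatic or
   writable object.  */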
11414
11415 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11416 to the call, and TYPE is its return type.
11417
11418 Return NULL_TREE if no simplification was possible, otherwise return the
11419 simplified form of the call as a tree.
11420
11421 The simplified form may be a constant or other expression which
11422 computes the same value, but in a more efficient manner (including
11423 calls to other builtin functions).
11424
11425 The call may contain arguments which need to be evaluated, but
11426 which are not useful to determine the result of the call. In
11427 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11428 COMPOUND_EXPR will be an argument which must be evaluated.
11429 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11430 COMPOUND_EXPR in the chain will contain the tree for the simplified
11431 form of the builtin function call. */
11432
11433 static tree
11434 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11435 {
11436 if (!validate_arg (s1, POINTER_TYPE)
11437 || !validate_arg (s2, POINTER_TYPE))
11438 return NULL_TREE;
11439 else
11440 {
11441 tree fn;
11442 const char *p1, *p2;
11443
11444 p2 = c_getstr (s2);
11445 if (p2 == NULL)
11446 return NULL_TREE;
11447
11448 p1 = c_getstr (s1);
11449 if (p1 != NULL)
11450 {
11451 const char *r = strstr (p1, p2);
11452 tree tem;
11453
11454 if (r == NULL)
11455 return build_int_cst (TREE_TYPE (s1), 0);
11456
11457 /* Return an offset into the constant string argument. */
11458 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11459 return fold_convert_loc (loc, type, tem);
11460 }
11461
11462 /* The argument is const char *, and the result is char *, so we need
11463 a type conversion here to avoid a warning. */
11464 if (p2[0] == '\0')
11465 return fold_convert_loc (loc, type, s1);
11466
11467 if (p2[1] != '\0')
11468 return NULL_TREE;
11469
11470 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11471 if (!fn)
11472 return NULL_TREE;
11473
11474 /* New argument list transforming strstr(s1, s2) to
11475 strchr(s1, s2[0]). */
11476 return build_call_expr_loc (loc, fn, 2, s1,
11477 build_int_cst (integer_type_node, p2[0]));
11478 }
11479 }
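
/* Concretely, the transformations above are (illustrative):

     strstr ("hello", "ll") -> "hello" + 2   (compile-time constant)
     strstr (s, "")         -> (char *) s
     strstr (s, "l")        -> strchr (s, 'l')

   Anything else is left to the real strstr call.  */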
11480
11481 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11482 the call, and TYPE is its return type.
11483
11484 Return NULL_TREE if no simplification was possible, otherwise return the
11485 simplified form of the call as a tree.
11486
11487 The simplified form may be a constant or other expression which
11488 computes the same value, but in a more efficient manner (including
11489 calls to other builtin functions).
11490
11491 The call may contain arguments which need to be evaluated, but
11492 which are not useful to determine the result of the call. In
11493 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11494 COMPOUND_EXPR will be an argument which must be evaluated.
11495 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11496 COMPOUND_EXPR in the chain will contain the tree for the simplified
11497 form of the builtin function call. */
11498
11499 static tree
11500 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11501 {
11502 if (!validate_arg (s1, POINTER_TYPE)
11503 || !validate_arg (s2, INTEGER_TYPE))
11504 return NULL_TREE;
11505 else
11506 {
11507 const char *p1;
11508
11509 if (TREE_CODE (s2) != INTEGER_CST)
11510 return NULL_TREE;
11511
11512 p1 = c_getstr (s1);
11513 if (p1 != NULL)
11514 {
11515 char c;
11516 const char *r;
11517 tree tem;
11518
11519 if (target_char_cast (s2, &c))
11520 return NULL_TREE;
11521
11522 r = strchr (p1, c);
11523
11524 if (r == NULL)
11525 return build_int_cst (TREE_TYPE (s1), 0);
11526
11527 /* Return an offset into the constant string argument. */
11528 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11529 return fold_convert_loc (loc, type, tem);
11530 }
11531 return NULL_TREE;
11532 }
11533 }
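
/* Concretely (illustrative): strchr ("hello", 'l') folds to the
   constant "hello" + 2, and strchr ("hello", 'z') folds to a null
   pointer of the argument's type.  Non-constant strings are left
   alone here.  */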
11534
11535 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11536 the call, and TYPE is its return type.
11537
11538 Return NULL_TREE if no simplification was possible, otherwise return the
11539 simplified form of the call as a tree.
11540
11541 The simplified form may be a constant or other expression which
11542 computes the same value, but in a more efficient manner (including
11543 calls to other builtin functions).
11544
11545 The call may contain arguments which need to be evaluated, but
11546 which are not useful to determine the result of the call. In
11547 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11548 COMPOUND_EXPR will be an argument which must be evaluated.
11549 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11550 COMPOUND_EXPR in the chain will contain the tree for the simplified
11551 form of the builtin function call. */
11552
11553 static tree
11554 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11555 {
11556 if (!validate_arg (s1, POINTER_TYPE)
11557 || !validate_arg (s2, INTEGER_TYPE))
11558 return NULL_TREE;
11559 else
11560 {
11561 tree fn;
11562 const char *p1;
11563
11564 if (TREE_CODE (s2) != INTEGER_CST)
11565 return NULL_TREE;
11566
11567 p1 = c_getstr (s1);
11568 if (p1 != NULL)
11569 {
11570 char c;
11571 const char *r;
11572 tree tem;
11573
11574 if (target_char_cast (s2, &c))
11575 return NULL_TREE;
11576
11577 r = strrchr (p1, c);
11578
11579 if (r == NULL)
11580 return build_int_cst (TREE_TYPE (s1), 0);
11581
11582 /* Return an offset into the constant string argument. */
11583 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11584 return fold_convert_loc (loc, type, tem);
11585 }
11586
11587 if (! integer_zerop (s2))
11588 return NULL_TREE;
11589
11590 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11591 if (!fn)
11592 return NULL_TREE;
11593
11594 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11595 return build_call_expr_loc (loc, fn, 2, s1, s2);
11596 }
11597 }
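
/* Concretely (illustrative): strrchr ("abca", 'a') folds to the
   constant "abca" + 3, strrchr ("abc", 'z') folds to a null pointer,
   and for a non-constant string strrchr (s, '\0') becomes
   strchr (s, '\0'), since both locate the terminating nul.  */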
11598
11599 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11600 to the call, and TYPE is its return type.
11601
11602 Return NULL_TREE if no simplification was possible, otherwise return the
11603 simplified form of the call as a tree.
11604
11605 The simplified form may be a constant or other expression which
11606 computes the same value, but in a more efficient manner (including
11607 calls to other builtin functions).
11608
11609 The call may contain arguments which need to be evaluated, but
11610 which are not useful to determine the result of the call. In
11611 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11612 COMPOUND_EXPR will be an argument which must be evaluated.
11613 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11614 COMPOUND_EXPR in the chain will contain the tree for the simplified
11615 form of the builtin function call. */
11616
11617 static tree
11618 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11619 {
11620 if (!validate_arg (s1, POINTER_TYPE)
11621 || !validate_arg (s2, POINTER_TYPE))
11622 return NULL_TREE;
11623 else
11624 {
11625 tree fn;
11626 const char *p1, *p2;
11627
11628 p2 = c_getstr (s2);
11629 if (p2 == NULL)
11630 return NULL_TREE;
11631
11632 p1 = c_getstr (s1);
11633 if (p1 != NULL)
11634 {
11635 const char *r = strpbrk (p1, p2);
11636 tree tem;
11637
11638 if (r == NULL)
11639 return build_int_cst (TREE_TYPE (s1), 0);
11640
11641 /* Return an offset into the constant string argument. */
11642 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11643 return fold_convert_loc (loc, type, tem);
11644 }
11645
11646 if (p2[0] == '\0')
11647 /* strpbrk(x, "") == NULL.
11648 Evaluate and ignore s1 in case it had side-effects. */
11649 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11650
11651 if (p2[1] != '\0')
11652 return NULL_TREE; /* Really call strpbrk. */
11653
11654 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11655 if (!fn)
11656 return NULL_TREE;
11657
11658 /* New argument list transforming strpbrk(s1, s2) to
11659 strchr(s1, s2[0]). */
11660 return build_call_expr_loc (loc, fn, 2, s1,
11661 build_int_cst (integer_type_node, p2[0]));
11662 }
11663 }
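
/* Concretely (illustrative):

     strpbrk ("hello", "lo") -> "hello" + 2   (compile-time constant)
     strpbrk (s, "")         -> NULL, still evaluating s
     strpbrk (s, "x")        -> strchr (s, 'x')  */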
11664
11665 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11666 to the call.
11667
11668 Return NULL_TREE if no simplification was possible, otherwise return the
11669 simplified form of the call as a tree.
11670
11671 The simplified form may be a constant or other expression which
11672 computes the same value, but in a more efficient manner (including
11673 calls to other builtin functions).
11674
11675 The call may contain arguments which need to be evaluated, but
11676 which are not useful to determine the result of the call. In
11677 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11678 COMPOUND_EXPR will be an argument which must be evaluated.
11679 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11680 COMPOUND_EXPR in the chain will contain the tree for the simplified
11681 form of the builtin function call. */
11682
11683 static tree
11684 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11685 {
11686 if (!validate_arg (dst, POINTER_TYPE)
11687 || !validate_arg (src, POINTER_TYPE))
11688 return NULL_TREE;
11689 else
11690 {
11691 const char *p = c_getstr (src);
11692
11693 /* If the string length is zero, return the dst parameter. */
11694 if (p && *p == '\0')
11695 return dst;
11696
11697 if (optimize_insn_for_speed_p ())
11698 {
11699 /* See if we can store by pieces into (dst + strlen(dst)). */
11700 tree newdst, call;
11701 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11702 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11703
11704 if (!strlen_fn || !strcpy_fn)
11705 return NULL_TREE;
11706
11707 /* If we don't have a movstr pattern, only emit a strcpy call
11708 when the length of the source string is computable; the
11709 strcpy can then be expanded as a memcpy, probably later
11710 becoming a sequence of mov instructions. If we have movstr
11711 instructions we can always emit strcpy calls. */
11712 if (!HAVE_movstr)
11713 {
11714 tree len = c_strlen (src, 1);
11715 if (! len || TREE_SIDE_EFFECTS (len))
11716 return NULL_TREE;
11717 }
11718
11719 /* Stabilize the argument list. */
11720 dst = builtin_save_expr (dst);
11721
11722 /* Create strlen (dst). */
11723 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11724
11725 /* Create (dst p+ strlen (dst)). */
11726 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11727 newdst = builtin_save_expr (newdst);
11728
11729 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11730 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11731 }
11732 return NULL_TREE;
11733 }
11734 }
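
/* Concretely (illustrative), when optimizing for speed and the source
   length is known, strcat (dst, "abc") becomes roughly

     (strcpy (dst + strlen (dst), "abc"), dst)

   i.e. a strcpy into dst + strlen (dst) chained through a
   COMPOUND_EXPR so that the whole expression still yields dst (the
   SAVE_EXPRs that stabilize dst are omitted above).  */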
11735
11736 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11737 arguments to the call.
11738
11739 Return NULL_TREE if no simplification was possible, otherwise return the
11740 simplified form of the call as a tree.
11741
11742 The simplified form may be a constant or other expression which
11743 computes the same value, but in a more efficient manner (including
11744 calls to other builtin functions).
11745
11746 The call may contain arguments which need to be evaluated, but
11747 which are not useful to determine the result of the call. In
11748 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11749 COMPOUND_EXPR will be an argument which must be evaluated.
11750 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11751 COMPOUND_EXPR in the chain will contain the tree for the simplified
11752 form of the builtin function call. */
11753
11754 static tree
11755 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11756 {
11757 if (!validate_arg (dst, POINTER_TYPE)
11758 || !validate_arg (src, POINTER_TYPE)
11759 || !validate_arg (len, INTEGER_TYPE))
11760 return NULL_TREE;
11761 else
11762 {
11763 const char *p = c_getstr (src);
11764
11765 /* If the requested length is zero, or the src parameter string
11766 length is zero, return the dst parameter. */
11767 if (integer_zerop (len) || (p && *p == '\0'))
11768 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11769
11770 /* If the requested len is greater than or equal to the string
11771 length, call strcat. */
11772 if (TREE_CODE (len) == INTEGER_CST && p
11773 && compare_tree_int (len, strlen (p)) >= 0)
11774 {
11775 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11776
11777 /* If the replacement _DECL isn't initialized, don't do the
11778 transformation. */
11779 if (!fn)
11780 return NULL_TREE;
11781
11782 return build_call_expr_loc (loc, fn, 2, dst, src);
11783 }
11784 return NULL_TREE;
11785 }
11786 }
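
/* Concretely (illustrative): strncat (dst, src, 0) and
   strncat (dst, "", n) both fold to dst with the remaining operands
   still evaluated, and strncat (dst, "ab", 5) becomes
   strcat (dst, "ab"), since the bound 5 is at least strlen ("ab").  */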
11787
11788 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11789 to the call.
11790
11791 Return NULL_TREE if no simplification was possible, otherwise return the
11792 simplified form of the call as a tree.
11793
11794 The simplified form may be a constant or other expression which
11795 computes the same value, but in a more efficient manner (including
11796 calls to other builtin functions).
11797
11798 The call may contain arguments which need to be evaluated, but
11799 which are not useful to determine the result of the call. In
11800 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11801 COMPOUND_EXPR will be an argument which must be evaluated.
11802 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11803 COMPOUND_EXPR in the chain will contain the tree for the simplified
11804 form of the builtin function call. */
11805
11806 static tree
11807 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11808 {
11809 if (!validate_arg (s1, POINTER_TYPE)
11810 || !validate_arg (s2, POINTER_TYPE))
11811 return NULL_TREE;
11812 else
11813 {
11814 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11815
11816 /* If both arguments are constants, evaluate at compile-time. */
11817 if (p1 && p2)
11818 {
11819 const size_t r = strspn (p1, p2);
11820 return build_int_cst (size_type_node, r);
11821 }
11822
11823 /* If either argument is "", the result is zero. */
11824 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11825 /* Evaluate and ignore both arguments in case either one has
11826 side-effects. */
11827 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11828 s1, s2);
11829 return NULL_TREE;
11830 }
11831 }
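
/* Concretely (illustrative): strspn ("aab", "ab") folds to the
   constant 3, while strspn (s, "") and strspn ("", s) fold to 0 with
   both operands still evaluated for side effects.  */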
11832
11833 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11834 to the call.
11835
11836 Return NULL_TREE if no simplification was possible, otherwise return the
11837 simplified form of the call as a tree.
11838
11839 The simplified form may be a constant or other expression which
11840 computes the same value, but in a more efficient manner (including
11841 calls to other builtin functions).
11842
11843 The call may contain arguments which need to be evaluated, but
11844 which are not useful to determine the result of the call. In
11845 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11846 COMPOUND_EXPR will be an argument which must be evaluated.
11847 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11848 COMPOUND_EXPR in the chain will contain the tree for the simplified
11849 form of the builtin function call. */
11850
11851 static tree
11852 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11853 {
11854 if (!validate_arg (s1, POINTER_TYPE)
11855 || !validate_arg (s2, POINTER_TYPE))
11856 return NULL_TREE;
11857 else
11858 {
11859 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11860
11861 /* If both arguments are constants, evaluate at compile-time. */
11862 if (p1 && p2)
11863 {
11864 const size_t r = strcspn (p1, p2);
11865 return build_int_cst (size_type_node, r);
11866 }
11867
11868 /* If the first argument is "", the result is zero. */
11869 if (p1 && *p1 == '\0')
11870 {
11871 /* Evaluate and ignore argument s2 in case it has
11872 side-effects. */
11873 return omit_one_operand_loc (loc, size_type_node,
11874 size_zero_node, s2);
11875 }
11876
11877 /* If the second argument is "", return __builtin_strlen(s1). */
11878 if (p2 && *p2 == '\0')
11879 {
11880 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11881
11882 /* If the replacement _DECL isn't initialized, don't do the
11883 transformation. */
11884 if (!fn)
11885 return NULL_TREE;
11886
11887 return build_call_expr_loc (loc, fn, 1, s1);
11888 }
11889 return NULL_TREE;
11890 }
11891 }
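
/* Concretely (illustrative): strcspn ("abc", "c") folds to the
   constant 2, strcspn ("", s) folds to 0 (still evaluating s), and
   strcspn (s, "") becomes strlen (s), since no character can match
   an empty reject set.  */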
11892
11893 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11894 to the call. IGNORE is true if the value returned
11895 by the builtin will be ignored. UNLOCKED is true if this is
11896 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11897 the known length of the string. Return NULL_TREE if no simplification
11898 was possible. */
11899
11900 tree
11901 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11902 bool ignore, bool unlocked, tree len)
11903 {
11904 /* If we're using an unlocked function, assume the other unlocked
11905 functions exist explicitly. */
11906 tree const fn_fputc = (unlocked
11907 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11908 : builtin_decl_implicit (BUILT_IN_FPUTC));
11909 tree const fn_fwrite = (unlocked
11910 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11911 : builtin_decl_implicit (BUILT_IN_FWRITE));
11912
11913 /* If the return value is used, don't do the transformation. */
11914 if (!ignore)
11915 return NULL_TREE;
11916
11917 /* Verify the arguments in the original call. */
11918 if (!validate_arg (arg0, POINTER_TYPE)
11919 || !validate_arg (arg1, POINTER_TYPE))
11920 return NULL_TREE;
11921
11922 if (! len)
11923 len = c_strlen (arg0, 0);
11924
11925 /* Get the length of the string passed to fputs. If the length
11926 can't be determined, punt. */
11927 if (!len
11928 || TREE_CODE (len) != INTEGER_CST)
11929 return NULL_TREE;
11930
11931 switch (compare_tree_int (len, 1))
11932 {
11933 case -1: /* length is 0, delete the call entirely. */
11934 return omit_one_operand_loc (loc, integer_type_node,
11935 integer_zero_node, arg1);
11936
11937 case 0: /* length is 1, call fputc. */
11938 {
11939 const char *p = c_getstr (arg0);
11940
11941 if (p != NULL)
11942 {
11943 if (fn_fputc)
11944 return build_call_expr_loc (loc, fn_fputc, 2,
11945 build_int_cst
11946 (integer_type_node, p[0]), arg1);
11947 else
11948 return NULL_TREE;
11949 }
11950 }
11951 /* FALLTHROUGH */
11952 case 1: /* length is greater than 1, call fwrite. */
11953 {
11954 /* If optimizing for size keep fputs. */
11955 if (optimize_function_for_size_p (cfun))
11956 return NULL_TREE;
11957 /* New argument list transforming fputs(string, stream) to
11958 fwrite(string, 1, len, stream). */
11959 if (fn_fwrite)
11960 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11961 size_one_node, len, arg1);
11962 else
11963 return NULL_TREE;
11964 }
11965 default:
11966 gcc_unreachable ();
11967 }
11968 return NULL_TREE;
11969 }
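
/* Concretely (illustrative), when the result is unused:

     fputs ("", f)    -> 0, still evaluating f
     fputs ("x", f)   -> fputc ('x', f)
     fputs ("abc", f) -> fwrite ("abc", 1, 3, f)

   except that the fwrite transformation is skipped when optimizing
   for size, and the unlocked variant maps to fputc_unlocked and
   fwrite_unlocked in the same way.  */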
11970
11971 /* Fold the next_arg or va_start call EXP. Return true if an error
11972 was produced, false otherwise. This is done so that we don't output
11973 the error or warning two or three times. */
11974
11975 bool
11976 fold_builtin_next_arg (tree exp, bool va_start_p)
11977 {
11978 tree fntype = TREE_TYPE (current_function_decl);
11979 int nargs = call_expr_nargs (exp);
11980 tree arg;
11981 /* There is a good chance the current input_location points inside the
11982 definition of the va_start macro (perhaps on the token for
11983 builtin) in a system header, so warnings will not be emitted.
11984 Use the location in real source code. */
11985 source_location current_location =
11986 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11987 NULL);
11988
11989 if (!stdarg_p (fntype))
11990 {
11991 error ("%<va_start%> used in function with fixed args");
11992 return true;
11993 }
11994
11995 if (va_start_p)
11996 {
11997 if (nargs != 2)
11998 {
11999 error ("wrong number of arguments to function %<va_start%>");
12000 return true;
12001 }
12002 arg = CALL_EXPR_ARG (exp, 1);
12003 }
12004 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12005 once we have checked the arguments and, if needed, issued a warning. */
12006 else
12007 {
12008 if (nargs == 0)
12009 {
12010 /* Evidently an out of date version of <stdarg.h>; can't validate
12011 va_start's second argument, but can still work as intended. */
12012 warning_at (current_location,
12013 OPT_Wvarargs,
12014 "%<__builtin_next_arg%> called without an argument");
12015 return true;
12016 }
12017 else if (nargs > 1)
12018 {
12019 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12020 return true;
12021 }
12022 arg = CALL_EXPR_ARG (exp, 0);
12023 }
12024
12025 if (TREE_CODE (arg) == SSA_NAME)
12026 arg = SSA_NAME_VAR (arg);
12027
12028 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12029 or __builtin_next_arg (0) the first time we see it, after checking
12030 the arguments and if needed issuing a warning. */
12031 if (!integer_zerop (arg))
12032 {
12033 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12034
12035 /* Strip off all nops for the sake of the comparison. This
12036 is not quite the same as STRIP_NOPS. It does more.
12037 We must also strip off INDIRECT_EXPR for C++ reference
12038 parameters. */
12039 while (CONVERT_EXPR_P (arg)
12040 || TREE_CODE (arg) == INDIRECT_REF)
12041 arg = TREE_OPERAND (arg, 0);
12042 if (arg != last_parm)
12043 {
12044 /* FIXME: Sometimes with the tree optimizers we can end up with
12045 something other than the last argument even though the user
12046 used the last argument. We just warn and set the arg to be the
12047 last argument so that we will get wrong code because of
12048 it. */
12049 warning_at (current_location,
12050 OPT_Wvarargs,
12051 "second parameter of %<va_start%> not last named argument");
12052 }
12053
12054 /* Undefined by C99 7.15.1.4p4 (va_start):
12055 "If the parameter parmN is declared with the register storage
12056 class, with a function or array type, or with a type that is
12057 not compatible with the type that results after application of
12058 the default argument promotions, the behavior is undefined."
12059 */
12060 else if (DECL_REGISTER (arg))
12061 {
12062 warning_at (current_location,
12063 OPT_Wvarargs,
12064 "undefined behaviour when second parameter of "
12065 "%<va_start%> is declared with %<register%> storage");
12066 }
12067
12068 /* We want to verify the second parameter just once before the tree
12069 optimizers are run and then avoid keeping it in the tree,
12070 as otherwise we could warn even for correct code like:
12071 void foo (int i, ...)
12072 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12073 if (va_start_p)
12074 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12075 else
12076 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12077 }
12078 return false;
12079 }
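
/* An illustrative example of code the checks above diagnose: in

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
       va_end (ap);
     }

   the second argument of va_start is not the last named parameter,
   so the "second parameter of va_start not last named argument"
   warning is emitted; and in a function declared without an ellipsis
   any use of va_start is rejected outright.  */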
12080
12081
12082 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12083 ORIG may be null if this is a 2-argument call. We don't attempt to
12084 simplify calls with more than 3 arguments.
12085
12086 Return NULL_TREE if no simplification was possible, otherwise return the
12087 simplified form of the call as a tree. If IGNORED is true, it means that
12088 the caller does not use the returned value of the function. */
12089
12090 static tree
12091 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12092 tree orig, int ignored)
12093 {
12094 tree call, retval;
12095 const char *fmt_str = NULL;
12096
12097 /* Verify the required arguments in the original call. We deal with two
12098 types of sprintf() calls: 'sprintf (str, fmt)' and
12099 'sprintf (dest, "%s", orig)'. */
12100 if (!validate_arg (dest, POINTER_TYPE)
12101 || !validate_arg (fmt, POINTER_TYPE))
12102 return NULL_TREE;
12103 if (orig && !validate_arg (orig, POINTER_TYPE))
12104 return NULL_TREE;
12105
12106 /* Check whether the format is a literal string constant. */
12107 fmt_str = c_getstr (fmt);
12108 if (fmt_str == NULL)
12109 return NULL_TREE;
12110
12111 call = NULL_TREE;
12112 retval = NULL_TREE;
12113
12114 if (!init_target_chars ())
12115 return NULL_TREE;
12116
12117 /* If the format doesn't contain % args or %%, use strcpy. */
12118 if (strchr (fmt_str, target_percent) == NULL)
12119 {
12120 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12121
12122 if (!fn)
12123 return NULL_TREE;
12124
12125 /* Don't optimize sprintf (buf, "abc", ptr++). */
12126 if (orig)
12127 return NULL_TREE;
12128
12129 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12130 'format' is known to contain no % formats. */
12131 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12132 if (!ignored)
12133 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12134 }
12135
12136 /* If the format is "%s", convert to strcpy; if the result is used, this also requires strlen (ORIG) to be a known constant. */
12137 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12138 {
12139 tree fn;
12140 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12141
12142 if (!fn)
12143 return NULL_TREE;
12144
12145 /* Don't crash on sprintf (str1, "%s"). */
12146 if (!orig)
12147 return NULL_TREE;
12148
12149 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12150 if (!ignored)
12151 {
12152 retval = c_strlen (orig, 1);
12153 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12154 return NULL_TREE;
12155 }
12156 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12157 }
12158
12159 if (call && retval)
12160 {
12161 retval = fold_convert_loc
12162 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12163 retval);
12164 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12165 }
12166 else
12167 return call;
12168 }
12169
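/* A sketch of the folds performed above (hypothetical buf/src/i;
   the return value is built only when the call's result is used):

     sprintf (buf, "hello")    =>  strcpy (buf, "hello")  [value 5]
     sprintf (buf, "%s", src)  =>  strcpy (buf, src)      [needs constant
                                   strlen (src) if the value is used]
     sprintf (buf, "%d", i)    =>  not folded, NULL_TREE.  */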
12170 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12171 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12172 attempt to simplify calls with more than 4 arguments.
12173
12174 Return NULL_TREE if no simplification was possible, otherwise return the
12175 simplified form of the call as a tree. If IGNORED is true, it means that
12176 the caller does not use the returned value of the function. */
12177
12178 static tree
12179 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12180 tree orig, int ignored)
12181 {
12182 tree call, retval;
12183 const char *fmt_str = NULL;
12184 unsigned HOST_WIDE_INT destlen;
12185
12186 /* Verify the required arguments in the original call. We deal with two
12187 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12188 'snprintf (dest, cst, "%s", orig)'. */
12189 if (!validate_arg (dest, POINTER_TYPE)
12190 || !validate_arg (destsize, INTEGER_TYPE)
12191 || !validate_arg (fmt, POINTER_TYPE))
12192 return NULL_TREE;
12193 if (orig && !validate_arg (orig, POINTER_TYPE))
12194 return NULL_TREE;
12195
12196 if (!tree_fits_uhwi_p (destsize))
12197 return NULL_TREE;
12198
12199 /* Check whether the format is a literal string constant. */
12200 fmt_str = c_getstr (fmt);
12201 if (fmt_str == NULL)
12202 return NULL_TREE;
12203
12204 call = NULL_TREE;
12205 retval = NULL_TREE;
12206
12207 if (!init_target_chars ())
12208 return NULL_TREE;
12209
12210 destlen = tree_to_uhwi (destsize);
12211
12212 /* If the format doesn't contain % args or %%, use strcpy. */
12213 if (strchr (fmt_str, target_percent) == NULL)
12214 {
12215 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12216 size_t len = strlen (fmt_str);
12217
12218 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12219 if (orig)
12220 return NULL_TREE;
12221
12222 /* We could expand this as
12223 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12224 or to
12225 memcpy (str, fmt_with_nul_at_cstm1, cst);
12226 but in the former case that might increase code size
12227 and in the latter case grow .rodata section too much.
12228 So punt for now. */
12229 if (len >= destlen)
12230 return NULL_TREE;
12231
12232 if (!fn)
12233 return NULL_TREE;
12234
12235 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12236 'format' is known to contain no % formats and
12237 strlen (fmt) < cst. */
12238 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12239
12240 if (!ignored)
12241 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12242 }
12243
12244 /* If the format is "%s", convert to strcpy when strlen (ORIG) is a known constant smaller than DESTSIZE. */
12245 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12246 {
12247 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12248 unsigned HOST_WIDE_INT origlen;
12249
12250 /* Don't crash on snprintf (str1, cst, "%s"). */
12251 if (!orig)
12252 return NULL_TREE;
12253
12254 retval = c_strlen (orig, 1);
12255 if (!retval || !tree_fits_uhwi_p (retval))
12256 return NULL_TREE;
12257
12258 origlen = tree_to_uhwi (retval);
12259 /* We could expand this as
12260 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12261 or to
12262 memcpy (str1, str2_with_nul_at_cstm1, cst);
12263 but in the former case that might increase code size
12264 and in the latter case grow .rodata section too much.
12265 So punt for now. */
12266 if (origlen >= destlen)
12267 return NULL_TREE;
12268
12269 /* Convert snprintf (str1, cst, "%s", str2) into
12270 strcpy (str1, str2) if strlen (str2) < cst. */
12271 if (!fn)
12272 return NULL_TREE;
12273
12274 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12275
12276 if (ignored)
12277 retval = NULL_TREE;
12278 }
12279
12280 if (call && retval)
12281 {
12282 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12283 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12284 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12285 }
12286 else
12287 return call;
12288 }
12289
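/* Likewise for snprintf, assuming a constant DESTSIZE (hypothetical
   buf):

     snprintf (buf, 16, "hello")     =>  strcpy (buf, "hello")  [5 < 16]
     snprintf (buf, 4, "hello")      =>  not folded  [5 >= 4, would
                                         truncate, so punt as above]
     snprintf (buf, 16, "%s", "hi")  =>  strcpy (buf, "hi").  */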
12290 /* Expand a call EXP to __builtin_object_size. */
12291
12292 rtx
12293 expand_builtin_object_size (tree exp)
12294 {
12295 tree ost;
12296 int object_size_type;
12297 tree fndecl = get_callee_fndecl (exp);
12298
12299 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12300 {
12301 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12302 exp, fndecl);
12303 expand_builtin_trap ();
12304 return const0_rtx;
12305 }
12306
12307 ost = CALL_EXPR_ARG (exp, 1);
12308 STRIP_NOPS (ost);
12309
12310 if (TREE_CODE (ost) != INTEGER_CST
12311 || tree_int_cst_sgn (ost) < 0
12312 || compare_tree_int (ost, 3) > 0)
12313 {
12314 error ("%Klast argument of %D is not integer constant between 0 and 3",
12315 exp, fndecl);
12316 expand_builtin_trap ();
12317 return const0_rtx;
12318 }
12319
12320 object_size_type = tree_to_shwi (ost);
12321
12322 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12323 }
12324
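/* By expansion time nothing more can be learned about the object, so
   the fallback constants above amount to, e.g. (hypothetical p):

     __builtin_object_size (p, 0)  =>  (size_t) -1   [maximum estimate]
     __builtin_object_size (p, 2)  =>  (size_t) 0    [minimum estimate].  */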
12325 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12326 FCODE is the BUILT_IN_* to use.
12327 Return NULL_RTX if we failed; the caller should emit a normal call,
12328 otherwise try to get the result in TARGET, if convenient (and in
12329 mode MODE if that's convenient). */
12330
12331 static rtx
12332 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12333 enum built_in_function fcode)
12334 {
12335 tree dest, src, len, size;
12336
12337 if (!validate_arglist (exp,
12338 POINTER_TYPE,
12339 fcode == BUILT_IN_MEMSET_CHK
12340 ? INTEGER_TYPE : POINTER_TYPE,
12341 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12342 return NULL_RTX;
12343
12344 dest = CALL_EXPR_ARG (exp, 0);
12345 src = CALL_EXPR_ARG (exp, 1);
12346 len = CALL_EXPR_ARG (exp, 2);
12347 size = CALL_EXPR_ARG (exp, 3);
12348
12349 if (! tree_fits_uhwi_p (size))
12350 return NULL_RTX;
12351
12352 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12353 {
12354 tree fn;
12355
12356 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12357 {
12358 warning_at (tree_nonartificial_location (exp),
12359 0, "%Kcall to %D will always overflow destination buffer",
12360 exp, get_callee_fndecl (exp));
12361 return NULL_RTX;
12362 }
12363
12364 fn = NULL_TREE;
12365 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12366 mem{cpy,pcpy,move,set} is available. */
12367 switch (fcode)
12368 {
12369 case BUILT_IN_MEMCPY_CHK:
12370 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12371 break;
12372 case BUILT_IN_MEMPCPY_CHK:
12373 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12374 break;
12375 case BUILT_IN_MEMMOVE_CHK:
12376 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12377 break;
12378 case BUILT_IN_MEMSET_CHK:
12379 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12380 break;
12381 default:
12382 break;
12383 }
12384
12385 if (! fn)
12386 return NULL_RTX;
12387
12388 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12389 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12390 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12391 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12392 }
12393 else if (fcode == BUILT_IN_MEMSET_CHK)
12394 return NULL_RTX;
12395 else
12396 {
12397 unsigned int dest_align = get_pointer_alignment (dest);
12398
12399 /* If DEST is not a pointer type, call the normal function. */
12400 if (dest_align == 0)
12401 return NULL_RTX;
12402
12403 /* If SRC and DEST are the same (and not volatile), do nothing. */
12404 if (operand_equal_p (src, dest, 0))
12405 {
12406 tree expr;
12407
12408 if (fcode != BUILT_IN_MEMPCPY_CHK)
12409 {
12410 /* Evaluate and ignore LEN in case it has side-effects. */
12411 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12412 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12413 }
12414
12415 expr = fold_build_pointer_plus (dest, len);
12416 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12417 }
12418
12419 /* __memmove_chk special case. */
12420 if (fcode == BUILT_IN_MEMMOVE_CHK)
12421 {
12422 unsigned int src_align = get_pointer_alignment (src);
12423
12424 if (src_align == 0)
12425 return NULL_RTX;
12426
12427 /* If src is categorized for a readonly section we can use
12428 normal __memcpy_chk. */
12429 if (readonly_data_expr (src))
12430 {
12431 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12432 if (!fn)
12433 return NULL_RTX;
12434 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12435 dest, src, len, size);
12436 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12437 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12438 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12439 }
12440 }
12441 return NULL_RTX;
12442 }
12443 }
12444
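/* A sketch of the expansion cases above, with a constant SIZE
   (hypothetical d/s/n):

     __memcpy_chk (d, s, 10, 32)  =>  memcpy (d, s, 10)    [10 <= 32]
     __memcpy_chk (d, s, 64, 32)  =>  warn "will always overflow",
                                      then emit the normal call
     __mempcpy_chk (d, d, n, 32)  =>  d + n  [self-copy, non-constant N].  */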
12445 /* Emit warning if a buffer overflow is detected at compile time. */
12446
12447 static void
12448 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12449 {
12450 int is_strlen = 0;
12451 tree len, size;
12452 location_t loc = tree_nonartificial_location (exp);
12453
12454 switch (fcode)
12455 {
12456 case BUILT_IN_STRCPY_CHK:
12457 case BUILT_IN_STPCPY_CHK:
12458 /* For __strcat_chk the warning will be emitted only if overflowing
12459 by at least strlen (dest) + 1 bytes. */
12460 case BUILT_IN_STRCAT_CHK:
12461 len = CALL_EXPR_ARG (exp, 1);
12462 size = CALL_EXPR_ARG (exp, 2);
12463 is_strlen = 1;
12464 break;
12465 case BUILT_IN_STRNCAT_CHK:
12466 case BUILT_IN_STRNCPY_CHK:
12467 case BUILT_IN_STPNCPY_CHK:
12468 len = CALL_EXPR_ARG (exp, 2);
12469 size = CALL_EXPR_ARG (exp, 3);
12470 break;
12471 case BUILT_IN_SNPRINTF_CHK:
12472 case BUILT_IN_VSNPRINTF_CHK:
12473 len = CALL_EXPR_ARG (exp, 1);
12474 size = CALL_EXPR_ARG (exp, 3);
12475 break;
12476 default:
12477 gcc_unreachable ();
12478 }
12479
12480 if (!len || !size)
12481 return;
12482
12483 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12484 return;
12485
12486 if (is_strlen)
12487 {
12488 len = c_strlen (len, 1);
12489 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12490 return;
12491 }
12492 else if (fcode == BUILT_IN_STRNCAT_CHK)
12493 {
12494 tree src = CALL_EXPR_ARG (exp, 1);
12495 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12496 return;
12497 src = c_strlen (src, 1);
12498 if (! src || ! tree_fits_uhwi_p (src))
12499 {
12500 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12501 exp, get_callee_fndecl (exp));
12502 return;
12503 }
12504 else if (tree_int_cst_lt (src, size))
12505 return;
12506 }
12507 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12508 return;
12509
12510 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12511 exp, get_callee_fndecl (exp));
12512 }
12513
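/* For example (hypothetical snippet):

     char buf[4];
     __strcpy_chk (buf, "too long", 4);

   has c_strlen ("too long") == 8, which is not less than SIZE == 4,
   so the "will always overflow" warning above fires at compile time.  */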
12514 /* Emit warning if a buffer overflow is detected at compile time
12515 in __sprintf_chk/__vsprintf_chk calls. */
12516
12517 static void
12518 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12519 {
12520 tree size, len, fmt;
12521 const char *fmt_str;
12522 int nargs = call_expr_nargs (exp);
12523
12524 /* Verify the required arguments in the original call. */
12525
12526 if (nargs < 4)
12527 return;
12528 size = CALL_EXPR_ARG (exp, 2);
12529 fmt = CALL_EXPR_ARG (exp, 3);
12530
12531 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12532 return;
12533
12534 /* Check whether the format is a literal string constant. */
12535 fmt_str = c_getstr (fmt);
12536 if (fmt_str == NULL)
12537 return;
12538
12539 if (!init_target_chars ())
12540 return;
12541
12542 /* If the format doesn't contain % args or %%, we know its size. */
12543 if (strchr (fmt_str, target_percent) == 0)
12544 len = build_int_cstu (size_type_node, strlen (fmt_str));
12545 /* If the format is "%s" and the first ... argument is a string
12546 literal, we know the length too. */
12547 else if (fcode == BUILT_IN_SPRINTF_CHK
12548 && strcmp (fmt_str, target_percent_s) == 0)
12549 {
12550 tree arg;
12551
12552 if (nargs < 5)
12553 return;
12554 arg = CALL_EXPR_ARG (exp, 4);
12555 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12556 return;
12557
12558 len = c_strlen (arg, 1);
12559 if (!len || ! tree_fits_uhwi_p (len))
12560 return;
12561 }
12562 else
12563 return;
12564
12565 if (! tree_int_cst_lt (len, size))
12566 warning_at (tree_nonartificial_location (exp),
12567 0, "%Kcall to %D will always overflow destination buffer",
12568 exp, get_callee_fndecl (exp));
12569 }
12570
12571 /* Emit warning if a free is called with address of a variable. */
12572
12573 static void
12574 maybe_emit_free_warning (tree exp)
12575 {
12576 tree arg = CALL_EXPR_ARG (exp, 0);
12577
12578 STRIP_NOPS (arg);
12579 if (TREE_CODE (arg) != ADDR_EXPR)
12580 return;
12581
12582 arg = get_base_address (TREE_OPERAND (arg, 0));
12583 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12584 return;
12585
12586 if (SSA_VAR_P (arg))
12587 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12588 "%Kattempt to free a non-heap object %qD", exp, arg);
12589 else
12590 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12591 "%Kattempt to free a non-heap object", exp);
12592 }
12593
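/* E.g. (hypothetical i/n):

     int i;  free (&i);     =>  -Wfree-nonheap-object warning
     free (malloc (n));     =>  no warning; nothing non-heap is visible.  */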
12594 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12595 if possible. */
12596
12597 tree
12598 fold_builtin_object_size (tree ptr, tree ost)
12599 {
12600 unsigned HOST_WIDE_INT bytes;
12601 int object_size_type;
12602
12603 if (!validate_arg (ptr, POINTER_TYPE)
12604 || !validate_arg (ost, INTEGER_TYPE))
12605 return NULL_TREE;
12606
12607 STRIP_NOPS (ost);
12608
12609 if (TREE_CODE (ost) != INTEGER_CST
12610 || tree_int_cst_sgn (ost) < 0
12611 || compare_tree_int (ost, 3) > 0)
12612 return NULL_TREE;
12613
12614 object_size_type = tree_to_shwi (ost);
12615
12616 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12617 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12618 and (size_t) 0 for types 2 and 3. */
12619 if (TREE_SIDE_EFFECTS (ptr))
12620 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12621
12622 if (TREE_CODE (ptr) == ADDR_EXPR)
12623 {
12624 bytes = compute_builtin_object_size (ptr, object_size_type);
12625 if (wi::fits_to_tree_p (bytes, size_type_node))
12626 return build_int_cstu (size_type_node, bytes);
12627 }
12628 else if (TREE_CODE (ptr) == SSA_NAME)
12629 {
12630 /* If object size is not known yet, delay folding until
12631 later. Maybe subsequent passes will help determining
12632 it. */
12633 bytes = compute_builtin_object_size (ptr, object_size_type);
12634 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12635 && wi::fits_to_tree_p (bytes, size_type_node))
12636 return build_int_cstu (size_type_node, bytes);
12637 }
12638
12639 return NULL_TREE;
12640 }
12641
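/* Unlike the expansion-time fallback, this fold can still see the
   object.  E.g. (hypothetical a):

     char a[16];
     __builtin_object_size (&a[4], 0)   =>  (size_t) 12

   since 12 bytes remain from &a[4] to the end of the object.  */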
12642 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12643 DEST, SRC, LEN, and SIZE are the arguments to the call.
12644 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12645 code of the builtin. If MAXLEN is not NULL, it is maximum length
12646 passed as third argument. */
12647
12648 tree
12649 fold_builtin_memory_chk (location_t loc, tree fndecl,
12650 tree dest, tree src, tree len, tree size,
12651 tree maxlen, bool ignore,
12652 enum built_in_function fcode)
12653 {
12654 tree fn;
12655
12656 if (!validate_arg (dest, POINTER_TYPE)
12657 || !validate_arg (src,
12658 (fcode == BUILT_IN_MEMSET_CHK
12659 ? INTEGER_TYPE : POINTER_TYPE))
12660 || !validate_arg (len, INTEGER_TYPE)
12661 || !validate_arg (size, INTEGER_TYPE))
12662 return NULL_TREE;
12663
12664 /* If SRC and DEST are the same (and not volatile), return DEST
12665 (resp. DEST+LEN for __mempcpy_chk). */
12666 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12667 {
12668 if (fcode != BUILT_IN_MEMPCPY_CHK)
12669 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12670 dest, len);
12671 else
12672 {
12673 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12674 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12675 }
12676 }
12677
12678 if (! tree_fits_uhwi_p (size))
12679 return NULL_TREE;
12680
12681 if (! integer_all_onesp (size))
12682 {
12683 if (! tree_fits_uhwi_p (len))
12684 {
12685 /* If LEN is not constant, try MAXLEN too.
12686 For MAXLEN only allow optimizing into non-_ocs function
12687 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12688 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12689 {
12690 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12691 {
12692 /* (void) __mempcpy_chk () can be optimized into
12693 (void) __memcpy_chk (). */
12694 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12695 if (!fn)
12696 return NULL_TREE;
12697
12698 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12699 }
12700 return NULL_TREE;
12701 }
12702 }
12703 else
12704 maxlen = len;
12705
12706 if (tree_int_cst_lt (size, maxlen))
12707 return NULL_TREE;
12708 }
12709
12710 fn = NULL_TREE;
12711 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12712 mem{cpy,pcpy,move,set} is available. */
12713 switch (fcode)
12714 {
12715 case BUILT_IN_MEMCPY_CHK:
12716 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12717 break;
12718 case BUILT_IN_MEMPCPY_CHK:
12719 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12720 break;
12721 case BUILT_IN_MEMMOVE_CHK:
12722 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12723 break;
12724 case BUILT_IN_MEMSET_CHK:
12725 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12726 break;
12727 default:
12728 break;
12729 }
12730
12731 if (!fn)
12732 return NULL_TREE;
12733
12734 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12735 }
12736
12737 /* Fold a call to the __st[rp]cpy_chk builtin.
12738 DEST, SRC, and SIZE are the arguments to the call.
12739 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12740 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12741 strings passed as second argument. */
12742
12743 tree
12744 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12745 tree src, tree size,
12746 tree maxlen, bool ignore,
12747 enum built_in_function fcode)
12748 {
12749 tree len, fn;
12750
12751 if (!validate_arg (dest, POINTER_TYPE)
12752 || !validate_arg (src, POINTER_TYPE)
12753 || !validate_arg (size, INTEGER_TYPE))
12754 return NULL_TREE;
12755
12756 /* If SRC and DEST are the same (and not volatile), return DEST. */
12757 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12758 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12759
12760 if (! tree_fits_uhwi_p (size))
12761 return NULL_TREE;
12762
12763 if (! integer_all_onesp (size))
12764 {
12765 len = c_strlen (src, 1);
12766 if (! len || ! tree_fits_uhwi_p (len))
12767 {
12768 /* If LEN is not constant, try MAXLEN too.
12769 For MAXLEN only allow optimizing into non-_ocs function
12770 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12771 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12772 {
12773 if (fcode == BUILT_IN_STPCPY_CHK)
12774 {
12775 if (! ignore)
12776 return NULL_TREE;
12777
12778 /* If return value of __stpcpy_chk is ignored,
12779 optimize into __strcpy_chk. */
12780 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12781 if (!fn)
12782 return NULL_TREE;
12783
12784 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12785 }
12786
12787 if (! len || TREE_SIDE_EFFECTS (len))
12788 return NULL_TREE;
12789
12790 /* If c_strlen returned something, but not a constant,
12791 transform __strcpy_chk into __memcpy_chk. */
12792 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12793 if (!fn)
12794 return NULL_TREE;
12795
12796 len = fold_convert_loc (loc, size_type_node, len);
12797 len = size_binop_loc (loc, PLUS_EXPR, len,
12798 build_int_cst (size_type_node, 1));
12799 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12800 build_call_expr_loc (loc, fn, 4,
12801 dest, src, len, size));
12802 }
12803 }
12804 else
12805 maxlen = len;
12806
12807 if (! tree_int_cst_lt (maxlen, size))
12808 return NULL_TREE;
12809 }
12810
12811 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12812 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12813 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12814 if (!fn)
12815 return NULL_TREE;
12816
12817 return build_call_expr_loc (loc, fn, 2, dest, src);
12818 }
12819
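/* A sketch of the __st[rp]cpy_chk folds above (hypothetical d/s/sz):

     __strcpy_chk (d, "abc", 16)     =>  strcpy (d, "abc")   [3 < 16]
     __strcpy_chk (d, s, 16)         =>  __memcpy_chk (d, s, len + 1, 16)
                                         when c_strlen gives a usable
                                         non-constant LEN
     (void) __stpcpy_chk (d, s, sz)  =>  __strcpy_chk (d, s, sz).  */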
12820 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12821 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12822 length passed as third argument. IGNORE is true if return value can be
12823 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12824
12825 tree
12826 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12827 tree len, tree size, tree maxlen, bool ignore,
12828 enum built_in_function fcode)
12829 {
12830 tree fn;
12831
12832 if (!validate_arg (dest, POINTER_TYPE)
12833 || !validate_arg (src, POINTER_TYPE)
12834 || !validate_arg (len, INTEGER_TYPE)
12835 || !validate_arg (size, INTEGER_TYPE))
12836 return NULL_TREE;
12837
12838 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12839 {
12840 /* If return value of __stpncpy_chk is ignored,
12841 optimize into __strncpy_chk. */
12842 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12843 if (fn)
12844 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12845 }
12846
12847 if (! tree_fits_uhwi_p (size))
12848 return NULL_TREE;
12849
12850 if (! integer_all_onesp (size))
12851 {
12852 if (! tree_fits_uhwi_p (len))
12853 {
12854 /* If LEN is not constant, try MAXLEN too.
12855 For MAXLEN only allow optimizing into non-_ocs function
12856 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12857 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12858 return NULL_TREE;
12859 }
12860 else
12861 maxlen = len;
12862
12863 if (tree_int_cst_lt (size, maxlen))
12864 return NULL_TREE;
12865 }
12866
12867 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12868 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12869 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12870 if (!fn)
12871 return NULL_TREE;
12872
12873 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12874 }
12875
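/* E.g. __strncpy_chk (d, s, 8, 16) => strncpy (d, s, 8), since
   SIZE (16) is not smaller than LEN (8); similarly a value-ignored
   __stpncpy_chk call degrades to __strncpy_chk first (hypothetical
   d/s).  */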
12876 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12877 are the arguments to the call. */
12878
12879 static tree
12880 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12881 tree src, tree size)
12882 {
12883 tree fn;
12884 const char *p;
12885
12886 if (!validate_arg (dest, POINTER_TYPE)
12887 || !validate_arg (src, POINTER_TYPE)
12888 || !validate_arg (size, INTEGER_TYPE))
12889 return NULL_TREE;
12890
12891 p = c_getstr (src);
12892 /* If the SRC parameter is "", return DEST. */
12893 if (p && *p == '\0')
12894 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12895
12896 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12897 return NULL_TREE;
12898
12899 /* If __builtin_strcat_chk is used, assume strcat is available. */
12900 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12901 if (!fn)
12902 return NULL_TREE;
12903
12904 return build_call_expr_loc (loc, fn, 2, dest, src);
12905 }
12906
12907 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12908 LEN, and SIZE. */
12909
12910 static tree
12911 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12912 tree dest, tree src, tree len, tree size)
12913 {
12914 tree fn;
12915 const char *p;
12916
12917 if (!validate_arg (dest, POINTER_TYPE)
12918 || !validate_arg (src, POINTER_TYPE)
12919 || !validate_arg (len, INTEGER_TYPE)
12920 || !validate_arg (size, INTEGER_TYPE))
12921 return NULL_TREE;
12922
12923 p = c_getstr (src);
12924 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12925 if (p && *p == '\0')
12926 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12927 else if (integer_zerop (len))
12928 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12929
12930 if (! tree_fits_uhwi_p (size))
12931 return NULL_TREE;
12932
12933 if (! integer_all_onesp (size))
12934 {
12935 tree src_len = c_strlen (src, 1);
12936 if (src_len
12937 && tree_fits_uhwi_p (src_len)
12938 && tree_fits_uhwi_p (len)
12939 && ! tree_int_cst_lt (len, src_len))
12940 {
12941 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12942 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12943 if (!fn)
12944 return NULL_TREE;
12945
12946 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12947 }
12948 return NULL_TREE;
12949 }
12950
12951 /* If __builtin_strncat_chk is used, assume strncat is available. */
12952 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
12953 if (!fn)
12954 return NULL_TREE;
12955
12956 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12957 }
12958
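/* E.g. with a determinate SIZE (hypothetical d/sz):

     __strncat_chk (d, "abc", 8, sz)  =>  __strcat_chk (d, "abc", sz)

   because LEN (8) >= strlen ("abc") (3), so the LEN bound can never
   shorten the copy.  */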
12959 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12960 Return NULL_TREE if a normal call should be emitted rather than
12961 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12962 or BUILT_IN_VSPRINTF_CHK. */
12963
12964 static tree
12965 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12966 enum built_in_function fcode)
12967 {
12968 tree dest, size, len, fn, fmt, flag;
12969 const char *fmt_str;
12970
12971 /* Verify the required arguments in the original call. */
12972 if (nargs < 4)
12973 return NULL_TREE;
12974 dest = args[0];
12975 if (!validate_arg (dest, POINTER_TYPE))
12976 return NULL_TREE;
12977 flag = args[1];
12978 if (!validate_arg (flag, INTEGER_TYPE))
12979 return NULL_TREE;
12980 size = args[2];
12981 if (!validate_arg (size, INTEGER_TYPE))
12982 return NULL_TREE;
12983 fmt = args[3];
12984 if (!validate_arg (fmt, POINTER_TYPE))
12985 return NULL_TREE;
12986
12987 if (! tree_fits_uhwi_p (size))
12988 return NULL_TREE;
12989
12990 len = NULL_TREE;
12991
12992 if (!init_target_chars ())
12993 return NULL_TREE;
12994
12995 /* Check whether the format is a literal string constant. */
12996 fmt_str = c_getstr (fmt);
12997 if (fmt_str != NULL)
12998 {
12999 /* If the format doesn't contain % args or %%, we know the size. */
13000 if (strchr (fmt_str, target_percent) == 0)
13001 {
13002 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13003 len = build_int_cstu (size_type_node, strlen (fmt_str));
13004 }
13005 /* If the format is "%s" and the first ... argument is a string
13006 literal, we know the size too. */
13007 else if (fcode == BUILT_IN_SPRINTF_CHK
13008 && strcmp (fmt_str, target_percent_s) == 0)
13009 {
13010 tree arg;
13011
13012 if (nargs == 5)
13013 {
13014 arg = args[4];
13015 if (validate_arg (arg, POINTER_TYPE))
13016 {
13017 len = c_strlen (arg, 1);
13018 if (! len || ! tree_fits_uhwi_p (len))
13019 len = NULL_TREE;
13020 }
13021 }
13022 }
13023 }
13024
13025 if (! integer_all_onesp (size))
13026 {
13027 if (! len || ! tree_int_cst_lt (len, size))
13028 return NULL_TREE;
13029 }
13030
13031 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13032 or if format doesn't contain % chars or is "%s". */
13033 if (! integer_zerop (flag))
13034 {
13035 if (fmt_str == NULL)
13036 return NULL_TREE;
13037 if (strchr (fmt_str, target_percent) != NULL
13038 && strcmp (fmt_str, target_percent_s))
13039 return NULL_TREE;
13040 }
13041
13042 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13043 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13044 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13045 if (!fn)
13046 return NULL_TREE;
13047
13048 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13049 }
13050
13051 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13052 a normal call should be emitted rather than expanding the function
13053 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13054
13055 static tree
13056 fold_builtin_sprintf_chk (location_t loc, tree exp,
13057 enum built_in_function fcode)
13058 {
13059 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13060 CALL_EXPR_ARGP (exp), fcode);
13061 }
13062
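/* A sketch (hypothetical d/s):

     __sprintf_chk (d, 0, 16, "hi")     =>  sprintf (d, "hi")    [2 < 16]
     __sprintf_chk (d, 0, -1, "%s", s)  =>  sprintf (d, "%s", s)

   where the second form relies on SIZE == (size_t) -1 meaning the
   object size is unknown; fold_builtin_sprintf can then take over.  */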
13063 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13064 NULL_TREE if a normal call should be emitted rather than expanding
13065 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13066 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13067 passed as second argument. */
13068
13069 static tree
13070 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13071 tree maxlen, enum built_in_function fcode)
13072 {
13073 tree dest, size, len, fn, fmt, flag;
13074 const char *fmt_str;
13075
13076 /* Verify the required arguments in the original call. */
13077 if (nargs < 5)
13078 return NULL_TREE;
13079 dest = args[0];
13080 if (!validate_arg (dest, POINTER_TYPE))
13081 return NULL_TREE;
13082 len = args[1];
13083 if (!validate_arg (len, INTEGER_TYPE))
13084 return NULL_TREE;
13085 flag = args[2];
13086 if (!validate_arg (flag, INTEGER_TYPE))
13087 return NULL_TREE;
13088 size = args[3];
13089 if (!validate_arg (size, INTEGER_TYPE))
13090 return NULL_TREE;
13091 fmt = args[4];
13092 if (!validate_arg (fmt, POINTER_TYPE))
13093 return NULL_TREE;
13094
13095 if (! tree_fits_uhwi_p (size))
13096 return NULL_TREE;
13097
13098 if (! integer_all_onesp (size))
13099 {
13100 if (! tree_fits_uhwi_p (len))
13101 {
13102 /* If LEN is not constant, try MAXLEN too.
13103 For MAXLEN only allow optimizing into non-_ocs function
13104 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13105 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13106 return NULL_TREE;
13107 }
13108 else
13109 maxlen = len;
13110
13111 if (tree_int_cst_lt (size, maxlen))
13112 return NULL_TREE;
13113 }
13114
13115 if (!init_target_chars ())
13116 return NULL_TREE;
13117
13118 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13119 or if format doesn't contain % chars or is "%s". */
13120 if (! integer_zerop (flag))
13121 {
13122 fmt_str = c_getstr (fmt);
13123 if (fmt_str == NULL)
13124 return NULL_TREE;
13125 if (strchr (fmt_str, target_percent) != NULL
13126 && strcmp (fmt_str, target_percent_s))
13127 return NULL_TREE;
13128 }
13129
13130 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13131 available. */
13132 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13133 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13134 if (!fn)
13135 return NULL_TREE;
13136
13137 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13138 }
13139
13140 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13141 a normal call should be emitted rather than expanding the function
13142 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13143 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13144 passed as second argument. */
13145
13146 static tree
13147 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13148 enum built_in_function fcode)
13149 {
13150 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13151 CALL_EXPR_ARGP (exp), maxlen, fcode);
13152 }
13153
13154 /* Builtins with folding operations that operate on "..." arguments
13155 need special handling; we need to store the arguments in a convenient
13156 data structure before attempting any folding. Fortunately there are
13157 only a few builtins that fall into this category. FNDECL is the
13158 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13159 result of the function call is ignored. */
13160
13161 static tree
13162 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13163 bool ignore ATTRIBUTE_UNUSED)
13164 {
13165 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13166 tree ret = NULL_TREE;
13167
13168 switch (fcode)
13169 {
13170 case BUILT_IN_SPRINTF_CHK:
13171 case BUILT_IN_VSPRINTF_CHK:
13172 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13173 break;
13174
13175 case BUILT_IN_SNPRINTF_CHK:
13176 case BUILT_IN_VSNPRINTF_CHK:
13177 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13178 break;
13179
13180 case BUILT_IN_FPCLASSIFY:
13181 ret = fold_builtin_fpclassify (loc, exp);
13182 break;
13183
13184 default:
13185 break;
13186 }
13187 if (ret)
13188 {
13189 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13190 SET_EXPR_LOCATION (ret, loc);
13191 TREE_NO_WARNING (ret) = 1;
13192 return ret;
13193 }
13194 return NULL_TREE;
13195 }
13196
13197 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13198 FMT and ARG are the arguments to the call; we don't fold cases with
13199 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13200
13201 Return NULL_TREE if no simplification was possible, otherwise return the
13202 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13203 code of the function to be simplified. */
13204
13205 static tree
13206 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13207 tree arg, bool ignore,
13208 enum built_in_function fcode)
13209 {
13210 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13211 const char *fmt_str = NULL;
13212
13213 /* If the return value is used, don't do the transformation. */
13214 if (! ignore)
13215 return NULL_TREE;
13216
13217 /* Verify the required arguments in the original call. */
13218 if (!validate_arg (fmt, POINTER_TYPE))
13219 return NULL_TREE;
13220
13221 /* Check whether the format is a literal string constant. */
13222 fmt_str = c_getstr (fmt);
13223 if (fmt_str == NULL)
13224 return NULL_TREE;
13225
13226 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13227 {
13228 /* If we're using an unlocked function, assume the other
13229 unlocked functions exist explicitly. */
13230 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13231 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13232 }
13233 else
13234 {
13235 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13236 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13237 }
13238
13239 if (!init_target_chars ())
13240 return NULL_TREE;
13241
13242 if (strcmp (fmt_str, target_percent_s) == 0
13243 || strchr (fmt_str, target_percent) == NULL)
13244 {
13245 const char *str;
13246
13247 if (strcmp (fmt_str, target_percent_s) == 0)
13248 {
13249 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13250 return NULL_TREE;
13251
13252 if (!arg || !validate_arg (arg, POINTER_TYPE))
13253 return NULL_TREE;
13254
13255 str = c_getstr (arg);
13256 if (str == NULL)
13257 return NULL_TREE;
13258 }
13259 else
13260 {
13261 /* The format specifier doesn't contain any '%' characters. */
13262 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13263 && arg)
13264 return NULL_TREE;
13265 str = fmt_str;
13266 }
13267
13268 /* If the string was "", printf does nothing. */
13269 if (str[0] == '\0')
13270 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13271
13272 /* If the string has length of 1, call putchar. */
13273 if (str[1] == '\0')
13274 {
13275 /* Given printf ("c"), where c is any single character,
13276 convert "c"[0] to an int and pass that to the replacement
13277 function. */
13278 newarg = build_int_cst (integer_type_node, str[0]);
13279 if (fn_putchar)
13280 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13281 }
13282 else
13283 {
13284 /* If the string was "string\n", call puts("string"). */
13285 size_t len = strlen (str);
13286 if ((unsigned char)str[len - 1] == target_newline
13287 && (size_t) (int) len == len
13288 && (int) len > 0)
13289 {
13290 char *newstr;
13291 tree offset_node, string_cst;
13292
13293 /* Create a NUL-terminated string that's one char shorter
13294 than the original, stripping off the trailing '\n'. */
13295 newarg = build_string_literal (len, str);
13296 string_cst = string_constant (newarg, &offset_node);
13297 gcc_checking_assert (string_cst
13298 && (TREE_STRING_LENGTH (string_cst)
13299 == (int) len)
13300 && integer_zerop (offset_node)
13301 && (unsigned char)
13302 TREE_STRING_POINTER (string_cst)[len - 1]
13303 == target_newline);
13304 /* build_string_literal creates a new STRING_CST,
13305 modify it in place to avoid double copying. */
13306 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13307 newstr[len - 1] = '\0';
13308 if (fn_puts)
13309 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13310 }
13311 else
13312 /* We'd like to arrange to call fputs(string,stdout) here,
13313 but we need stdout and don't have a way to get it yet. */
13314 return NULL_TREE;
13315 }
13316 }
13317
13318 /* The other optimizations can be done only on the non-va_list variants. */
13319 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13320 return NULL_TREE;
13321
13322 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13323 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13324 {
13325 if (!arg || !validate_arg (arg, POINTER_TYPE))
13326 return NULL_TREE;
13327 if (fn_puts)
13328 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13329 }
13330
13331 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13332 else if (strcmp (fmt_str, target_percent_c) == 0)
13333 {
13334 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13335 return NULL_TREE;
13336 if (fn_putchar)
13337 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13338 }
13339
13340 if (!call)
13341 return NULL_TREE;
13342
13343 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13344 }
13345
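/* The net effect, when the printf value is ignored (hypothetical
   s/c):

     printf ("x")         =>  putchar ('x')
     printf ("abc\n")     =>  puts ("abc")
     printf ("%s\n", s)   =>  puts (s)
     printf ("%c", c)     =>  putchar (c)
     printf ("abc")       =>  not folded; fputs would need stdout.  */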
13346 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13347 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13348 more than 3 arguments, and ARG may be null in the 2-argument case.
13349
13350 Return NULL_TREE if no simplification was possible, otherwise return the
13351 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13352 code of the function to be simplified. */
13353
13354 static tree
13355 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13356 tree fmt, tree arg, bool ignore,
13357 enum built_in_function fcode)
13358 {
13359 tree fn_fputc, fn_fputs, call = NULL_TREE;
13360 const char *fmt_str = NULL;
13361
13362 /* If the return value is used, don't do the transformation. */
13363 if (! ignore)
13364 return NULL_TREE;
13365
13366 /* Verify the required arguments in the original call. */
13367 if (!validate_arg (fp, POINTER_TYPE))
13368 return NULL_TREE;
13369 if (!validate_arg (fmt, POINTER_TYPE))
13370 return NULL_TREE;
13371
13372 /* Check whether the format is a literal string constant. */
13373 fmt_str = c_getstr (fmt);
13374 if (fmt_str == NULL)
13375 return NULL_TREE;
13376
13377 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13378 {
13379 /* If we're using an unlocked function, assume the other
13380 unlocked functions exist explicitly. */
13381 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13382 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13383 }
13384 else
13385 {
13386 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13387 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13388 }
13389
13390 if (!init_target_chars ())
13391 return NULL_TREE;
13392
13393 /* If the format doesn't contain % args or %%, use fputs. */
13394 if (strchr (fmt_str, target_percent) == NULL)
13395 {
13396 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13397 && arg)
13398 return NULL_TREE;
13399
13400 /* If the format specifier was "", fprintf does nothing. */
13401 if (fmt_str[0] == '\0')
13402 {
13403 /* If FP has side-effects, just wait until gimplification is
13404 done. */
13405 if (TREE_SIDE_EFFECTS (fp))
13406 return NULL_TREE;
13407
13408 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13409 }
13410
13411 /* When "string" doesn't contain %, replace all cases of
13412 fprintf (fp, string) with fputs (string, fp). The fputs
13413 builtin will take care of special cases like length == 1. */
13414 if (fn_fputs)
13415 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13416 }
13417
13418 /* The other optimizations can be done only on the non-va_list variants. */
13419 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13420 return NULL_TREE;
13421
13422 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13423 else if (strcmp (fmt_str, target_percent_s) == 0)
13424 {
13425 if (!arg || !validate_arg (arg, POINTER_TYPE))
13426 return NULL_TREE;
13427 if (fn_fputs)
13428 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13429 }
13430
13431 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13432 else if (strcmp (fmt_str, target_percent_c) == 0)
13433 {
13434 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13435 return NULL_TREE;
13436 if (fn_fputc)
13437 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13438 }
13439
13440 if (!call)
13441 return NULL_TREE;
13442 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13443 }
13444
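/* Similarly, with the value ignored (hypothetical fp/s/c):

     fprintf (fp, "abc")      =>  fputs ("abc", fp)
     fprintf (fp, "%s", s)    =>  fputs (s, fp)
     fprintf (fp, "%c", c)    =>  fputc (c, fp).  */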
13445 /* Initialize format string characters in the target charset. */
13446
13447 static bool
13448 init_target_chars (void)
13449 {
13450 static bool init;
13451 if (!init)
13452 {
13453 target_newline = lang_hooks.to_target_charset ('\n');
13454 target_percent = lang_hooks.to_target_charset ('%');
13455 target_c = lang_hooks.to_target_charset ('c');
13456 target_s = lang_hooks.to_target_charset ('s');
13457 if (target_newline == 0 || target_percent == 0 || target_c == 0
13458 || target_s == 0)
13459 return false;
13460
13461 target_percent_c[0] = target_percent;
13462 target_percent_c[1] = target_c;
13463 target_percent_c[2] = '\0';
13464
13465 target_percent_s[0] = target_percent;
13466 target_percent_s[1] = target_s;
13467 target_percent_s[2] = '\0';
13468
13469 target_percent_s_newline[0] = target_percent;
13470 target_percent_s_newline[1] = target_s;
13471 target_percent_s_newline[2] = target_newline;
13472 target_percent_s_newline[3] = '\0';
13473
13474 init = true;
13475 }
13476 return true;
13477 }
13478
13479 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13480 and no overflow/underflow occurred. INEXACT is true if M was not
13481 exactly calculated. TYPE is the tree type for the result. This
13482 function assumes that you cleared the MPFR flags and then
13483 calculated M to see if anything subsequently set a flag prior to
13484 entering this function. Return NULL_TREE if any checks fail. */
13485
13486 static tree
13487 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13488 {
13489 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13490 overflow/underflow occurred. If -frounding-math, proceed iff the
13491 result of calling FUNC was exact. */
13492 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13493 && (!flag_rounding_math || !inexact))
13494 {
13495 REAL_VALUE_TYPE rr;
13496
13497 real_from_mpfr (&rr, m, type, GMP_RNDN);
13498 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13499 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13500 but the mpfr_t is not, then we underflowed in the
13501 conversion. */
13502 if (real_isfinite (&rr)
13503 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13504 {
13505 REAL_VALUE_TYPE rmode;
13506
13507 real_convert (&rmode, TYPE_MODE (type), &rr);
13508 /* Proceed iff the specified mode can hold the value. */
13509 if (real_identical (&rmode, &rr))
13510 return build_real (type, rmode);
13511 }
13512 }
13513 return NULL_TREE;
13514 }
13515
13516 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13517 number and no overflow/underflow occurred. INEXACT is true if M
13518 was not exactly calculated. TYPE is the tree type for the result.
13519 This function assumes that you cleared the MPFR flags and then
13520 calculated M to see if anything subsequently set a flag prior to
13521 entering this function. Return NULL_TREE if any checks fail;
13522 if FORCE_CONVERT is true, the checks are bypassed. */
13523
13524 static tree
13525 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13526 {
13527 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13528 overflow/underflow occurred. If -frounding-math, proceed iff the
13529 result of calling FUNC was exact. */
13530 if (force_convert
13531 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13532 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13533 && (!flag_rounding_math || !inexact)))
13534 {
13535 REAL_VALUE_TYPE re, im;
13536
13537 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13538 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13539 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13540 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13541 but the mpfr_t is not, then we underflowed in the
13542 conversion. */
13543 if (force_convert
13544 || (real_isfinite (&re) && real_isfinite (&im)
13545 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13546 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13547 {
13548 REAL_VALUE_TYPE re_mode, im_mode;
13549
13550 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13551 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13552 /* Proceed iff the specified mode can hold the value. */
13553 if (force_convert
13554 || (real_identical (&re_mode, &re)
13555 && real_identical (&im_mode, &im)))
13556 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13557 build_real (TREE_TYPE (type), im_mode));
13558 }
13559 }
13560 return NULL_TREE;
13561 }
13562
13563 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13564 FUNC on it and return the resulting value as a tree with type TYPE.
13565 If MIN and/or MAX are not NULL, then the supplied ARG must be
13566 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13567 acceptable values, otherwise they are not. The mpfr precision is
13568 set to the precision of TYPE. We assume that function FUNC returns
13569 zero if the result could be calculated exactly within the requested
13570 precision. */
13571
13572 static tree
13573 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13574 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13575 bool inclusive)
13576 {
13577 tree result = NULL_TREE;
13578
13579 STRIP_NOPS (arg);
13580
13581 /* To proceed, MPFR must exactly represent the target floating point
13582 format, which only happens when the target base equals two. */
13583 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13584 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13585 {
13586 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13587
13588 if (real_isfinite (ra)
13589 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13590 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13591 {
13592 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13593 const int prec = fmt->p;
13594 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13595 int inexact;
13596 mpfr_t m;
13597
13598 mpfr_init2 (m, prec);
13599 mpfr_from_real (m, ra, GMP_RNDN);
13600 mpfr_clear_flags ();
13601 inexact = func (m, m, rnd);
13602 result = do_mpfr_ckconv (m, type, inexact);
13603 mpfr_clear (m);
13604 }
13605 }
13606
13607 return result;
13608 }
13609
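/* A typical use from the constant folders, as a sketch:

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false)

   folds sin (1.0) to the correctly rounded REAL_CST for TYPE, while
   a bounded caller such as

     do_mpfr_arg1 (arg, type, mpfr_log, &dconst0, NULL, false)

   rejects arguments <= 0 via the exclusive MIN bound.  */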
13610 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13611 FUNC on it and return the resulting value as a tree with type TYPE.
13612 The mpfr precision is set to the precision of TYPE. We assume that
13613 function FUNC returns zero if the result could be calculated
13614 exactly within the requested precision. */
13615
13616 static tree
13617 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13618 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13619 {
13620 tree result = NULL_TREE;
13621
13622 STRIP_NOPS (arg1);
13623 STRIP_NOPS (arg2);
13624
13625 /* To proceed, MPFR must exactly represent the target floating point
13626 format, which only happens when the target base equals two. */
13627 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13628 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13629 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13630 {
13631 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13632 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13633
13634 if (real_isfinite (ra1) && real_isfinite (ra2))
13635 {
13636 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13637 const int prec = fmt->p;
13638 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13639 int inexact;
13640 mpfr_t m1, m2;
13641
13642 mpfr_inits2 (prec, m1, m2, NULL);
13643 mpfr_from_real (m1, ra1, GMP_RNDN);
13644 mpfr_from_real (m2, ra2, GMP_RNDN);
13645 mpfr_clear_flags ();
13646 inexact = func (m1, m1, m2, rnd);
13647 result = do_mpfr_ckconv (m1, type, inexact);
13648 mpfr_clears (m1, m2, NULL);
13649 }
13650 }
13651
13652 return result;
13653 }
13654
13655 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13656 FUNC on it and return the resulting value as a tree with type TYPE.
13657 The mpfr precision is set to the precision of TYPE. We assume that
13658 function FUNC returns zero if the result could be calculated
13659 exactly within the requested precision. */
13660
13661 static tree
13662 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13663 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13664 {
13665 tree result = NULL_TREE;
13666
13667 STRIP_NOPS (arg1);
13668 STRIP_NOPS (arg2);
13669 STRIP_NOPS (arg3);
13670
13671 /* To proceed, MPFR must exactly represent the target floating point
13672 format, which only happens when the target base equals two. */
13673 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13674 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13675 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13676 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13677 {
13678 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13679 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13680 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13681
13682 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13683 {
13684 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13685 const int prec = fmt->p;
13686 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13687 int inexact;
13688 mpfr_t m1, m2, m3;
13689
13690 mpfr_inits2 (prec, m1, m2, m3, NULL);
13691 mpfr_from_real (m1, ra1, GMP_RNDN);
13692 mpfr_from_real (m2, ra2, GMP_RNDN);
13693 mpfr_from_real (m3, ra3, GMP_RNDN);
13694 mpfr_clear_flags ();
13695 inexact = func (m1, m1, m2, m3, rnd);
13696 result = do_mpfr_ckconv (m1, type, inexact);
13697 mpfr_clears (m1, m2, m3, NULL);
13698 }
13699 }
13700
13701 return result;
13702 }
13703
13704 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13705 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13706 If ARG_SINP and ARG_COSP are NULL then the result is returned
13707 as a complex value.
13708 The type is taken from the type of ARG and is used for setting the
13709 precision of the calculation and results. */
13710
13711 static tree
13712 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13713 {
13714 tree const type = TREE_TYPE (arg);
13715 tree result = NULL_TREE;
13716
13717 STRIP_NOPS (arg);
13718
13719 /* To proceed, MPFR must exactly represent the target floating point
13720 format, which only happens when the target base equals two. */
13721 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13722 && TREE_CODE (arg) == REAL_CST
13723 && !TREE_OVERFLOW (arg))
13724 {
13725 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13726
13727 if (real_isfinite (ra))
13728 {
13729 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13730 const int prec = fmt->p;
13731 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13732 tree result_s, result_c;
13733 int inexact;
13734 mpfr_t m, ms, mc;
13735
13736 mpfr_inits2 (prec, m, ms, mc, NULL);
13737 mpfr_from_real (m, ra, GMP_RNDN);
13738 mpfr_clear_flags ();
13739 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13740 result_s = do_mpfr_ckconv (ms, type, inexact);
13741 result_c = do_mpfr_ckconv (mc, type, inexact);
13742 mpfr_clears (m, ms, mc, NULL);
13743 if (result_s && result_c)
13744 {
13745 /* If we are to return in a complex value do so. */
13746 if (!arg_sinp && !arg_cosp)
13747 return build_complex (build_complex_type (type),
13748 result_c, result_s);
13749
13750 /* Dereference the sin/cos pointer arguments. */
13751 arg_sinp = build_fold_indirect_ref (arg_sinp);
13752 arg_cosp = build_fold_indirect_ref (arg_cosp);
13753 /* Proceed iff valid pointer types were passed in. */
13754 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13755 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13756 {
13757 /* Set the values. */
13758 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13759 result_s);
13760 TREE_SIDE_EFFECTS (result_s) = 1;
13761 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13762 result_c);
13763 TREE_SIDE_EFFECTS (result_c) = 1;
13764 /* Combine the assignments into a compound expr. */
13765 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13766 result_s, result_c));
13767 }
13768 }
13769 }
13770 }
13771 return result;
13772 }
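/* E.g. sincos (0.5, &s, &c) with a constant argument folds to the
   compound assignment built above, roughly *&s = 0.479...; *&c = 0.877...;
   with both pointer arguments NULL the result is instead a COMPLEX_CST
   whose real part is the cosine and imaginary part the sine (the
   cexpi case).  Hypothetical s/c.  */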
13773
13774 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13775 two-argument mpfr order N Bessel function FUNC on them and return
13776 the resulting value as a tree with type TYPE. The mpfr precision
13777 is set to the precision of TYPE. We assume that function FUNC
13778 returns zero if the result could be calculated exactly within the
13779 requested precision. */
13780 static tree
13781 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13782 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13783 const REAL_VALUE_TYPE *min, bool inclusive)
13784 {
13785 tree result = NULL_TREE;
13786
13787 STRIP_NOPS (arg1);
13788 STRIP_NOPS (arg2);
13789
13790 /* To proceed, MPFR must exactly represent the target floating point
13791 format, which only happens when the target base equals two. */
13792 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13793 && tree_fits_shwi_p (arg1)
13794 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13795 {
13796 const HOST_WIDE_INT n = tree_to_shwi (arg1);
13797 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13798
13799 if (n == (long)n
13800 && real_isfinite (ra)
13801 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13802 {
13803 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13804 const int prec = fmt->p;
13805 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13806 int inexact;
13807 mpfr_t m;
13808
13809 mpfr_init2 (m, prec);
13810 mpfr_from_real (m, ra, GMP_RNDN);
13811 mpfr_clear_flags ();
13812 inexact = func (m, n, m, rnd);
13813 result = do_mpfr_ckconv (m, type, inexact);
13814 mpfr_clear (m);
13815 }
13816 }
13817
13818 return result;
13819 }
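
/* As an illustrative sketch: FUNC is expected to be mpfr_jn or mpfr_yn,
   so with a constant call such as

     #include <math.h>
     double d = jn (2, 1.0);

   the initializer can fold to the correctly rounded value of the
   order-2 Bessel function J at 1.0 (roughly 0.1149).  For yn, a
   non-NULL MIN excludes non-positive arguments, so e.g. yn (2, -1.0)
   stays a call.  */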
13820
13821 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13822 the integer pointed to by ARG_QUO and return the remainder. The type
13823 is taken from the type of ARG0 and is used for setting the precision
13824 of the calculation and results. */
13825
13826 static tree
13827 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13828 {
13829 tree const type = TREE_TYPE (arg0);
13830 tree result = NULL_TREE;
13831
13832 STRIP_NOPS (arg0);
13833 STRIP_NOPS (arg1);
13834
13835 /* To proceed, MPFR must exactly represent the target floating point
13836 format, which only happens when the target base equals two. */
13837 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13838 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13839 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13840 {
13841 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13842 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13843
13844 if (real_isfinite (ra0) && real_isfinite (ra1))
13845 {
13846 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13847 const int prec = fmt->p;
13848 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13849 tree result_rem;
13850 long integer_quo;
13851 mpfr_t m0, m1;
13852
13853 mpfr_inits2 (prec, m0, m1, NULL);
13854 mpfr_from_real (m0, ra0, GMP_RNDN);
13855 mpfr_from_real (m1, ra1, GMP_RNDN);
13856 mpfr_clear_flags ();
13857 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13858 /* Remquo is independent of the rounding mode, so pass
13859 inexact=0 to do_mpfr_ckconv(). */
13860 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13861 mpfr_clears (m0, m1, NULL);
13862 if (result_rem)
13863 {
13864 /* MPFR calculates quo in the host's long, so it may
13865 return more bits in quo than the target int can hold
13866 if sizeof(host long) > sizeof(target int). This can
13867 happen even for native compilers in LP64 mode. In
13868 these cases, reduce the quo value modulo 2 raised to
13869 (INT_TYPE_SIZE - 1), keeping only the bits that fit
13870 in the target int while leaving one for the sign. */
13871 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13872 integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));
13873
13874 /* Dereference the quo pointer argument. */
13875 arg_quo = build_fold_indirect_ref (arg_quo);
13876 /* Proceed iff a valid pointer type was passed in. */
13877 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13878 {
13879 /* Set the value. */
13880 tree result_quo
13881 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13882 build_int_cst (TREE_TYPE (arg_quo),
13883 integer_quo));
13884 TREE_SIDE_EFFECTS (result_quo) = 1;
13885 /* Combine the quo assignment with the rem. */
13886 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13887 result_quo, result_rem));
13888 }
13889 }
13890 }
13891 }
13892 return result;
13893 }
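
/* As an illustrative sketch with constant operands:

     #include <math.h>
     int q;
     double r = remquo (5.0, 3.0, &q);

   the quotient 5.0/3.0 rounds to the nearest integer 2, so this can
   fold to q = 2 and r = -1.0 (i.e. 5.0 - 2*3.0), combined above into
   a single COMPOUND_EXPR.  */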
13894
13895 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13896 resulting value as a tree with type TYPE. The mpfr precision is
13897 set to the precision of TYPE. We assume that this mpfr function
13898 returns zero if the result could be calculated exactly within the
13899 requested precision. In addition, the integer pointer represented
13900 by ARG_SG will be dereferenced and set to the appropriate signgam
13901 (-1 or 1) value. */
13902
13903 static tree
13904 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13905 {
13906 tree result = NULL_TREE;
13907
13908 STRIP_NOPS (arg);
13909
13910 /* To proceed, MPFR must exactly represent the target floating point
13911 format, which only happens when the target base equals two. Also
13912 verify ARG is a constant and that ARG_SG is an int pointer. */
13913 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13914 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13915 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13916 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13917 {
13918 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13919
13920 /* In addition to NaN and Inf, the argument cannot be zero or a
13921 negative integer. */
13922 if (real_isfinite (ra)
13923 && ra->cl != rvc_zero
13924 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13925 {
13926 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13927 const int prec = fmt->p;
13928 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13929 int inexact, sg;
13930 mpfr_t m;
13931 tree result_lg;
13932
13933 mpfr_init2 (m, prec);
13934 mpfr_from_real (m, ra, GMP_RNDN);
13935 mpfr_clear_flags ();
13936 inexact = mpfr_lgamma (m, &sg, m, rnd);
13937 result_lg = do_mpfr_ckconv (m, type, inexact);
13938 mpfr_clear (m);
13939 if (result_lg)
13940 {
13941 tree result_sg;
13942
13943 /* Dereference the arg_sg pointer argument. */
13944 arg_sg = build_fold_indirect_ref (arg_sg);
13945 /* Assign the signgam value into *arg_sg. */
13946 result_sg = fold_build2 (MODIFY_EXPR,
13947 TREE_TYPE (arg_sg), arg_sg,
13948 build_int_cst (TREE_TYPE (arg_sg), sg));
13949 TREE_SIDE_EFFECTS (result_sg) = 1;
13950 /* Combine the signgam assignment with the lgamma result. */
13951 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13952 result_sg, result_lg));
13953 }
13954 }
13955 }
13956
13957 return result;
13958 }
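
/* As an illustrative sketch with a constant argument:

     #include <math.h>
     int sg;
     double d = lgamma_r (-0.5, &sg);

   gamma of -0.5 is -2*sqrt(pi), which is negative, so this can fold
   to sg = -1 together with d = log (2*sqrt(pi)), roughly 1.2655,
   joined into one COMPOUND_EXPR as above.  */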
13959
13960 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13961 function FUNC on it and return the resulting value as a tree with
13962 type TYPE. The mpfr precision is set to the precision of TYPE. We
13963 assume that function FUNC returns zero if the result could be
13964 calculated exactly within the requested precision. */
13965
13966 static tree
13967 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13968 {
13969 tree result = NULL_TREE;
13970
13971 STRIP_NOPS (arg);
13972
13973 /* To proceed, MPFR must exactly represent the target floating point
13974 format, which only happens when the target base equals two. */
13975 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13977 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13978 {
13979 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13980 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13981
13982 if (real_isfinite (re) && real_isfinite (im))
13983 {
13984 const struct real_format *const fmt =
13985 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13986 const int prec = fmt->p;
13987 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13988 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13989 int inexact;
13990 mpc_t m;
13991
13992 mpc_init2 (m, prec);
13993 mpfr_from_real (mpc_realref (m), re, rnd);
13994 mpfr_from_real (mpc_imagref (m), im, rnd);
13995 mpfr_clear_flags ();
13996 inexact = func (m, m, crnd);
13997 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13998 mpc_clear (m);
13999 }
14000 }
14001
14002 return result;
14003 }
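
/* As an illustrative sketch: FUNC here is an mpc entry point such as
   mpc_sqrt or mpc_cos, so a constant call like

     #include <complex.h>
     double _Complex z = csqrt (-4.0 + 0.0i);

   can fold to the complex constant 0.0 + 2.0i; the mpc result is
   exact in this case, so the conversion back succeeds even when only
   exact results are accepted.  */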
14004
14005 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
14006 mpc function FUNC on them and return the resulting value as a tree
14007 with type TYPE. The mpfr precision is set to the precision of
14008 TYPE. We assume that function FUNC returns zero if the result
14009 could be calculated exactly within the requested precision. If
14010 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14011 in the arguments and/or results. */
14012
14013 tree
14014 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14015 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14016 {
14017 tree result = NULL_TREE;
14018
14019 STRIP_NOPS (arg0);
14020 STRIP_NOPS (arg1);
14021
14022 /* To proceed, MPFR must exactly represent the target floating point
14023 format, which only happens when the target base equals two. */
14024 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14025 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14026 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14027 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14028 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14029 {
14030 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14031 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14032 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14033 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14034
14035 if (do_nonfinite
14036 || (real_isfinite (re0) && real_isfinite (im0)
14037 && real_isfinite (re1) && real_isfinite (im1)))
14038 {
14039 const struct real_format *const fmt =
14040 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14041 const int prec = fmt->p;
14042 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14043 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14044 int inexact;
14045 mpc_t m0, m1;
14046
14047 mpc_init2 (m0, prec);
14048 mpc_init2 (m1, prec);
14049 mpfr_from_real (mpc_realref (m0), re0, rnd);
14050 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14051 mpfr_from_real (mpc_realref (m1), re1, rnd);
14052 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14053 mpfr_clear_flags ();
14054 inexact = func (m0, m0, m1, crnd);
14055 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14056 mpc_clear (m0);
14057 mpc_clear (m1);
14058 }
14059 }
14060
14061 return result;
14062 }
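
/* As an illustrative sketch: cpow is the typical client, passing
   mpc_pow as FUNC and deriving DO_NONFINITE from
   -funsafe-math-optimizations, so that e.g.

     #include <complex.h>
     double _Complex z = cpow (2.0 + 0.0i, 3.0 + 0.0i);

   can be evaluated by mpc at compile time (to 8.0 + 0.0i here)
   instead of emitting a libm call.  */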
14063
14064 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14065 a normal call should be emitted rather than expanding the function
14066 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14067
14068 static tree
14069 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14070 {
14071 int nargs = gimple_call_num_args (stmt);
14072
14073 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14074 (nargs > 0
14075 ? gimple_call_arg_ptr (stmt, 0)
14076 : &error_mark_node), fcode);
14077 }
14078
14079 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
14080 a normal call should be emitted rather than expanding the function
14081 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14082 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
14083 length passed as the second argument. */
14084
14085 tree
14086 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14087 enum built_in_function fcode)
14088 {
14089 int nargs = gimple_call_num_args (stmt);
14090
14091 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14092 (nargs > 0
14093 ? gimple_call_arg_ptr (stmt, 0)
14094 : &error_mark_node), maxlen, fcode);
14095 }
14096
14097 /* Builtins with folding operations that operate on "..." arguments
14098 need special handling; we need to store the arguments in a convenient
14099 data structure before attempting any folding. Fortunately there are
14100 only a few builtins that fall into this category. FNDECL is the
14101 function, STMT is the call statement, and IGNORE is true if the
14102 result of the function call is ignored. */
14103
14104 static tree
14105 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14106 bool ignore ATTRIBUTE_UNUSED)
14107 {
14108 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14109 tree ret = NULL_TREE;
14110
14111 switch (fcode)
14112 {
14113 case BUILT_IN_SPRINTF_CHK:
14114 case BUILT_IN_VSPRINTF_CHK:
14115 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14116 break;
14117
14118 case BUILT_IN_SNPRINTF_CHK:
14119 case BUILT_IN_VSNPRINTF_CHK:
14120 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14121 break;
14122 default:
14123 break;
14124 }
14125 if (ret)
14126 {
14127 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14128 TREE_NO_WARNING (ret) = 1;
14129 return ret;
14130 }
14131 return NULL_TREE;
14132 }
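
/* As an illustrative sketch of what the dispatched folders can do: a
   checked call whose format contains no conversion specifications, e.g.

     char buf[16];
     void example (void)
     { __builtin___sprintf_chk (buf, 0,
                                __builtin_object_size (buf, 0), "hello"); }

   has a known result and length, so it can be rewritten as a plain
   copy of the string "hello" into buf.  */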
14133
14134 /* A wrapper function for builtin folding that prevents warnings for
14135 "statement without effect" and the like, caused by removing the
14136 call node earlier than the warning is generated. */
14137
14138 tree
14139 fold_call_stmt (gimple stmt, bool ignore)
14140 {
14141 tree ret = NULL_TREE;
14142 tree fndecl = gimple_call_fndecl (stmt);
14143 location_t loc = gimple_location (stmt);
14144 if (fndecl
14145 && TREE_CODE (fndecl) == FUNCTION_DECL
14146 && DECL_BUILT_IN (fndecl)
14147 && !gimple_call_va_arg_pack_p (stmt))
14148 {
14149 int nargs = gimple_call_num_args (stmt);
14150 tree *args = (nargs > 0
14151 ? gimple_call_arg_ptr (stmt, 0)
14152 : &error_mark_node);
14153
14154 if (avoid_folding_inline_builtin (fndecl))
14155 return NULL_TREE;
14156 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14157 {
14158 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14159 }
14160 else
14161 {
14162 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14163 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14164 if (!ret)
14165 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14166 if (ret)
14167 {
14168 /* Propagate location information from original call to
14169 expansion of builtin. Otherwise things like
14170 maybe_emit_chk_warning, that operate on the expansion
14171 of a builtin, will use the wrong location information. */
14172 if (gimple_has_location (stmt))
14173 {
14174 tree realret = ret;
14175 if (TREE_CODE (ret) == NOP_EXPR)
14176 realret = TREE_OPERAND (ret, 0);
14177 if (CAN_HAVE_LOCATION_P (realret)
14178 && !EXPR_HAS_LOCATION (realret))
14179 SET_EXPR_LOCATION (realret, loc);
14180 return realret;
14181 }
14182 return ret;
14183 }
14184 }
14185 }
14186 return NULL_TREE;
14187 }
14188
14189 /* Look up the function in builtin_decl that corresponds to DECL
14190 and set ASMSPEC as its user assembler name. DECL must be a
14191 function decl that declares a builtin. */
14192
14193 void
14194 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14195 {
14196 tree builtin;
14197 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14198 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14199 && asmspec != 0);
14200
14201 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14202 set_user_assembler_name (builtin, asmspec);
14203 switch (DECL_FUNCTION_CODE (decl))
14204 {
14205 case BUILT_IN_MEMCPY:
14206 init_block_move_fn (asmspec);
14207 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14208 break;
14209 case BUILT_IN_MEMSET:
14210 init_block_clear_fn (asmspec);
14211 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14212 break;
14213 case BUILT_IN_MEMMOVE:
14214 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14215 break;
14216 case BUILT_IN_MEMCMP:
14217 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14218 break;
14219 case BUILT_IN_ABORT:
14220 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14221 break;
14222 case BUILT_IN_FFS:
14223 if (INT_TYPE_SIZE < BITS_PER_WORD)
14224 {
14225 set_user_assembler_libfunc ("ffs", asmspec);
14226 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14227 MODE_INT, 0), "ffs");
14228 }
14229 break;
14230 default:
14231 break;
14232 }
14233 }
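
/* As an illustrative sketch: this hook is what makes a user-level
   assembler rename such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   take effect for compiler-generated block moves too; both the
   builtin decl and the memcpy libfunc are retargeted to
   "my_memcpy".  */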
14234
14235 /* Return true if DECL is a builtin that expands to a constant or similarly
14236 simple code. */
14237 bool
14238 is_simple_builtin (tree decl)
14239 {
14240 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14241 switch (DECL_FUNCTION_CODE (decl))
14242 {
14243 /* Builtins that expand to constants. */
14244 case BUILT_IN_CONSTANT_P:
14245 case BUILT_IN_EXPECT:
14246 case BUILT_IN_OBJECT_SIZE:
14247 case BUILT_IN_UNREACHABLE:
14248 /* Simple register moves or loads from stack. */
14249 case BUILT_IN_ASSUME_ALIGNED:
14250 case BUILT_IN_RETURN_ADDRESS:
14251 case BUILT_IN_EXTRACT_RETURN_ADDR:
14252 case BUILT_IN_FROB_RETURN_ADDR:
14253 case BUILT_IN_RETURN:
14254 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14255 case BUILT_IN_FRAME_ADDRESS:
14256 case BUILT_IN_VA_END:
14257 case BUILT_IN_STACK_SAVE:
14258 case BUILT_IN_STACK_RESTORE:
14259 /* Exception state returns or moves registers around. */
14260 case BUILT_IN_EH_FILTER:
14261 case BUILT_IN_EH_POINTER:
14262 case BUILT_IN_EH_COPY_VALUES:
14263 return true;
14264
14265 default:
14266 return false;
14267 }
14268
14269 return false;
14270 }
14271
14272 /* Return true if DECL is a builtin that is not expensive, i.e., one that
14273 is most probably expanded inline into reasonably simple code. The set
14274 accepted here is a superset of what is_simple_builtin accepts. */
14275 bool
14276 is_inexpensive_builtin (tree decl)
14277 {
14278 if (!decl)
14279 return false;
14280 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14281 return true;
14282 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14283 switch (DECL_FUNCTION_CODE (decl))
14284 {
14285 case BUILT_IN_ABS:
14286 case BUILT_IN_ALLOCA:
14287 case BUILT_IN_ALLOCA_WITH_ALIGN:
14288 case BUILT_IN_BSWAP16:
14289 case BUILT_IN_BSWAP32:
14290 case BUILT_IN_BSWAP64:
14291 case BUILT_IN_CLZ:
14292 case BUILT_IN_CLZIMAX:
14293 case BUILT_IN_CLZL:
14294 case BUILT_IN_CLZLL:
14295 case BUILT_IN_CTZ:
14296 case BUILT_IN_CTZIMAX:
14297 case BUILT_IN_CTZL:
14298 case BUILT_IN_CTZLL:
14299 case BUILT_IN_FFS:
14300 case BUILT_IN_FFSIMAX:
14301 case BUILT_IN_FFSL:
14302 case BUILT_IN_FFSLL:
14303 case BUILT_IN_IMAXABS:
14304 case BUILT_IN_FINITE:
14305 case BUILT_IN_FINITEF:
14306 case BUILT_IN_FINITEL:
14307 case BUILT_IN_FINITED32:
14308 case BUILT_IN_FINITED64:
14309 case BUILT_IN_FINITED128:
14310 case BUILT_IN_FPCLASSIFY:
14311 case BUILT_IN_ISFINITE:
14312 case BUILT_IN_ISINF_SIGN:
14313 case BUILT_IN_ISINF:
14314 case BUILT_IN_ISINFF:
14315 case BUILT_IN_ISINFL:
14316 case BUILT_IN_ISINFD32:
14317 case BUILT_IN_ISINFD64:
14318 case BUILT_IN_ISINFD128:
14319 case BUILT_IN_ISNAN:
14320 case BUILT_IN_ISNANF:
14321 case BUILT_IN_ISNANL:
14322 case BUILT_IN_ISNAND32:
14323 case BUILT_IN_ISNAND64:
14324 case BUILT_IN_ISNAND128:
14325 case BUILT_IN_ISNORMAL:
14326 case BUILT_IN_ISGREATER:
14327 case BUILT_IN_ISGREATEREQUAL:
14328 case BUILT_IN_ISLESS:
14329 case BUILT_IN_ISLESSEQUAL:
14330 case BUILT_IN_ISLESSGREATER:
14331 case BUILT_IN_ISUNORDERED:
14332 case BUILT_IN_VA_ARG_PACK:
14333 case BUILT_IN_VA_ARG_PACK_LEN:
14334 case BUILT_IN_VA_COPY:
14335 case BUILT_IN_TRAP:
14336 case BUILT_IN_SAVEREGS:
14337 case BUILT_IN_POPCOUNTL:
14338 case BUILT_IN_POPCOUNTLL:
14339 case BUILT_IN_POPCOUNTIMAX:
14340 case BUILT_IN_POPCOUNT:
14341 case BUILT_IN_PARITYL:
14342 case BUILT_IN_PARITYLL:
14343 case BUILT_IN_PARITYIMAX:
14344 case BUILT_IN_PARITY:
14345 case BUILT_IN_LABS:
14346 case BUILT_IN_LLABS:
14347 case BUILT_IN_PREFETCH:
14348 return true;
14349
14350 default:
14351 return is_simple_builtin (decl);
14352 }
14353
14354 return false;
14355 }
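
/* As a hedged sketch of intended use (STMT and CALL_COST here are
   hypothetical placeholders): passes use these predicates in cost
   heuristics, e.g.

     tree fndecl = gimple_call_fndecl (stmt);
     int cost = is_inexpensive_builtin (fndecl) ? 1 : CALL_COST;

   counting such calls as roughly one cheap instruction rather than a
   full call.  */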