/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "internal-fn.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "gomp-constants.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type; make sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Nonzero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or, when Cilk Plus is enabled, if it names one of the Cilk runtime
   frame builtins.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
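
/* For example (illustrative only): is_builtin_name ("__builtin_memcpy"),
   is_builtin_name ("__sync_fetch_and_add") and
   is_builtin_name ("__atomic_load_n") all return true, while
   is_builtin_name ("memcpy") returns false, since only the prefixed
   "internal" spellings are recognized.  */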

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This is the case whenever a function is
   invoked under its "internal" name, which normally contains the prefix
   "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and
   such that N < M.  If these numbers can be determined, store M in
   *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part, extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that
   M divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
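
/* A worked example (illustrative): if get_object_alignment_1 reports
   align == 64 and bitpos == 16 for some EXP, the address of EXP is known
   to lie 16 bits past a 64-bit boundary.  The fallback above then reduces
   the result to bitpos & -bitpos == 16 bits, i.e. 2 bytes, the largest
   power of two that still divides the address.  */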

/* For a pointer valued expression EXP compute values M and N such that
   M divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an
	     approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if
   the expression is actually pointing at an object whose alignment is
   tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
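
/* For example (illustrative): for the string constant "foo\0bar",
   c_strlen returns ssize_int (3), since the C string length is determined
   by the first zero byte, not by TREE_STRING_LENGTH (the full array
   size).  With a known offset of 1 into the same constant it returns
   ssize_int (2), the length of the tail "oo".  */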

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
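
/* For example (illustrative): given a tree representing "hello" + 2,
   c_getstr returns a host pointer to "llo"; given a non-constant source,
   or an offset past the end of the array, it returns 0.  */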

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
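
/* For example (illustrative, target dependent): on a little-endian target
   with 8-bit units, c_readstr ("abcd", SImode) yields the constant
   0x64636261, i.e. 'a' in the lowest byte; on a big-endian target the
   byte order is reversed.  Note that once a zero byte is seen, CH stays
   zero, so the rest of the value is zero-filled.  */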

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put the value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* We do not care here whether it fits or not.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
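
/* For example (illustrative): for an INTEGER_CST of value 65 this stores
   'A' in *P and returns 0.  On a hypothetical target with 16-bit chars
   and an 8-bit host char, the value 0x1ff survives the target mask but
   not the host mask, so VAL != HOSTVAL and 1 is returned to signal
   failure.  */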

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
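
/* Layout of the setjmp buffer as established above (a summary derived
   from the code): word 0 holds the frame pointer value, word 1 the
   address of RECEIVER_LABEL, and words 2 onward the machine-dependent
   stack save area written by emit_stack_save.  expand_builtin_longjmp
   below reads the buffer back using the same layout.  */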

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto
   handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded to the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if the CALL_EXPR argument iterator ITER has further
   arguments.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis: any further arguments are all
	     ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink: if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need the gotos so that va_end is reached on every path out of
     the loop.  */
 end: ;
  va_end (ap);

  return res;
}
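
/* For example (illustrative): expand_builtin_prefetch below calls
   validate_arglist (exp, POINTER_TYPE, 0), which accepts one pointer
   argument followed by anything, while expand_builtin_nonlocal_goto calls
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE), which
   accepts exactly two pointer arguments and nothing more.  */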

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it
     was based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five
   words (not all will be used on all machines) that was passed to
   __builtin_setjmp.  It updates the stack pointer in that block to the
   current value.  This is also called directly by the SJLJ exception
   handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
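
/* For example (illustrative): a source-level call such as
   __builtin_prefetch (p, 1, 3) prefetches *p for writing with maximal
   locality, while __builtin_prefetch (p) is equivalent to
   __builtin_prefetch (p, 0, 3) by the defaulting rules above.  */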

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
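
/* For example (illustrative): on a hypothetical target whose argument
   registers are one 8-byte integer register and one 16-byte vector
   register, the block sized above would contain the arg-pointer word,
   the optional structure value address, the integer register slot, and,
   after padding up to 16-byte alignment, the vector register slot.  */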
1398
1399 /* Return the size required for the block returned by __builtin_apply,
1400 and initialize apply_result_mode. */
1401
1402 static int
1403 apply_result_size (void)
1404 {
1405 static int size = -1;
1406 int align, regno;
1407 machine_mode mode;
1408
1409 /* The values computed by this function never change. */
1410 if (size < 0)
1411 {
1412 size = 0;
1413
1414 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1415 if (targetm.calls.function_value_regno_p (regno))
1416 {
1417 mode = targetm.calls.get_raw_result_mode (regno);
1418
1419 gcc_assert (mode != VOIDmode);
1420
1421 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1422 if (size % align != 0)
1423 size = CEIL (size, align) * align;
1424 size += GET_MODE_SIZE (mode);
1425 apply_result_mode[regno] = mode;
1426 }
1427 else
1428 apply_result_mode[regno] = VOIDmode;
1429
1430 /* Allow targets that use untyped_call and untyped_return to override
1431 the size so that machine-specific information can be stored here. */
1432 #ifdef APPLY_RESULT_SIZE
1433 size = APPLY_RESULT_SIZE;
1434 #endif
1435 }
1436 return size;
1437 }
1438
1439 /* Create a vector describing the result block RESULT. If SAVEP is true,
1440 the result block is used to save the values; otherwise it is used to
1441 restore the values. */
1442
1443 static rtx
1444 result_vector (int savep, rtx result)
1445 {
1446 int regno, size, align, nelts;
1447 machine_mode mode;
1448 rtx reg, mem;
1449 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1450
1451 size = nelts = 0;
1452 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1453 if ((mode = apply_result_mode[regno]) != VOIDmode)
1454 {
1455 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1456 if (size % align != 0)
1457 size = CEIL (size, align) * align;
1458 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1459 mem = adjust_address (result, mode, size);
1460 savevec[nelts++] = (savep
1461 ? gen_rtx_SET (mem, reg)
1462 : gen_rtx_SET (reg, mem));
1463 size += GET_MODE_SIZE (mode);
1464 }
1465 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1466 }
1467
1468 /* Save the state required to perform an untyped call with the same
1469 arguments as were passed to the current function. */
1470
1471 static rtx
1472 expand_builtin_apply_args_1 (void)
1473 {
1474 rtx registers, tem;
1475 int size, align, regno;
1476 machine_mode mode;
1477 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1478
1479 /* Create a block where the arg-pointer, structure value address,
1480 and argument registers can be saved. */
1481 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1482
1483 /* Walk past the arg-pointer and structure value address. */
1484 size = GET_MODE_SIZE (Pmode);
1485 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1486 size += GET_MODE_SIZE (Pmode);
1487
1488 /* Save each register used in calling a function to the block. */
1489 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1490 if ((mode = apply_args_mode[regno]) != VOIDmode)
1491 {
1492 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1493 if (size % align != 0)
1494 size = CEIL (size, align) * align;
1495
1496 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1497
1498 emit_move_insn (adjust_address (registers, mode, size), tem);
1499 size += GET_MODE_SIZE (mode);
1500 }
1501
1502 /* Save the arg pointer to the block. */
1503 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1504 /* We need the pointer as the caller actually passed them to us, not
1505 as we might have pretended they were passed. Make sure it's a valid
1506 operand, as emit_move_insn isn't expected to handle a PLUS. */
1507 if (STACK_GROWS_DOWNWARD)
1508 tem
1509 = force_operand (plus_constant (Pmode, tem,
1510 crtl->args.pretend_args_size),
1511 NULL_RTX);
1512 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1513
1514 size = GET_MODE_SIZE (Pmode);
1515
1516 /* Save the structure value address unless this is passed as an
1517 "invisible" first argument. */
1518 if (struct_incoming_value)
1519 {
1520 emit_move_insn (adjust_address (registers, Pmode, size),
1521 copy_to_reg (struct_incoming_value));
1522 size += GET_MODE_SIZE (Pmode);
1523 }
1524
1525 /* Return the address of the block. */
1526 return copy_addr_to_reg (XEXP (registers, 0));
1527 }
1528
1529 /* __builtin_apply_args returns block of memory allocated on
1530 the stack into which is stored the arg pointer, structure
1531 value address, static chain, and all the registers that might
1532 possibly be used in performing a function call. The code is
1533 moved to the start of the function so the incoming values are
1534 saved. */
1535
1536 static rtx
1537 expand_builtin_apply_args (void)
1538 {
1539 /* Don't do __builtin_apply_args more than once in a function.
1540 Save the result of the first call and reuse it. */
1541 if (apply_args_value != 0)
1542 return apply_args_value;
1543 {
1544 /* When this function is called, it means that registers must be
1545 saved on entry to this function. So we migrate the
1546 call to the first insn of this function. */
1547 rtx temp;
1548
1549 start_sequence ();
1550 temp = expand_builtin_apply_args_1 ();
1551 rtx_insn *seq = get_insns ();
1552 end_sequence ();
1553
1554 apply_args_value = temp;
1555
1556 /* Put the insns after the NOTE that starts the function.
1557 If this is inside a start_sequence, make the outer-level insn
1558 chain current, so the code is placed at the start of the
1559 function. If internal_arg_pointer is a non-virtual pseudo,
1560 it needs to be placed after the function that initializes
1561 that pseudo. */
1562 push_topmost_sequence ();
1563 if (REG_P (crtl->args.internal_arg_pointer)
1564 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1565 emit_insn_before (seq, parm_birth_insn);
1566 else
1567 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1568 pop_topmost_sequence ();
1569 return temp;
1570 }
1571 }
1572
1573 /* Perform an untyped call and save the state required to perform an
1574 untyped return of whatever value was returned by the given function. */
1575
1576 static rtx
1577 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1578 {
1579 int size, align, regno;
1580 machine_mode mode;
1581 rtx incoming_args, result, reg, dest, src;
1582 rtx_call_insn *call_insn;
1583 rtx old_stack_level = 0;
1584 rtx call_fusage = 0;
1585 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1586
1587 arguments = convert_memory_address (Pmode, arguments);
1588
1589 /* Create a block where the return registers can be saved. */
1590 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1591
1592 /* Fetch the arg pointer from the ARGUMENTS block. */
1593 incoming_args = gen_reg_rtx (Pmode);
1594 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1595 if (!STACK_GROWS_DOWNWARD)
1596 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1597 incoming_args, 0, OPTAB_LIB_WIDEN);
1598
1599 /* Push a new argument block and copy the arguments. Do not allow
1600 the (potential) memcpy call below to interfere with our stack
1601 manipulations. */
1602 do_pending_stack_adjust ();
1603 NO_DEFER_POP;
1604
1605 /* Save the stack with nonlocal if available. */
1606 if (targetm.have_save_stack_nonlocal ())
1607 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1608 else
1609 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1610
1611 /* Allocate a block of memory onto the stack and copy the memory
1612 arguments to the outgoing arguments address. We can pass TRUE
1613 as the 4th argument because we just saved the stack pointer
1614 and will restore it right after the call. */
1615 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1616
1617 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1618 may have already set current_function_calls_alloca to true.
1619 current_function_calls_alloca won't be set if argsize is zero,
1620 so we have to guarantee need_drap is true here. */
1621 if (SUPPORTS_STACK_ALIGNMENT)
1622 crtl->need_drap = true;
1623
1624 dest = virtual_outgoing_args_rtx;
1625 if (!STACK_GROWS_DOWNWARD)
1626 {
1627 if (CONST_INT_P (argsize))
1628 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1629 else
1630 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1631 }
1632 dest = gen_rtx_MEM (BLKmode, dest);
1633 set_mem_align (dest, PARM_BOUNDARY);
1634 src = gen_rtx_MEM (BLKmode, incoming_args);
1635 set_mem_align (src, PARM_BOUNDARY);
1636 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1637
1638 /* Refer to the argument block. */
1639 apply_args_size ();
1640 arguments = gen_rtx_MEM (BLKmode, arguments);
1641 set_mem_align (arguments, PARM_BOUNDARY);
1642
1643 /* Walk past the arg-pointer and structure value address. */
1644 size = GET_MODE_SIZE (Pmode);
1645 if (struct_value)
1646 size += GET_MODE_SIZE (Pmode);
1647
1648 /* Restore each of the registers previously saved. Make USE insns
1649 for each of these registers for use in making the call. */
1650 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1651 if ((mode = apply_args_mode[regno]) != VOIDmode)
1652 {
1653 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1654 if (size % align != 0)
1655 size = CEIL (size, align) * align;
1656 reg = gen_rtx_REG (mode, regno);
1657 emit_move_insn (reg, adjust_address (arguments, mode, size));
1658 use_reg (&call_fusage, reg);
1659 size += GET_MODE_SIZE (mode);
1660 }
1661
1662 /* Restore the structure value address unless this is passed as an
1663 "invisible" first argument. */
1664 size = GET_MODE_SIZE (Pmode);
1665 if (struct_value)
1666 {
1667 rtx value = gen_reg_rtx (Pmode);
1668 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1669 emit_move_insn (struct_value, value);
1670 if (REG_P (struct_value))
1671 use_reg (&call_fusage, struct_value);
1672 size += GET_MODE_SIZE (Pmode);
1673 }
1674
1675 /* All arguments and registers used for the call are set up by now! */
1676 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1677
1678 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1679 work is needed, and we don't want to load it into a register as an
1680 optimization, because prepare_call_address already did so if needed. */
1681 if (GET_CODE (function) != SYMBOL_REF)
1682 function = memory_address (FUNCTION_MODE, function);
1683
1684 /* Generate the actual call instruction and save the return value. */
1685 if (targetm.have_untyped_call ())
1686 {
1687 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1688 emit_call_insn (targetm.gen_untyped_call (mem, result,
1689 result_vector (1, result)));
1690 }
1691 else
1692 #ifdef HAVE_call_value
1693 if (HAVE_call_value)
1694 {
1695 rtx valreg = 0;
1696
1697 /* Locate the unique return register. It is not possible to
1698 express a call that sets more than one return register using
1699 call_value; use untyped_call for that. In fact, untyped_call
1700 only needs to save the return registers in the given block. */
1701 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1702 if ((mode = apply_result_mode[regno]) != VOIDmode)
1703 {
1704 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1705
1706 valreg = gen_rtx_REG (mode, regno);
1707 }
1708
1709 emit_call_insn (GEN_CALL_VALUE (valreg,
1710 gen_rtx_MEM (FUNCTION_MODE, function),
1711 const0_rtx, NULL_RTX, const0_rtx));
1712
1713 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1714 }
1715 else
1716 #endif
1717 gcc_unreachable ();
1718
1719 /* Find the CALL insn we just emitted, and attach the register usage
1720 information. */
1721 call_insn = last_call_insn ();
1722 add_function_usage_to (call_insn, call_fusage);
1723
1724 /* Restore the stack. */
1725 if (targetm.have_save_stack_nonlocal ())
1726 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1727 else
1728 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1729 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1730
1731 OK_DEFER_POP;
1732
1733 /* Return the address of the result block. */
1734 result = copy_addr_to_reg (XEXP (result, 0));
1735 return convert_memory_address (ptr_mode, result);
1736 }
1737
1738 /* Perform an untyped return. */
1739
1740 static void
1741 expand_builtin_return (rtx result)
1742 {
1743 int size, align, regno;
1744 machine_mode mode;
1745 rtx reg;
1746 rtx_insn *call_fusage = 0;
1747
1748 result = convert_memory_address (Pmode, result);
1749
1750 apply_result_size ();
1751 result = gen_rtx_MEM (BLKmode, result);
1752
1753 if (targetm.have_untyped_return ())
1754 {
1755 rtx vector = result_vector (0, result);
1756 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1757 emit_barrier ();
1758 return;
1759 }
1760
1761 /* Restore the return value and note that each value is used. */
1762 size = 0;
1763 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1764 if ((mode = apply_result_mode[regno]) != VOIDmode)
1765 {
1766 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1767 if (size % align != 0)
1768 size = CEIL (size, align) * align;
1769 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1770 emit_move_insn (reg, adjust_address (result, mode, size));
1771
1772 push_to_sequence (call_fusage);
1773 emit_use (reg);
1774 call_fusage = get_insns ();
1775 end_sequence ();
1776 size += GET_MODE_SIZE (mode);
1777 }
1778
1779 /* Put the USE insns before the return. */
1780 emit_insn (call_fusage);
1781
1782 /* Return whatever value was restored by jumping directly to the end
1783 of the function. */
1784 expand_naked_return ();
1785 }
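
/* Illustrative example (editorial addition, not part of the original
   source): the three expanders above implement GCC's untyped-call
   extension, typically used for argument-forwarding wrappers:

     void wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*)()) callee, args, 128);
       __builtin_return (ret);
     }

   where `callee' is some function chosen by the wrapper and 128 is a
   caller-chosen upper bound on the size of the argument block.  */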
1786
1787 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1788
1789 static enum type_class
1790 type_to_class (tree type)
1791 {
1792 switch (TREE_CODE (type))
1793 {
1794 case VOID_TYPE: return void_type_class;
1795 case INTEGER_TYPE: return integer_type_class;
1796 case ENUMERAL_TYPE: return enumeral_type_class;
1797 case BOOLEAN_TYPE: return boolean_type_class;
1798 case POINTER_TYPE: return pointer_type_class;
1799 case REFERENCE_TYPE: return reference_type_class;
1800 case OFFSET_TYPE: return offset_type_class;
1801 case REAL_TYPE: return real_type_class;
1802 case COMPLEX_TYPE: return complex_type_class;
1803 case FUNCTION_TYPE: return function_type_class;
1804 case METHOD_TYPE: return method_type_class;
1805 case RECORD_TYPE: return record_type_class;
1806 case UNION_TYPE:
1807 case QUAL_UNION_TYPE: return union_type_class;
1808 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1809 ? string_type_class : array_type_class);
1810 case LANG_TYPE: return lang_type_class;
1811 default: return no_type_class;
1812 }
1813 }
1814
1815 /* Expand a call EXP to __builtin_classify_type. */
1816
1817 static rtx
1818 expand_builtin_classify_type (tree exp)
1819 {
1820 if (call_expr_nargs (exp))
1821 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1822 return GEN_INT (no_type_class);
1823 }
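
/* Illustrative example (editorial addition): given the mapping in
   type_to_class, a call such as

     int k = __builtin_classify_type (3.14);

   folds to the constant real_type_class, and a pointer argument yields
   pointer_type_class; the numeric values of these enumerators are
   defined in typeclass.h.  */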
1824
1825 /* This helper macro, meant to be used in mathfn_built_in below,
1826 determines which among a set of three builtin math functions is
1827 appropriate for a given type mode. The `F' and `L' cases are
1828 automatically generated from the `double' case. */
1829 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1830 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1831 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1832 fcodel = BUILT_IN_MATHFN##L ; break;
1833 /* Similar to above, but appends _R after any F/L suffix. */
1834 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1835 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1836 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1837 fcodel = BUILT_IN_MATHFN##L_R ; break;
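
/* For example (editorial illustration), CASE_MATHFN (BUILT_IN_SIN)
   expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   handling the double, float and long double variants together.  */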
1838
1839 /* Return the mathematical function equivalent to FN, operating directly on
1840 TYPE, if available. If IMPLICIT_P is true use the implicit builtin declaration,
1841 otherwise use the explicit declaration. If we can't do the conversion,
1842 return zero. */
1843
1844 static tree
1845 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1846 {
1847 enum built_in_function fcode, fcodef, fcodel, fcode2;
1848
1849 switch (fn)
1850 {
1851 CASE_MATHFN (BUILT_IN_ACOS)
1852 CASE_MATHFN (BUILT_IN_ACOSH)
1853 CASE_MATHFN (BUILT_IN_ASIN)
1854 CASE_MATHFN (BUILT_IN_ASINH)
1855 CASE_MATHFN (BUILT_IN_ATAN)
1856 CASE_MATHFN (BUILT_IN_ATAN2)
1857 CASE_MATHFN (BUILT_IN_ATANH)
1858 CASE_MATHFN (BUILT_IN_CBRT)
1859 CASE_MATHFN (BUILT_IN_CEIL)
1860 CASE_MATHFN (BUILT_IN_CEXPI)
1861 CASE_MATHFN (BUILT_IN_COPYSIGN)
1862 CASE_MATHFN (BUILT_IN_COS)
1863 CASE_MATHFN (BUILT_IN_COSH)
1864 CASE_MATHFN (BUILT_IN_DREM)
1865 CASE_MATHFN (BUILT_IN_ERF)
1866 CASE_MATHFN (BUILT_IN_ERFC)
1867 CASE_MATHFN (BUILT_IN_EXP)
1868 CASE_MATHFN (BUILT_IN_EXP10)
1869 CASE_MATHFN (BUILT_IN_EXP2)
1870 CASE_MATHFN (BUILT_IN_EXPM1)
1871 CASE_MATHFN (BUILT_IN_FABS)
1872 CASE_MATHFN (BUILT_IN_FDIM)
1873 CASE_MATHFN (BUILT_IN_FLOOR)
1874 CASE_MATHFN (BUILT_IN_FMA)
1875 CASE_MATHFN (BUILT_IN_FMAX)
1876 CASE_MATHFN (BUILT_IN_FMIN)
1877 CASE_MATHFN (BUILT_IN_FMOD)
1878 CASE_MATHFN (BUILT_IN_FREXP)
1879 CASE_MATHFN (BUILT_IN_GAMMA)
1880 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1881 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1882 CASE_MATHFN (BUILT_IN_HYPOT)
1883 CASE_MATHFN (BUILT_IN_ILOGB)
1884 CASE_MATHFN (BUILT_IN_ICEIL)
1885 CASE_MATHFN (BUILT_IN_IFLOOR)
1886 CASE_MATHFN (BUILT_IN_INF)
1887 CASE_MATHFN (BUILT_IN_IRINT)
1888 CASE_MATHFN (BUILT_IN_IROUND)
1889 CASE_MATHFN (BUILT_IN_ISINF)
1890 CASE_MATHFN (BUILT_IN_J0)
1891 CASE_MATHFN (BUILT_IN_J1)
1892 CASE_MATHFN (BUILT_IN_JN)
1893 CASE_MATHFN (BUILT_IN_LCEIL)
1894 CASE_MATHFN (BUILT_IN_LDEXP)
1895 CASE_MATHFN (BUILT_IN_LFLOOR)
1896 CASE_MATHFN (BUILT_IN_LGAMMA)
1897 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1898 CASE_MATHFN (BUILT_IN_LLCEIL)
1899 CASE_MATHFN (BUILT_IN_LLFLOOR)
1900 CASE_MATHFN (BUILT_IN_LLRINT)
1901 CASE_MATHFN (BUILT_IN_LLROUND)
1902 CASE_MATHFN (BUILT_IN_LOG)
1903 CASE_MATHFN (BUILT_IN_LOG10)
1904 CASE_MATHFN (BUILT_IN_LOG1P)
1905 CASE_MATHFN (BUILT_IN_LOG2)
1906 CASE_MATHFN (BUILT_IN_LOGB)
1907 CASE_MATHFN (BUILT_IN_LRINT)
1908 CASE_MATHFN (BUILT_IN_LROUND)
1909 CASE_MATHFN (BUILT_IN_MODF)
1910 CASE_MATHFN (BUILT_IN_NAN)
1911 CASE_MATHFN (BUILT_IN_NANS)
1912 CASE_MATHFN (BUILT_IN_NEARBYINT)
1913 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1914 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1915 CASE_MATHFN (BUILT_IN_POW)
1916 CASE_MATHFN (BUILT_IN_POWI)
1917 CASE_MATHFN (BUILT_IN_POW10)
1918 CASE_MATHFN (BUILT_IN_REMAINDER)
1919 CASE_MATHFN (BUILT_IN_REMQUO)
1920 CASE_MATHFN (BUILT_IN_RINT)
1921 CASE_MATHFN (BUILT_IN_ROUND)
1922 CASE_MATHFN (BUILT_IN_SCALB)
1923 CASE_MATHFN (BUILT_IN_SCALBLN)
1924 CASE_MATHFN (BUILT_IN_SCALBN)
1925 CASE_MATHFN (BUILT_IN_SIGNBIT)
1926 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1927 CASE_MATHFN (BUILT_IN_SIN)
1928 CASE_MATHFN (BUILT_IN_SINCOS)
1929 CASE_MATHFN (BUILT_IN_SINH)
1930 CASE_MATHFN (BUILT_IN_SQRT)
1931 CASE_MATHFN (BUILT_IN_TAN)
1932 CASE_MATHFN (BUILT_IN_TANH)
1933 CASE_MATHFN (BUILT_IN_TGAMMA)
1934 CASE_MATHFN (BUILT_IN_TRUNC)
1935 CASE_MATHFN (BUILT_IN_Y0)
1936 CASE_MATHFN (BUILT_IN_Y1)
1937 CASE_MATHFN (BUILT_IN_YN)
1938
1939 default:
1940 return NULL_TREE;
1941 }
1942
1943 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1944 fcode2 = fcode;
1945 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1946 fcode2 = fcodef;
1947 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1948 fcode2 = fcodel;
1949 else
1950 return NULL_TREE;
1951
1952 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1953 return NULL_TREE;
1954
1955 return builtin_decl_explicit (fcode2);
1956 }
1957
1958 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1959
1960 tree
1961 mathfn_built_in (tree type, enum built_in_function fn)
1962 {
1963 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1964 }
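
/* Illustrative example (editorial addition): the lookup above means that

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   yields the declaration of sinf (via BUILT_IN_SINF), provided the
   implicit builtin declaration is available.  */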
1965
1966 /* If errno must be maintained, expand the RTL to check if the result,
1967 TARGET, of a built-in function call, EXP, is NaN, and if so set
1968 errno to EDOM. */
1969
1970 static void
1971 expand_errno_check (tree exp, rtx target)
1972 {
1973 rtx_code_label *lab = gen_label_rtx ();
1974
1975 /* Test the result; if it is NaN, set errno=EDOM because
1976 the argument was not in the domain. */
1977 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1978 NULL_RTX, NULL, lab,
1979 /* The jump is very likely. */
1980 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1981
1982 #ifdef TARGET_EDOM
1983 /* If this built-in doesn't throw an exception, set errno directly. */
1984 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1985 {
1986 #ifdef GEN_ERRNO_RTX
1987 rtx errno_rtx = GEN_ERRNO_RTX;
1988 #else
1989 rtx errno_rtx
1990 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1991 #endif
1992 emit_move_insn (errno_rtx,
1993 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1994 emit_label (lab);
1995 return;
1996 }
1997 #endif
1998
1999 /* Make sure the library call isn't expanded as a tail call. */
2000 CALL_EXPR_TAILCALL (exp) = 0;
2001
2002 /* We can't set errno=EDOM directly; let the library call do it.
2003 Pop the arguments right away in case the call gets deleted. */
2004 NO_DEFER_POP;
2005 expand_call (exp, target, 0);
2006 OK_DEFER_POP;
2007 emit_label (lab);
2008 }
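
/* Illustrative sketch (editorial addition): at the source level the
   check emitted above (in the TARGET_EDOM case) behaves like

     if (result == result)   /* Self-comparison fails only for NaN.  */
       goto done;
     errno = EDOM;
    done:;

   since NaN is the only value that compares unequal to itself.  */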
2009
2010 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2011 Return NULL_RTX if a normal call should be emitted rather than expanding
2012 the function in-line. EXP is the expression that is a call to the builtin
2013 function; if convenient, the result should be placed in TARGET.
2014 SUBTARGET may be used as the target for computing one of EXP's operands. */
2015
2016 static rtx
2017 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2018 {
2019 optab builtin_optab;
2020 rtx op0;
2021 rtx_insn *insns;
2022 tree fndecl = get_callee_fndecl (exp);
2023 machine_mode mode;
2024 bool errno_set = false;
2025 bool try_widening = false;
2026 tree arg;
2027
2028 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2029 return NULL_RTX;
2030
2031 arg = CALL_EXPR_ARG (exp, 0);
2032
2033 switch (DECL_FUNCTION_CODE (fndecl))
2034 {
2035 CASE_FLT_FN (BUILT_IN_SQRT):
2036 errno_set = ! tree_expr_nonnegative_p (arg);
2037 try_widening = true;
2038 builtin_optab = sqrt_optab;
2039 break;
2040 CASE_FLT_FN (BUILT_IN_EXP):
2041 errno_set = true; builtin_optab = exp_optab; break;
2042 CASE_FLT_FN (BUILT_IN_EXP10):
2043 CASE_FLT_FN (BUILT_IN_POW10):
2044 errno_set = true; builtin_optab = exp10_optab; break;
2045 CASE_FLT_FN (BUILT_IN_EXP2):
2046 errno_set = true; builtin_optab = exp2_optab; break;
2047 CASE_FLT_FN (BUILT_IN_EXPM1):
2048 errno_set = true; builtin_optab = expm1_optab; break;
2049 CASE_FLT_FN (BUILT_IN_LOGB):
2050 errno_set = true; builtin_optab = logb_optab; break;
2051 CASE_FLT_FN (BUILT_IN_LOG):
2052 errno_set = true; builtin_optab = log_optab; break;
2053 CASE_FLT_FN (BUILT_IN_LOG10):
2054 errno_set = true; builtin_optab = log10_optab; break;
2055 CASE_FLT_FN (BUILT_IN_LOG2):
2056 errno_set = true; builtin_optab = log2_optab; break;
2057 CASE_FLT_FN (BUILT_IN_LOG1P):
2058 errno_set = true; builtin_optab = log1p_optab; break;
2059 CASE_FLT_FN (BUILT_IN_ASIN):
2060 builtin_optab = asin_optab; break;
2061 CASE_FLT_FN (BUILT_IN_ACOS):
2062 builtin_optab = acos_optab; break;
2063 CASE_FLT_FN (BUILT_IN_TAN):
2064 builtin_optab = tan_optab; break;
2065 CASE_FLT_FN (BUILT_IN_ATAN):
2066 builtin_optab = atan_optab; break;
2067 CASE_FLT_FN (BUILT_IN_FLOOR):
2068 builtin_optab = floor_optab; break;
2069 CASE_FLT_FN (BUILT_IN_CEIL):
2070 builtin_optab = ceil_optab; break;
2071 CASE_FLT_FN (BUILT_IN_TRUNC):
2072 builtin_optab = btrunc_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ROUND):
2074 builtin_optab = round_optab; break;
2075 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2076 builtin_optab = nearbyint_optab;
2077 if (flag_trapping_math)
2078 break;
2079 /* Otherwise fall through and expand as rint. */
2080 CASE_FLT_FN (BUILT_IN_RINT):
2081 builtin_optab = rint_optab; break;
2082 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2083 builtin_optab = significand_optab; break;
2084 default:
2085 gcc_unreachable ();
2086 }
2087
2088 /* Make a suitable register to place result in. */
2089 mode = TYPE_MODE (TREE_TYPE (exp));
2090
2091 if (! flag_errno_math || ! HONOR_NANS (mode))
2092 errno_set = false;
2093
2094 /* Before working hard, check whether the instruction is available, but try
2095 to widen the mode for specific operations. */
2096 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2097 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2098 && (!errno_set || !optimize_insn_for_size_p ()))
2099 {
2100 rtx result = gen_reg_rtx (mode);
2101
2102 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2103 need to expand the argument again. This way, we will not perform
2104 side-effects more than once. */
2105 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2106
2107 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2108
2109 start_sequence ();
2110
2111 /* Compute into RESULT.
2112 Set RESULT to wherever the result comes back. */
2113 result = expand_unop (mode, builtin_optab, op0, result, 0);
2114
2115 if (result != 0)
2116 {
2117 if (errno_set)
2118 expand_errno_check (exp, result);
2119
2120 /* Output the entire sequence. */
2121 insns = get_insns ();
2122 end_sequence ();
2123 emit_insn (insns);
2124 return result;
2125 }
2126
2127 /* If we were unable to expand via the builtin, stop the sequence
2128 (without outputting the insns) and call to the library function
2129 with the stabilized argument list. */
2130 end_sequence ();
2131 }
2132
2133 return expand_call (exp, target, target == const0_rtx);
2134 }
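
/* Illustrative note (editorial addition): with -fno-math-errno, a call
   such as

     double r = sqrt (x);

   can be expanded inline through sqrt_optab (for instance a single
   hardware square-root instruction); when errno must be maintained, the
   inline result is additionally checked by expand_errno_check above.  */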
2135
2136 /* Expand a call to the builtin binary math functions (pow and atan2).
2137 Return NULL_RTX if a normal call should be emitted rather than expanding the
2138 function in-line. EXP is the expression that is a call to the builtin
2139 function; if convenient, the result should be placed in TARGET.
2140 SUBTARGET may be used as the target for computing one of EXP's
2141 operands. */
2142
2143 static rtx
2144 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2145 {
2146 optab builtin_optab;
2147 rtx op0, op1, result;
2148 rtx_insn *insns;
2149 int op1_type = REAL_TYPE;
2150 tree fndecl = get_callee_fndecl (exp);
2151 tree arg0, arg1;
2152 machine_mode mode;
2153 bool errno_set = true;
2154
2155 switch (DECL_FUNCTION_CODE (fndecl))
2156 {
2157 CASE_FLT_FN (BUILT_IN_SCALBN):
2158 CASE_FLT_FN (BUILT_IN_SCALBLN):
2159 CASE_FLT_FN (BUILT_IN_LDEXP):
2160 op1_type = INTEGER_TYPE;
2161 default:
2162 break;
2163 }
2164
2165 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2166 return NULL_RTX;
2167
2168 arg0 = CALL_EXPR_ARG (exp, 0);
2169 arg1 = CALL_EXPR_ARG (exp, 1);
2170
2171 switch (DECL_FUNCTION_CODE (fndecl))
2172 {
2173 CASE_FLT_FN (BUILT_IN_POW):
2174 builtin_optab = pow_optab; break;
2175 CASE_FLT_FN (BUILT_IN_ATAN2):
2176 builtin_optab = atan2_optab; break;
2177 CASE_FLT_FN (BUILT_IN_SCALB):
2178 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2179 return 0;
2180 builtin_optab = scalb_optab; break;
2181 CASE_FLT_FN (BUILT_IN_SCALBN):
2182 CASE_FLT_FN (BUILT_IN_SCALBLN):
2183 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2184 return 0;
2185 /* Fall through... */
2186 CASE_FLT_FN (BUILT_IN_LDEXP):
2187 builtin_optab = ldexp_optab; break;
2188 CASE_FLT_FN (BUILT_IN_FMOD):
2189 builtin_optab = fmod_optab; break;
2190 CASE_FLT_FN (BUILT_IN_REMAINDER):
2191 CASE_FLT_FN (BUILT_IN_DREM):
2192 builtin_optab = remainder_optab; break;
2193 default:
2194 gcc_unreachable ();
2195 }
2196
2197 /* Make a suitable register to place result in. */
2198 mode = TYPE_MODE (TREE_TYPE (exp));
2199
2200 /* Before working hard, check whether the instruction is available. */
2201 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2202 return NULL_RTX;
2203
2204 result = gen_reg_rtx (mode);
2205
2206 if (! flag_errno_math || ! HONOR_NANS (mode))
2207 errno_set = false;
2208
2209 if (errno_set && optimize_insn_for_size_p ())
2210 return 0;
2211
2212 /* Always stabilize the argument list. */
2213 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2214 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2215
2216 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2217 op1 = expand_normal (arg1);
2218
2219 start_sequence ();
2220
2221 /* Compute into RESULT.
2222 Set RESULT to wherever the result comes back. */
2223 result = expand_binop (mode, builtin_optab, op0, op1,
2224 result, 0, OPTAB_DIRECT);
2225
2226 /* If we were unable to expand via the builtin, stop the sequence
2227 (without outputting the insns) and call to the library function
2228 with the stabilized argument list. */
2229 if (result == 0)
2230 {
2231 end_sequence ();
2232 return expand_call (exp, target, target == const0_rtx);
2233 }
2234
2235 if (errno_set)
2236 expand_errno_check (exp, result);
2237
2238 /* Output the entire sequence. */
2239 insns = get_insns ();
2240 end_sequence ();
2241 emit_insn (insns);
2242
2243 return result;
2244 }
2245
2246 /* Expand a call to the builtin ternary math functions (fma).
2247 Return NULL_RTX if a normal call should be emitted rather than expanding the
2248 function in-line. EXP is the expression that is a call to the builtin
2249 function; if convenient, the result should be placed in TARGET.
2250 SUBTARGET may be used as the target for computing one of EXP's
2251 operands. */
2252
2253 static rtx
2254 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2255 {
2256 optab builtin_optab;
2257 rtx op0, op1, op2, result;
2258 rtx_insn *insns;
2259 tree fndecl = get_callee_fndecl (exp);
2260 tree arg0, arg1, arg2;
2261 machine_mode mode;
2262
2263 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2264 return NULL_RTX;
2265
2266 arg0 = CALL_EXPR_ARG (exp, 0);
2267 arg1 = CALL_EXPR_ARG (exp, 1);
2268 arg2 = CALL_EXPR_ARG (exp, 2);
2269
2270 switch (DECL_FUNCTION_CODE (fndecl))
2271 {
2272 CASE_FLT_FN (BUILT_IN_FMA):
2273 builtin_optab = fma_optab; break;
2274 default:
2275 gcc_unreachable ();
2276 }
2277
2278 /* Make a suitable register to place result in. */
2279 mode = TYPE_MODE (TREE_TYPE (exp));
2280
2281 /* Before working hard, check whether the instruction is available. */
2282 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2283 return NULL_RTX;
2284
2285 result = gen_reg_rtx (mode);
2286
2287 /* Always stabilize the argument list. */
2288 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2289 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2290 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2291
2292 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2293 op1 = expand_normal (arg1);
2294 op2 = expand_normal (arg2);
2295
2296 start_sequence ();
2297
2298 /* Compute into RESULT.
2299 Set RESULT to wherever the result comes back. */
2300 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2301 result, 0);
2302
2303 /* If we were unable to expand via the builtin, stop the sequence
2304 (without outputting the insns) and call to the library function
2305 with the stabilized argument list. */
2306 if (result == 0)
2307 {
2308 end_sequence ();
2309 return expand_call (exp, target, target == const0_rtx);
2310 }
2311
2312 /* Output the entire sequence. */
2313 insns = get_insns ();
2314 end_sequence ();
2315 emit_insn (insns);
2316
2317 return result;
2318 }
2319
2320 /* Expand a call to the builtin sin and cos math functions.
2321 Return NULL_RTX if a normal call should be emitted rather than expanding the
2322 function in-line. EXP is the expression that is a call to the builtin
2323 function; if convenient, the result should be placed in TARGET.
2324 SUBTARGET may be used as the target for computing one of EXP's
2325 operands. */
2326
2327 static rtx
2328 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2329 {
2330 optab builtin_optab;
2331 rtx op0;
2332 rtx_insn *insns;
2333 tree fndecl = get_callee_fndecl (exp);
2334 machine_mode mode;
2335 tree arg;
2336
2337 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2338 return NULL_RTX;
2339
2340 arg = CALL_EXPR_ARG (exp, 0);
2341
2342 switch (DECL_FUNCTION_CODE (fndecl))
2343 {
2344 CASE_FLT_FN (BUILT_IN_SIN):
2345 CASE_FLT_FN (BUILT_IN_COS):
2346 builtin_optab = sincos_optab; break;
2347 default:
2348 gcc_unreachable ();
2349 }
2350
2351 /* Make a suitable register to place result in. */
2352 mode = TYPE_MODE (TREE_TYPE (exp));
2353
2354 /* Check if the sincos insn is available; otherwise fall back
2355 to the sin or cos insn. */
2356 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2357 switch (DECL_FUNCTION_CODE (fndecl))
2358 {
2359 CASE_FLT_FN (BUILT_IN_SIN):
2360 builtin_optab = sin_optab; break;
2361 CASE_FLT_FN (BUILT_IN_COS):
2362 builtin_optab = cos_optab; break;
2363 default:
2364 gcc_unreachable ();
2365 }
2366
2367 /* Before working hard, check whether the instruction is available. */
2368 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2369 {
2370 rtx result = gen_reg_rtx (mode);
2371
2372 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2373 need to expand the argument again. This way, we will not perform
2374 side-effects more than once. */
2375 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2376
2377 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2378
2379 start_sequence ();
2380
2381 /* Compute into RESULT.
2382 Set RESULT to wherever the result comes back. */
2383 if (builtin_optab == sincos_optab)
2384 {
2385 int ok;
2386
2387 switch (DECL_FUNCTION_CODE (fndecl))
2388 {
2389 CASE_FLT_FN (BUILT_IN_SIN):
2390 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2391 break;
2392 CASE_FLT_FN (BUILT_IN_COS):
2393 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2394 break;
2395 default:
2396 gcc_unreachable ();
2397 }
2398 gcc_assert (ok);
2399 }
2400 else
2401 result = expand_unop (mode, builtin_optab, op0, result, 0);
2402
2403 if (result != 0)
2404 {
2405 /* Output the entire sequence. */
2406 insns = get_insns ();
2407 end_sequence ();
2408 emit_insn (insns);
2409 return result;
2410 }
2411
2412 /* If we were unable to expand via the builtin, stop the sequence
2413 (without outputting the insns) and call to the library function
2414 with the stabilized argument list. */
2415 end_sequence ();
2416 }
2417
2418 return expand_call (exp, target, target == const0_rtx);
2419 }
2420
2421 /* Given an interclass math builtin decl FNDECL and its argument ARG
2422 return an RTL instruction code that implements the functionality.
2423 If that isn't possible or available return CODE_FOR_nothing. */
2424
2425 static enum insn_code
2426 interclass_mathfn_icode (tree arg, tree fndecl)
2427 {
2428 bool errno_set = false;
2429 optab builtin_optab = unknown_optab;
2430 machine_mode mode;
2431
2432 switch (DECL_FUNCTION_CODE (fndecl))
2433 {
2434 CASE_FLT_FN (BUILT_IN_ILOGB):
2435 errno_set = true; builtin_optab = ilogb_optab; break;
2436 CASE_FLT_FN (BUILT_IN_ISINF):
2437 builtin_optab = isinf_optab; break;
2438 case BUILT_IN_ISNORMAL:
2439 case BUILT_IN_ISFINITE:
2440 CASE_FLT_FN (BUILT_IN_FINITE):
2441 case BUILT_IN_FINITED32:
2442 case BUILT_IN_FINITED64:
2443 case BUILT_IN_FINITED128:
2444 case BUILT_IN_ISINFD32:
2445 case BUILT_IN_ISINFD64:
2446 case BUILT_IN_ISINFD128:
2447 /* These builtins have no optabs (yet). */
2448 break;
2449 default:
2450 gcc_unreachable ();
2451 }
2452
2453 /* There's no easy way to detect the case where we need to set EDOM. */
2454 if (flag_errno_math && errno_set)
2455 return CODE_FOR_nothing;
2456
2457 /* Optab mode depends on the mode of the input argument. */
2458 mode = TYPE_MODE (TREE_TYPE (arg));
2459
2460 if (builtin_optab)
2461 return optab_handler (builtin_optab, mode);
2462 return CODE_FOR_nothing;
2463 }
2464
2465 /* Expand a call to one of the builtin math functions that operate on
2466 a floating point argument and produce an integer result (ilogb, isinf,
2467 isnan, etc.).
2468 Return 0 if a normal call should be emitted rather than expanding the
2469 function in-line. EXP is the expression that is a call to the builtin
2470 function; if convenient, the result should be placed in TARGET. */
2471
2472 static rtx
2473 expand_builtin_interclass_mathfn (tree exp, rtx target)
2474 {
2475 enum insn_code icode = CODE_FOR_nothing;
2476 rtx op0;
2477 tree fndecl = get_callee_fndecl (exp);
2478 machine_mode mode;
2479 tree arg;
2480
2481 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2482 return NULL_RTX;
2483
2484 arg = CALL_EXPR_ARG (exp, 0);
2485 icode = interclass_mathfn_icode (arg, fndecl);
2486 mode = TYPE_MODE (TREE_TYPE (arg));
2487
2488 if (icode != CODE_FOR_nothing)
2489 {
2490 struct expand_operand ops[1];
2491 rtx_insn *last = get_last_insn ();
2492 tree orig_arg = arg;
2493
2494 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2495 need to expand the argument again. This way, we will not perform
2496 side-effects more than once. */
2497 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2498
2499 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2500
2501 if (mode != GET_MODE (op0))
2502 op0 = convert_to_mode (mode, op0, 0);
2503
2504 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2505 if (maybe_legitimize_operands (icode, 0, 1, ops)
2506 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2507 return ops[0].value;
2508
2509 delete_insns_since (last);
2510 CALL_EXPR_ARG (exp, 0) = orig_arg;
2511 }
2512
2513 return NULL_RTX;
2514 }
2515
2516 /* Expand a call to the builtin sincos math function.
2517 Return NULL_RTX if a normal call should be emitted rather than expanding the
2518 function in-line. EXP is the expression that is a call to the builtin
2519 function. */
2520
2521 static rtx
2522 expand_builtin_sincos (tree exp)
2523 {
2524 rtx op0, op1, op2, target1, target2;
2525 machine_mode mode;
2526 tree arg, sinp, cosp;
2527 int result;
2528 location_t loc = EXPR_LOCATION (exp);
2529 tree alias_type, alias_off;
2530
2531 if (!validate_arglist (exp, REAL_TYPE,
2532 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2533 return NULL_RTX;
2534
2535 arg = CALL_EXPR_ARG (exp, 0);
2536 sinp = CALL_EXPR_ARG (exp, 1);
2537 cosp = CALL_EXPR_ARG (exp, 2);
2538
2539 /* Make a suitable register to place result in. */
2540 mode = TYPE_MODE (TREE_TYPE (arg));
2541
2542 /* Check if sincos insn is available, otherwise emit the call. */
2543 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2544 return NULL_RTX;
2545
2546 target1 = gen_reg_rtx (mode);
2547 target2 = gen_reg_rtx (mode);
2548
2549 op0 = expand_normal (arg);
2550 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2551 alias_off = build_int_cst (alias_type, 0);
2552 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2553 sinp, alias_off));
2554 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2555 cosp, alias_off));
2556
2557 /* Compute into target1 and target2.
2558 Set TARGET to wherever the result comes back. */
2559 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2560 gcc_assert (result);
2561
2562 /* Move target1 and target2 to the memory locations indicated
2563 by op1 and op2. */
2564 emit_move_insn (op1, target1);
2565 emit_move_insn (op2, target2);
2566
2567 return const0_rtx;
2568 }
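
/* Illustrative example (editorial addition): on a target providing the
   sincos insn, a call

     sincos (x, &s, &c);

   is expanded into a single two-output operation computing both the
   sine and the cosine, whose results are then stored through the two
   pointer arguments as above.  */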
2569
2570 /* Expand a call to the internal cexpi builtin to the sincos math function.
2571 EXP is the expression that is a call to the builtin function; if convenient,
2572 the result should be placed in TARGET. */
2573
2574 static rtx
2575 expand_builtin_cexpi (tree exp, rtx target)
2576 {
2577 tree fndecl = get_callee_fndecl (exp);
2578 tree arg, type;
2579 machine_mode mode;
2580 rtx op0, op1, op2;
2581 location_t loc = EXPR_LOCATION (exp);
2582
2583 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2584 return NULL_RTX;
2585
2586 arg = CALL_EXPR_ARG (exp, 0);
2587 type = TREE_TYPE (arg);
2588 mode = TYPE_MODE (TREE_TYPE (arg));
2589
2590 /* Try expanding via a sincos optab, fall back to emitting a libcall
2591 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2592 is only generated from sincos or cexp, or when either is available. */
2593 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2594 {
2595 op1 = gen_reg_rtx (mode);
2596 op2 = gen_reg_rtx (mode);
2597
2598 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2599
2600 /* Compute into op1 and op2. */
2601 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2602 }
2603 else if (targetm.libc_has_function (function_sincos))
2604 {
2605 tree call, fn = NULL_TREE;
2606 tree top1, top2;
2607 rtx op1a, op2a;
2608
2609 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2610 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2611 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2612 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2613 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2614 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2615 else
2616 gcc_unreachable ();
2617
2618 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2619 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2620 op1a = copy_addr_to_reg (XEXP (op1, 0));
2621 op2a = copy_addr_to_reg (XEXP (op2, 0));
2622 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2623 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2624
2625 /* Make sure not to fold the sincos call again. */
2626 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2627 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2628 call, 3, arg, top1, top2));
2629 }
2630 else
2631 {
2632 tree call, fn = NULL_TREE, narg;
2633 tree ctype = build_complex_type (type);
2634
2635 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2636 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2637 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2638 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2639 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2640 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2641 else
2642 gcc_unreachable ();
2643
2644 /* If we don't have a decl for cexp create one. This is the
2645 friendliest fallback if the user calls __builtin_cexpi
2646 without full target C99 function support. */
2647 if (fn == NULL_TREE)
2648 {
2649 tree fntype;
2650 const char *name = NULL;
2651
2652 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2653 name = "cexpf";
2654 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2655 name = "cexp";
2656 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2657 name = "cexpl";
2658
2659 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2660 fn = build_fn_decl (name, fntype);
2661 }
2662
2663 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2664 build_real (type, dconst0), arg);
2665
2666 /* Make sure not to fold the cexp call again. */
2667 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2668 return expand_expr (build_call_nary (ctype, call, 1, narg),
2669 target, VOIDmode, EXPAND_NORMAL);
2670 }
2671
2672 /* Now build the proper return type. */
2673 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2674 make_tree (TREE_TYPE (arg), op2),
2675 make_tree (TREE_TYPE (arg), op1)),
2676 target, VOIDmode, EXPAND_NORMAL);
2677 }
2678
2679 /* Conveniently construct a function call expression. FNDECL names the
2680 function to be called, N is the number of arguments, and the "..."
2681 parameters are the argument expressions. Unlike build_call_expr
2682 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2683
2684 static tree
2685 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2686 {
2687 va_list ap;
2688 tree fntype = TREE_TYPE (fndecl);
2689 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2690
2691 va_start (ap, n);
2692 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2693 va_end (ap);
2694 SET_EXPR_LOCATION (fn, loc);
2695 return fn;
2696 }
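
/* Illustrative usage (editorial addition): the fallback paths below
   call this helper as, e.g.,

     exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                  fallback_fndecl, 1, arg);

   producing a CALL_EXPR that will not be folded straight back into the
   builtin currently being expanded.  */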
2697
2698 /* Expand a call to one of the builtin rounding functions gcc defines
2699 as an extension (lfloor and lceil). As these are gcc extensions we
2700 do not need to worry about setting errno to EDOM.
2701 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2702 EXP is the expression that is a call to the builtin function;
2703 if convenient, the result should be placed in TARGET. */
2704
2705 static rtx
2706 expand_builtin_int_roundingfn (tree exp, rtx target)
2707 {
2708 convert_optab builtin_optab;
2709 rtx op0, tmp;
2710 rtx_insn *insns;
2711 tree fndecl = get_callee_fndecl (exp);
2712 enum built_in_function fallback_fn;
2713 tree fallback_fndecl;
2714 machine_mode mode;
2715 tree arg;
2716
2717 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2718 gcc_unreachable ();
2719
2720 arg = CALL_EXPR_ARG (exp, 0);
2721
2722 switch (DECL_FUNCTION_CODE (fndecl))
2723 {
2724 CASE_FLT_FN (BUILT_IN_ICEIL):
2725 CASE_FLT_FN (BUILT_IN_LCEIL):
2726 CASE_FLT_FN (BUILT_IN_LLCEIL):
2727 builtin_optab = lceil_optab;
2728 fallback_fn = BUILT_IN_CEIL;
2729 break;
2730
2731 CASE_FLT_FN (BUILT_IN_IFLOOR):
2732 CASE_FLT_FN (BUILT_IN_LFLOOR):
2733 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2734 builtin_optab = lfloor_optab;
2735 fallback_fn = BUILT_IN_FLOOR;
2736 break;
2737
2738 default:
2739 gcc_unreachable ();
2740 }
2741
2742 /* Make a suitable register to place result in. */
2743 mode = TYPE_MODE (TREE_TYPE (exp));
2744
2745 target = gen_reg_rtx (mode);
2746
2747 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2748 need to expand the argument again. This way, we will not perform
2749 side-effects more than once. */
2750 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2751
2752 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2753
2754 start_sequence ();
2755
2756 /* Compute into TARGET. */
2757 if (expand_sfix_optab (target, op0, builtin_optab))
2758 {
2759 /* Output the entire sequence. */
2760 insns = get_insns ();
2761 end_sequence ();
2762 emit_insn (insns);
2763 return target;
2764 }
2765
2766 /* If we were unable to expand via the builtin, stop the sequence
2767 (without outputting the insns). */
2768 end_sequence ();
2769
2770 /* Fall back to floating point rounding optab. */
2771 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2772
2773 /* For non-C99 targets we may end up without a fallback fndecl here
2774 if the user called __builtin_lfloor directly. In this case emit
2775 a call to the floor/ceil variants nevertheless. This should result
2776 in the best user experience for targets lacking full C99 support. */
2777 if (fallback_fndecl == NULL_TREE)
2778 {
2779 tree fntype;
2780 const char *name = NULL;
2781
2782 switch (DECL_FUNCTION_CODE (fndecl))
2783 {
2784 case BUILT_IN_ICEIL:
2785 case BUILT_IN_LCEIL:
2786 case BUILT_IN_LLCEIL:
2787 name = "ceil";
2788 break;
2789 case BUILT_IN_ICEILF:
2790 case BUILT_IN_LCEILF:
2791 case BUILT_IN_LLCEILF:
2792 name = "ceilf";
2793 break;
2794 case BUILT_IN_ICEILL:
2795 case BUILT_IN_LCEILL:
2796 case BUILT_IN_LLCEILL:
2797 name = "ceill";
2798 break;
2799 case BUILT_IN_IFLOOR:
2800 case BUILT_IN_LFLOOR:
2801 case BUILT_IN_LLFLOOR:
2802 name = "floor";
2803 break;
2804 case BUILT_IN_IFLOORF:
2805 case BUILT_IN_LFLOORF:
2806 case BUILT_IN_LLFLOORF:
2807 name = "floorf";
2808 break;
2809 case BUILT_IN_IFLOORL:
2810 case BUILT_IN_LFLOORL:
2811 case BUILT_IN_LLFLOORL:
2812 name = "floorl";
2813 break;
2814 default:
2815 gcc_unreachable ();
2816 }
2817
2818 fntype = build_function_type_list (TREE_TYPE (arg),
2819 TREE_TYPE (arg), NULL_TREE);
2820 fallback_fndecl = build_fn_decl (name, fntype);
2821 }
2822
2823 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2824
2825 tmp = expand_normal (exp);
2826 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2827
2828 /* Truncate the result of floating point optab to integer
2829 via expand_fix (). */
2830 target = gen_reg_rtx (mode);
2831 expand_fix (target, tmp, 0);
2832
2833 return target;
2834 }
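
/* Illustrative note (editorial addition): when lfloor_optab is not
   available for the mode, the fallback above turns

     long l = __builtin_lfloor (x);

   into the equivalent of (long) floor (x): a call to floor followed by
   an expand_fix truncation to the integer mode.  */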
2835
2836 /* Expand a call to one of the builtin math functions doing integer
2837 conversion (lrint).
2838 Return 0 if a normal call should be emitted rather than expanding the
2839 function in-line. EXP is the expression that is a call to the builtin
2840 function; if convenient, the result should be placed in TARGET. */
2841
2842 static rtx
2843 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2844 {
2845 convert_optab builtin_optab;
2846 rtx op0;
2847 rtx_insn *insns;
2848 tree fndecl = get_callee_fndecl (exp);
2849 tree arg;
2850 machine_mode mode;
2851 enum built_in_function fallback_fn = BUILT_IN_NONE;
2852
2853 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2854 gcc_unreachable ();
2855
2856 arg = CALL_EXPR_ARG (exp, 0);
2857
2858 switch (DECL_FUNCTION_CODE (fndecl))
2859 {
2860 CASE_FLT_FN (BUILT_IN_IRINT):
2861 fallback_fn = BUILT_IN_LRINT;
2862 /* FALLTHRU */
2863 CASE_FLT_FN (BUILT_IN_LRINT):
2864 CASE_FLT_FN (BUILT_IN_LLRINT):
2865 builtin_optab = lrint_optab;
2866 break;
2867
2868 CASE_FLT_FN (BUILT_IN_IROUND):
2869 fallback_fn = BUILT_IN_LROUND;
2870 /* FALLTHRU */
2871 CASE_FLT_FN (BUILT_IN_LROUND):
2872 CASE_FLT_FN (BUILT_IN_LLROUND):
2873 builtin_optab = lround_optab;
2874 break;
2875
2876 default:
2877 gcc_unreachable ();
2878 }
2879
2880 /* There's no easy way to detect the case where we need to set EDOM. */
2881 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2882 return NULL_RTX;
2883
2884 /* Make a suitable register to place result in. */
2885 mode = TYPE_MODE (TREE_TYPE (exp));
2886
2887 /* If errno need not be maintained, we can expand via the optab directly. */
2888 if (!flag_errno_math)
2889 {
2890 rtx result = gen_reg_rtx (mode);
2891
2892 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2893 need to expand the argument again. This way, we will not perform
2894 side-effects more than once. */
2895 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2896
2897 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2898
2899 start_sequence ();
2900
2901 if (expand_sfix_optab (result, op0, builtin_optab))
2902 {
2903 /* Output the entire sequence. */
2904 insns = get_insns ();
2905 end_sequence ();
2906 emit_insn (insns);
2907 return result;
2908 }
2909
2910 /* If we were unable to expand via the builtin, stop the sequence
2911 (without outputting the insns) and call to the library function
2912 with the stabilized argument list. */
2913 end_sequence ();
2914 }
2915
2916 if (fallback_fn != BUILT_IN_NONE)
2917 {
2918 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2919 targets, (int) round (x) should never be transformed into
2920 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2921 a call to lround in the hope that the target provides at least some
2922 C99 functions. This should result in the best user experience for
2923 targets lacking full C99 support. */
2924 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2925 fallback_fn, 0);
2926
2927 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2928 fallback_fndecl, 1, arg);
2929
2930 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2931 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2932 return convert_to_mode (mode, target, 0);
2933 }
2934
2935 return expand_call (exp, target, target == const0_rtx);
2936 }
2937
2938 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2939 a normal call should be emitted rather than expanding the function
2940 in-line. EXP is the expression that is a call to the builtin
2941 function; if convenient, the result should be placed in TARGET. */
2942
2943 static rtx
2944 expand_builtin_powi (tree exp, rtx target)
2945 {
2946 tree arg0, arg1;
2947 rtx op0, op1;
2948 machine_mode mode;
2949 machine_mode mode2;
2950
2951 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2952 return NULL_RTX;
2953
2954 arg0 = CALL_EXPR_ARG (exp, 0);
2955 arg1 = CALL_EXPR_ARG (exp, 1);
2956 mode = TYPE_MODE (TREE_TYPE (exp));
2957
2958 /* Emit a libcall to libgcc. */
2959
2960 /* Mode of the 2nd argument must match that of an int. */
2961 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2962
2963 if (target == NULL_RTX)
2964 target = gen_reg_rtx (mode);
2965
2966 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2967 if (GET_MODE (op0) != mode)
2968 op0 = convert_to_mode (mode, op0, 0);
2969 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2970 if (GET_MODE (op1) != mode2)
2971 op1 = convert_to_mode (mode2, op1, 0);
2972
2973 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2974 target, LCT_CONST, mode, 2,
2975 op0, mode, op1, mode2);
2976
2977 return target;
2978 }
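
/* Illustrative note (editorial addition): because this expander always
   emits a libcall, a call such as

     double r = __builtin_powi (x, n);

   becomes a call to the libgcc helper registered for powi_optab in
   DFmode (conventionally __powidf2), with N converted to int mode.  */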
2979
2980 /* Expand expression EXP which is a call to the strlen builtin. Return
2981 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2982 try to get the result in TARGET, if convenient. */
2983
2984 static rtx
2985 expand_builtin_strlen (tree exp, rtx target,
2986 machine_mode target_mode)
2987 {
2988 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2989 return NULL_RTX;
2990 else
2991 {
2992 struct expand_operand ops[4];
2993 rtx pat;
2994 tree len;
2995 tree src = CALL_EXPR_ARG (exp, 0);
2996 rtx src_reg;
2997 rtx_insn *before_strlen;
2998 machine_mode insn_mode = target_mode;
2999 enum insn_code icode = CODE_FOR_nothing;
3000 unsigned int align;
3001
3002 /* If the length can be computed at compile-time, return it. */
3003 len = c_strlen (src, 0);
3004 if (len)
3005 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3006
3007 /* If the length can be computed at compile-time and is a constant
3008 integer, but there are side-effects in src, evaluate
3009 src for side-effects, then return len.
3010 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3011 can be optimized into: i++; x = 3; */
3012 len = c_strlen (src, 1);
3013 if (len && TREE_CODE (len) == INTEGER_CST)
3014 {
3015 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3016 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3017 }
3018
3019 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3020
3021 /* If SRC is not a pointer type, don't do this operation inline. */
3022 if (align == 0)
3023 return NULL_RTX;
3024
3025 /* Bail out if we can't compute strlen in the right mode. */
3026 while (insn_mode != VOIDmode)
3027 {
3028 icode = optab_handler (strlen_optab, insn_mode);
3029 if (icode != CODE_FOR_nothing)
3030 break;
3031
3032 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3033 }
3034 if (insn_mode == VOIDmode)
3035 return NULL_RTX;
3036
3037 /* Make a place to hold the source address. We will not expand
3038 the actual source until we are sure that the expansion will
3039 not fail -- there are trees that cannot be expanded twice. */
3040 src_reg = gen_reg_rtx (Pmode);
3041
3042 /* Mark the beginning of the strlen sequence so we can emit the
3043 source operand later. */
3044 before_strlen = get_last_insn ();
3045
3046 create_output_operand (&ops[0], target, insn_mode);
3047 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3048 create_integer_operand (&ops[2], 0);
3049 create_integer_operand (&ops[3], align);
3050 if (!maybe_expand_insn (icode, 4, ops))
3051 return NULL_RTX;
3052
3053 /* Now that we are assured of success, expand the source. */
3054 start_sequence ();
3055 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3056 if (pat != src_reg)
3057 {
3058 #ifdef POINTERS_EXTEND_UNSIGNED
3059 if (GET_MODE (pat) != Pmode)
3060 pat = convert_to_mode (Pmode, pat,
3061 POINTERS_EXTEND_UNSIGNED);
3062 #endif
3063 emit_move_insn (src_reg, pat);
3064 }
3065 pat = get_insns ();
3066 end_sequence ();
3067
3068 if (before_strlen)
3069 emit_insn_after (pat, before_strlen);
3070 else
3071 emit_insn_before (pat, get_insns ());
3072
3073 /* Return the value in the proper mode for this function. */
3074 if (GET_MODE (ops[0].value) == target_mode)
3075 target = ops[0].value;
3076 else if (target != 0)
3077 convert_move (target, ops[0].value, 0);
3078 else
3079 target = convert_to_mode (target_mode, ops[0].value, 0);
3080
3081 return target;
3082 }
3083 }
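
/* Illustrative note (editorial addition): for a literal argument, e.g.

     size_t n = strlen ("hello");

   c_strlen computes the length at compile time and the expansion above
   reduces to the constant 5; otherwise a strlen insn is used when the
   target provides one, and NULL_RTX is returned so the caller emits a
   normal library call.  */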
3084
3085 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3086 bytes from constant string DATA + OFFSET and return it as a target
3087 constant. */
3088
3089 static rtx
3090 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3091 machine_mode mode)
3092 {
3093 const char *str = (const char *) data;
3094
3095 gcc_assert (offset >= 0
3096 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3097 <= strlen (str) + 1));
3098
3099 return c_readstr (str + offset, mode);
3100 }
3101
3102 /* LEN specifies the length of the block for the memcpy/memset operation.
3103 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3104 In some cases we can make a very likely guess at the maximum size,
3105 which we then store into PROBABLE_MAX_SIZE. */
3106
3107 static void
3108 determine_block_size (tree len, rtx len_rtx,
3109 unsigned HOST_WIDE_INT *min_size,
3110 unsigned HOST_WIDE_INT *max_size,
3111 unsigned HOST_WIDE_INT *probable_max_size)
3112 {
3113 if (CONST_INT_P (len_rtx))
3114 {
3115 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3116 return;
3117 }
3118 else
3119 {
3120 wide_int min, max;
3121 enum value_range_type range_type = VR_UNDEFINED;
3122
3123 /* Determine bounds from the type. */
3124 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3125 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3126 else
3127 *min_size = 0;
3128 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3129 *probable_max_size = *max_size
3130 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3131 else
3132 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3133
3134 if (TREE_CODE (len) == SSA_NAME)
3135 range_type = get_range_info (len, &min, &max);
3136 if (range_type == VR_RANGE)
3137 {
3138 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3139 *min_size = min.to_uhwi ();
3140 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3141 *probable_max_size = *max_size = max.to_uhwi ();
3142 }
3143 else if (range_type == VR_ANTI_RANGE)
3144 {
3145 /* An anti-range 0...N lets us determine the minimal size as N+1. */
3146 if (min == 0)
3147 {
3148 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3149 *min_size = max.to_uhwi () + 1;
3150 }
3151 /* Code like
3152
3153 int n;
3154 if (n < 100)
3155 memcpy (a, b, n)
3156
3157 produces an anti-range that allows negative values of N. We can
3158 still use this information to guess that N is not negative.
3159 */
3160 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3161 *probable_max_size = min.to_uhwi () - 1;
3162 }
3163 }
3164 gcc_checking_assert (*max_size <=
3165 (unsigned HOST_WIDE_INT)
3166 GET_MODE_MASK (GET_MODE (len_rtx)));
3167 }
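
/* Illustrative example (editorial addition): for

     if (n < 100)
       memcpy (a, b, n);

   with unsigned n, VRP records the range [0, 99] on the SSA_NAME, and
   the code above then sets MAX_SIZE = PROBABLE_MAX_SIZE = 99 while
   MIN_SIZE stays 0.  */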
3168
3169 /* Helper function to do the actual work for expand_builtin_memcpy. */
3170
3171 static rtx
3172 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3173 {
3174 const char *src_str;
3175 unsigned int src_align = get_pointer_alignment (src);
3176 unsigned int dest_align = get_pointer_alignment (dest);
3177 rtx dest_mem, src_mem, dest_addr, len_rtx;
3178 HOST_WIDE_INT expected_size = -1;
3179 unsigned int expected_align = 0;
3180 unsigned HOST_WIDE_INT min_size;
3181 unsigned HOST_WIDE_INT max_size;
3182 unsigned HOST_WIDE_INT probable_max_size;
3183
3184 /* If DEST is not a pointer type, call the normal function. */
3185 if (dest_align == 0)
3186 return NULL_RTX;
3187
3188 /* If SRC is not a pointer type, don't do this
3189 operation in-line. */
3190 if (src_align == 0)
3191 return NULL_RTX;
3192
3193 if (currently_expanding_gimple_stmt)
3194 stringop_block_profile (currently_expanding_gimple_stmt,
3195 &expected_align, &expected_size);
3196
3197 if (expected_align < dest_align)
3198 expected_align = dest_align;
3199 dest_mem = get_memory_rtx (dest, len);
3200 set_mem_align (dest_mem, dest_align);
3201 len_rtx = expand_normal (len);
3202 determine_block_size (len, len_rtx, &min_size, &max_size,
3203 &probable_max_size);
3204 src_str = c_getstr (src);
3205
3206 /* If SRC is a string constant and block move would be done
3207 by pieces, we can avoid loading the string from memory
3208 and only store the computed constants. */
3209 if (src_str
3210 && CONST_INT_P (len_rtx)
3211 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3212 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3213 CONST_CAST (char *, src_str),
3214 dest_align, false))
3215 {
3216 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3217 builtin_memcpy_read_str,
3218 CONST_CAST (char *, src_str),
3219 dest_align, false, 0);
3220 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3221 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3222 return dest_mem;
3223 }
3224
3225 src_mem = get_memory_rtx (src, len);
3226 set_mem_align (src_mem, src_align);
3227
3228 /* Copy word part most expediently. */
3229 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3230 CALL_EXPR_TAILCALL (exp)
3231 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3232 expected_align, expected_size,
3233 min_size, max_size, probable_max_size);
3234
3235 if (dest_addr == 0)
3236 {
3237 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3238 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3239 }
3240
3241 return dest_addr;
3242 }
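
/* Illustrative note (editorial addition): the string-constant shortcut
   above means that

     memcpy (buf, "abc", 4);

   need not load "abc" from memory at run time; store_by_pieces can
   emit the four bytes (including the terminating NUL) as immediate
   stores when the target allows it.  */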
3243
3244 /* Expand a call EXP to the memcpy builtin.
3245 Return NULL_RTX if we failed; the caller should emit a normal call,
3246 otherwise try to get the result in TARGET, if convenient (and in
3247 mode MODE if that's convenient). */
3248
3249 static rtx
3250 expand_builtin_memcpy (tree exp, rtx target)
3251 {
3252 if (!validate_arglist (exp,
3253 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3254 return NULL_RTX;
3255 else
3256 {
3257 tree dest = CALL_EXPR_ARG (exp, 0);
3258 tree src = CALL_EXPR_ARG (exp, 1);
3259 tree len = CALL_EXPR_ARG (exp, 2);
3260 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3261 }
3262 }
3263
3264 /* Expand an instrumented call EXP to the memcpy builtin.
3265 Return NULL_RTX if we failed; the caller should emit a normal call,
3266 otherwise try to get the result in TARGET, if convenient (and in
3267 mode MODE if that's convenient). */
3268
3269 static rtx
3270 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3271 {
3272 if (!validate_arglist (exp,
3273 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3274 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3275 INTEGER_TYPE, VOID_TYPE))
3276 return NULL_RTX;
3277 else
3278 {
3279 tree dest = CALL_EXPR_ARG (exp, 0);
3280 tree src = CALL_EXPR_ARG (exp, 2);
3281 tree len = CALL_EXPR_ARG (exp, 4);
3282 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3283
3284 /* Return src bounds with the result. */
3285 if (res)
3286 {
3287 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3288 expand_normal (CALL_EXPR_ARG (exp, 1)));
3289 res = chkp_join_splitted_slot (res, bnd);
3290 }
3291 return res;
3292 }
3293 }
3294
3295 /* Expand a call EXP to the mempcpy builtin.
3296 Return NULL_RTX if we failed; the caller should emit a normal call,
3297 otherwise try to get the result in TARGET, if convenient (and in
3298 mode MODE if that's convenient). If ENDP is 0 return the
3299 destination pointer, if ENDP is 1 return the end pointer ala
3300 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3301 stpcpy. */
3302
3303 static rtx
3304 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3305 {
3306 if (!validate_arglist (exp,
3307 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3308 return NULL_RTX;
3309 else
3310 {
3311 tree dest = CALL_EXPR_ARG (exp, 0);
3312 tree src = CALL_EXPR_ARG (exp, 1);
3313 tree len = CALL_EXPR_ARG (exp, 2);
3314 return expand_builtin_mempcpy_args (dest, src, len,
3315 target, mode, /*endp=*/ 1,
3316 exp);
3317 }
3318 }
3319
3320 /* Expand an instrumented call EXP to the mempcpy builtin.
3321 Return NULL_RTX if we failed; the caller should emit a normal call,
3322 otherwise try to get the result in TARGET, if convenient (and in
3323 mode MODE if that's convenient). */
3324
3325 static rtx
3326 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3327 {
3328 if (!validate_arglist (exp,
3329 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3330 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3331 INTEGER_TYPE, VOID_TYPE))
3332 return NULL_RTX;
3333 else
3334 {
3335 tree dest = CALL_EXPR_ARG (exp, 0);
3336 tree src = CALL_EXPR_ARG (exp, 2);
3337 tree len = CALL_EXPR_ARG (exp, 4);
3338 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3339 mode, 1, exp);
3340
3341 /* Return DEST bounds with the result. */
3342 if (res)
3343 {
3344 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3345 expand_normal (CALL_EXPR_ARG (exp, 1)));
3346 res = chkp_join_splitted_slot (res, bnd);
3347 }
3348 return res;
3349 }
3350 }
3351
3352 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3353 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3354 so that this can also be called without constructing an actual CALL_EXPR.
3355 The other arguments and return value are the same as for
3356 expand_builtin_mempcpy. */
3357
3358 static rtx
3359 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3360 rtx target, machine_mode mode, int endp,
3361 tree orig_exp)
3362 {
3363 tree fndecl = get_callee_fndecl (orig_exp);
3364
3365 /* If the return value is ignored, transform mempcpy into memcpy. */
3366 if (target == const0_rtx
3367 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3368 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3369 {
3370 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3371 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3372 dest, src, len);
3373 return expand_expr (result, target, mode, EXPAND_NORMAL);
3374 }
3375 else if (target == const0_rtx
3376 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3377 {
3378 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3379 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3380 dest, src, len);
3381 return expand_expr (result, target, mode, EXPAND_NORMAL);
3382 }
3383 else
3384 {
3385 const char *src_str;
3386 unsigned int src_align = get_pointer_alignment (src);
3387 unsigned int dest_align = get_pointer_alignment (dest);
3388 rtx dest_mem, src_mem, len_rtx;
3389
3390 /* If either SRC or DEST is not a pointer type, don't do this
3391 operation in-line. */
3392 if (dest_align == 0 || src_align == 0)
3393 return NULL_RTX;
3394
3395 /* If LEN is not constant, call the normal function. */
3396 if (! tree_fits_uhwi_p (len))
3397 return NULL_RTX;
3398
3399 len_rtx = expand_normal (len);
3400 src_str = c_getstr (src);
3401
3402 /* If SRC is a string constant and block move would be done
3403 by pieces, we can avoid loading the string from memory
3404 and only store the computed constants. */
3405 if (src_str
3406 && CONST_INT_P (len_rtx)
3407 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3408 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3409 CONST_CAST (char *, src_str),
3410 dest_align, false))
3411 {
3412 dest_mem = get_memory_rtx (dest, len);
3413 set_mem_align (dest_mem, dest_align);
3414 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3415 builtin_memcpy_read_str,
3416 CONST_CAST (char *, src_str),
3417 dest_align, false, endp);
3418 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3419 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3420 return dest_mem;
3421 }
3422
3423 if (CONST_INT_P (len_rtx)
3424 && can_move_by_pieces (INTVAL (len_rtx),
3425 MIN (dest_align, src_align)))
3426 {
3427 dest_mem = get_memory_rtx (dest, len);
3428 set_mem_align (dest_mem, dest_align);
3429 src_mem = get_memory_rtx (src, len);
3430 set_mem_align (src_mem, src_align);
3431 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3432 MIN (dest_align, src_align), endp);
3433 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3434 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3435 return dest_mem;
3436 }
3437
3438 return NULL_RTX;
3439 }
3440 }
3441
3442 #ifndef HAVE_movstr
3443 # define HAVE_movstr 0
3444 # define CODE_FOR_movstr CODE_FOR_nothing
3445 #endif
3446
3447 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3448 we failed; the caller should emit a normal call; otherwise try to
3449 get the result in TARGET, if convenient. If ENDP is 0 return the
3450 destination pointer, if ENDP is 1 return the end pointer ala
3451 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3452 stpcpy. */
3453
3454 static rtx
3455 expand_movstr (tree dest, tree src, rtx target, int endp)
3456 {
3457 struct expand_operand ops[3];
3458 rtx dest_mem;
3459 rtx src_mem;
3460
3461 if (!HAVE_movstr)
3462 return NULL_RTX;
3463
3464 dest_mem = get_memory_rtx (dest, NULL);
3465 src_mem = get_memory_rtx (src, NULL);
3466 if (!endp)
3467 {
3468 target = force_reg (Pmode, XEXP (dest_mem, 0));
3469 dest_mem = replace_equiv_address (dest_mem, target);
3470 }
3471
3472 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3473 create_fixed_operand (&ops[1], dest_mem);
3474 create_fixed_operand (&ops[2], src_mem);
3475 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3476 return NULL_RTX;
3477
3478 if (endp && target != const0_rtx)
3479 {
3480 target = ops[0].value;
3481 /* movstr is supposed to set end to the address of the NUL
3482 terminator. If the caller requested a mempcpy-like return value,
3483 adjust it. */
3484 if (endp == 1)
3485 {
3486 rtx tem = plus_constant (GET_MODE (target),
3487 gen_lowpart (GET_MODE (target), target), 1);
3488 emit_move_insn (target, force_operand (tem, NULL_RTX));
3489 }
3490 }
3491 return target;
3492 }
3493
3494 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3495 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3496 try to get the result in TARGET, if convenient (and in mode MODE if that's
3497 convenient). */
3498
3499 static rtx
3500 expand_builtin_strcpy (tree exp, rtx target)
3501 {
3502 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3503 {
3504 tree dest = CALL_EXPR_ARG (exp, 0);
3505 tree src = CALL_EXPR_ARG (exp, 1);
3506 return expand_builtin_strcpy_args (dest, src, target);
3507 }
3508 return NULL_RTX;
3509 }
3510
3511 /* Helper function to do the actual work for expand_builtin_strcpy. The
3512 arguments to the builtin_strcpy call DEST and SRC are broken out
3513 so that this can also be called without constructing an actual CALL_EXPR.
3514 The other arguments and return value are the same as for
3515 expand_builtin_strcpy. */
3516
3517 static rtx
3518 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3519 {
3520 return expand_movstr (dest, src, target, /*endp=*/0);
3521 }
3522
3523 /* Expand a call EXP to the stpcpy builtin.
3524 Return NULL_RTX if we failed; the caller should emit a normal call;
3525 otherwise try to get the result in TARGET, if convenient (and in
3526 mode MODE if that's convenient). */
3527
3528 static rtx
3529 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3530 {
3531 tree dst, src;
3532 location_t loc = EXPR_LOCATION (exp);
3533
3534 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3535 return NULL_RTX;
3536
3537 dst = CALL_EXPR_ARG (exp, 0);
3538 src = CALL_EXPR_ARG (exp, 1);
3539
3540 /* If the return value is ignored, transform stpcpy into strcpy. */
3541 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3542 {
3543 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3544 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3545 return expand_expr (result, target, mode, EXPAND_NORMAL);
3546 }
3547 else
3548 {
3549 tree len, lenp1;
3550 rtx ret;
3551
3552 /* Ensure we get an actual string whose length can be evaluated at
3553 compile-time, not an expression containing a string. This is
3554 because the latter will potentially produce pessimized code
3555 when used to compute the return value. */
3556 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3557 return expand_movstr (dst, src, target, /*endp=*/2);
3558
3559 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3560 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3561 target, mode, /*endp=*/2,
3562 exp);
3563
3564 if (ret)
3565 return ret;
3566
3567 if (TREE_CODE (len) == INTEGER_CST)
3568 {
3569 rtx len_rtx = expand_normal (len);
3570
3571 if (CONST_INT_P (len_rtx))
3572 {
3573 ret = expand_builtin_strcpy_args (dst, src, target);
3574
3575 if (ret)
3576 {
3577 if (! target)
3578 {
3579 if (mode != VOIDmode)
3580 target = gen_reg_rtx (mode);
3581 else
3582 target = gen_reg_rtx (GET_MODE (ret));
3583 }
3584 if (GET_MODE (target) != GET_MODE (ret))
3585 ret = gen_lowpart (GET_MODE (target), ret);
3586
3587 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3588 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3589 gcc_assert (ret);
3590
3591 return target;
3592 }
3593 }
3594 }
3595
3596 return expand_movstr (dst, src, target, /*endp=*/2);
3597 }
3598 }
3599
3600 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3601 bytes from constant string DATA + OFFSET and return it as target
3602 constant. */
3603
3604 rtx
3605 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3606 machine_mode mode)
3607 {
3608 const char *str = (const char *) data;
3609
3610 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3611 return const0_rtx;
3612
3613 return c_readstr (str + offset, mode);
3614 }
3615
3616 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3617 NULL_RTX if we failed; the caller should emit a normal call. */
3618
3619 static rtx
3620 expand_builtin_strncpy (tree exp, rtx target)
3621 {
3622 location_t loc = EXPR_LOCATION (exp);
3623
3624 if (validate_arglist (exp,
3625 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3626 {
3627 tree dest = CALL_EXPR_ARG (exp, 0);
3628 tree src = CALL_EXPR_ARG (exp, 1);
3629 tree len = CALL_EXPR_ARG (exp, 2);
3630 tree slen = c_strlen (src, 1);
3631
3632 /* We must be passed a constant len and src parameter. */
3633 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3634 return NULL_RTX;
3635
3636 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3637
3638 /* We're required to pad with trailing zeros if the requested
3639 len is greater than strlen(s2)+1. In that case try to
3640 use store_by_pieces; if it fails, punt. */
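/* E.g. strncpy (d, "ab", 5) must store 'a', 'b' and then three NUL
bytes, so the whole five-byte block is emitted by store_by_pieces. */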
3641 if (tree_int_cst_lt (slen, len))
3642 {
3643 unsigned int dest_align = get_pointer_alignment (dest);
3644 const char *p = c_getstr (src);
3645 rtx dest_mem;
3646
3647 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3648 || !can_store_by_pieces (tree_to_uhwi (len),
3649 builtin_strncpy_read_str,
3650 CONST_CAST (char *, p),
3651 dest_align, false))
3652 return NULL_RTX;
3653
3654 dest_mem = get_memory_rtx (dest, len);
3655 store_by_pieces (dest_mem, tree_to_uhwi (len),
3656 builtin_strncpy_read_str,
3657 CONST_CAST (char *, p), dest_align, false, 0);
3658 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3659 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3660 return dest_mem;
3661 }
3662 }
3663 return NULL_RTX;
3664 }
3665
3666 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3667 bytes from constant string DATA + OFFSET and return it as target
3668 constant. */
3669
3670 rtx
3671 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3672 machine_mode mode)
3673 {
3674 const char *c = (const char *) data;
3675 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3676
3677 memset (p, *c, GET_MODE_SIZE (mode));
3678
3679 return c_readstr (p, mode);
3680 }
3681
3682 /* Callback routine for store_by_pieces. Return the RTL of a register
3683 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3684 char value given in the RTL register data. For example, if mode is
3685 4 bytes wide, return the RTL for 0x01010101*data. */
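/* E.g. if DATA holds 0xab and MODE is SImode, COEFF below is
0x01010101 and the returned register holds
0xab * 0x01010101 == 0xabababab. */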
3686
3687 static rtx
3688 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3689 machine_mode mode)
3690 {
3691 rtx target, coeff;
3692 size_t size;
3693 char *p;
3694
3695 size = GET_MODE_SIZE (mode);
3696 if (size == 1)
3697 return (rtx) data;
3698
3699 p = XALLOCAVEC (char, size);
3700 memset (p, 1, size);
3701 coeff = c_readstr (p, mode);
3702
3703 target = convert_to_mode (mode, (rtx) data, 1);
3704 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3705 return force_reg (mode, target);
3706 }
3707
3708 /* Expand expression EXP, which is a call to the memset builtin. Return
3709 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3710 try to get the result in TARGET, if convenient (and in mode MODE if that's
3711 convenient). */
3712
3713 static rtx
3714 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3715 {
3716 if (!validate_arglist (exp,
3717 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3718 return NULL_RTX;
3719 else
3720 {
3721 tree dest = CALL_EXPR_ARG (exp, 0);
3722 tree val = CALL_EXPR_ARG (exp, 1);
3723 tree len = CALL_EXPR_ARG (exp, 2);
3724 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3725 }
3726 }
3727
3728 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3729 Return NULL_RTX if we failed; the caller should emit a normal call; otherwise
3730 try to get the result in TARGET, if convenient (and in mode MODE if that's
3731 convenient). */
3732
3733 static rtx
3734 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3735 {
3736 if (!validate_arglist (exp,
3737 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3738 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3739 return NULL_RTX;
3740 else
3741 {
3742 tree dest = CALL_EXPR_ARG (exp, 0);
3743 tree val = CALL_EXPR_ARG (exp, 2);
3744 tree len = CALL_EXPR_ARG (exp, 3);
3745 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3746
3747 /* Return DEST bounds with the result. */
3748 if (res)
3749 {
3750 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3751 expand_normal (CALL_EXPR_ARG (exp, 1)));
3752 res = chkp_join_splitted_slot (res, bnd);
3753 }
3754 return res;
3755 }
3756 }
3757
3758 /* Helper function to do the actual work for expand_builtin_memset. The
3759 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3760 so that this can also be called without constructing an actual CALL_EXPR.
3761 The other arguments and return value are the same as for
3762 expand_builtin_memset. */
3763
3764 static rtx
3765 expand_builtin_memset_args (tree dest, tree val, tree len,
3766 rtx target, machine_mode mode, tree orig_exp)
3767 {
3768 tree fndecl, fn;
3769 enum built_in_function fcode;
3770 machine_mode val_mode;
3771 char c;
3772 unsigned int dest_align;
3773 rtx dest_mem, dest_addr, len_rtx;
3774 HOST_WIDE_INT expected_size = -1;
3775 unsigned int expected_align = 0;
3776 unsigned HOST_WIDE_INT min_size;
3777 unsigned HOST_WIDE_INT max_size;
3778 unsigned HOST_WIDE_INT probable_max_size;
3779
3780 dest_align = get_pointer_alignment (dest);
3781
3782 /* If DEST is not a pointer type, don't do this operation in-line. */
3783 if (dest_align == 0)
3784 return NULL_RTX;
3785
3786 if (currently_expanding_gimple_stmt)
3787 stringop_block_profile (currently_expanding_gimple_stmt,
3788 &expected_align, &expected_size);
3789
3790 if (expected_align < dest_align)
3791 expected_align = dest_align;
3792
3793 /* If the LEN parameter is zero, return DEST. */
3794 if (integer_zerop (len))
3795 {
3796 /* Evaluate and ignore VAL in case it has side-effects. */
3797 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3798 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3799 }
3800
3801 /* Stabilize the arguments in case we fail. */
3802 dest = builtin_save_expr (dest);
3803 val = builtin_save_expr (val);
3804 len = builtin_save_expr (len);
3805
3806 len_rtx = expand_normal (len);
3807 determine_block_size (len, len_rtx, &min_size, &max_size,
3808 &probable_max_size);
3809 dest_mem = get_memory_rtx (dest, len);
3810 val_mode = TYPE_MODE (unsigned_char_type_node);
3811
3812 if (TREE_CODE (val) != INTEGER_CST)
3813 {
3814 rtx val_rtx;
3815
3816 val_rtx = expand_normal (val);
3817 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3818
3819 /* Assume that we can memset by pieces if we can store
3820 the coefficients by pieces (in the required modes).
3821 We can't pass builtin_memset_gen_str as that emits RTL. */
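/* The dummy byte value 1 below only probes whether stores in the
needed modes are possible; the real values are produced by
builtin_memset_gen_str when store_by_pieces runs. */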
3822 c = 1;
3823 if (tree_fits_uhwi_p (len)
3824 && can_store_by_pieces (tree_to_uhwi (len),
3825 builtin_memset_read_str, &c, dest_align,
3826 true))
3827 {
3828 val_rtx = force_reg (val_mode, val_rtx);
3829 store_by_pieces (dest_mem, tree_to_uhwi (len),
3830 builtin_memset_gen_str, val_rtx, dest_align,
3831 true, 0);
3832 }
3833 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3834 dest_align, expected_align,
3835 expected_size, min_size, max_size,
3836 probable_max_size))
3837 goto do_libcall;
3838
3839 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3840 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3841 return dest_mem;
3842 }
3843
3844 if (target_char_cast (val, &c))
3845 goto do_libcall;
3846
3847 if (c)
3848 {
3849 if (tree_fits_uhwi_p (len)
3850 && can_store_by_pieces (tree_to_uhwi (len),
3851 builtin_memset_read_str, &c, dest_align,
3852 true))
3853 store_by_pieces (dest_mem, tree_to_uhwi (len),
3854 builtin_memset_read_str, &c, dest_align, true, 0);
3855 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3856 gen_int_mode (c, val_mode),
3857 dest_align, expected_align,
3858 expected_size, min_size, max_size,
3859 probable_max_size))
3860 goto do_libcall;
3861
3862 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3863 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3864 return dest_mem;
3865 }
3866
3867 set_mem_align (dest_mem, dest_align);
3868 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3869 CALL_EXPR_TAILCALL (orig_exp)
3870 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3871 expected_align, expected_size,
3872 min_size, max_size,
3873 probable_max_size);
3874
3875 if (dest_addr == 0)
3876 {
3877 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3878 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3879 }
3880
3881 return dest_addr;
3882
3883 do_libcall:
3884 fndecl = get_callee_fndecl (orig_exp);
3885 fcode = DECL_FUNCTION_CODE (fndecl);
3886 if (fcode == BUILT_IN_MEMSET
3887 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3888 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3889 dest, val, len);
3890 else if (fcode == BUILT_IN_BZERO)
3891 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3892 dest, len);
3893 else
3894 gcc_unreachable ();
3895 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3896 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3897 return expand_call (fn, target, target == const0_rtx);
3898 }
3899
3900 /* Expand expression EXP, which is a call to the bzero builtin. Return
3901 NULL_RTX if we failed; the caller should emit a normal call. */
3902
3903 static rtx
3904 expand_builtin_bzero (tree exp)
3905 {
3906 tree dest, size;
3907 location_t loc = EXPR_LOCATION (exp);
3908
3909 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3910 return NULL_RTX;
3911
3912 dest = CALL_EXPR_ARG (exp, 0);
3913 size = CALL_EXPR_ARG (exp, 1);
3914
3915 /* New argument list transforming bzero(ptr x, int y) to
3916 memset(ptr x, int 0, size_t y). This is done this way
3917 so that if it isn't expanded inline, we fall back to
3918 calling bzero instead of memset. */
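/* In effect bzero (p, n) is expanded just as memset (p, 0, (size_t) n)
would be, while ORIG_EXP still names bzero for the library fallback. */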
3919
3920 return expand_builtin_memset_args (dest, integer_zero_node,
3921 fold_convert_loc (loc,
3922 size_type_node, size),
3923 const0_rtx, VOIDmode, exp);
3924 }
3925
3926 /* Expand expression EXP, which is a call to the memcmp built-in function.
3927 Return NULL_RTX if we failed and the caller should emit a normal call,
3928 otherwise try to get the result in TARGET, if convenient (and in mode
3929 MODE, if that's convenient). */
3930
3931 static rtx
3932 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3933 ATTRIBUTE_UNUSED machine_mode mode)
3934 {
3935 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3936
3937 if (!validate_arglist (exp,
3938 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3939 return NULL_RTX;
3940
3941 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3942 implementing memcmp because it will stop if it encounters two
3943 zero bytes. */
3944 #if defined HAVE_cmpmemsi
3945 {
3946 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3947 rtx result;
3948 rtx insn;
3949 tree arg1 = CALL_EXPR_ARG (exp, 0);
3950 tree arg2 = CALL_EXPR_ARG (exp, 1);
3951 tree len = CALL_EXPR_ARG (exp, 2);
3952
3953 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3954 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3955 machine_mode insn_mode;
3956
3957 if (HAVE_cmpmemsi)
3958 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3959 else
3960 return NULL_RTX;
3961
3962 /* If we can't determine the alignment of either argument, call the function. */
3963 if (arg1_align == 0 || arg2_align == 0)
3964 return NULL_RTX;
3965
3966 /* Make a place to write the result of the instruction. */
3967 result = target;
3968 if (! (result != 0
3969 && REG_P (result) && GET_MODE (result) == insn_mode
3970 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3971 result = gen_reg_rtx (insn_mode);
3972
3973 arg1_rtx = get_memory_rtx (arg1, len);
3974 arg2_rtx = get_memory_rtx (arg2, len);
3975 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3976
3977 /* Set MEM_SIZE as appropriate. */
3978 if (CONST_INT_P (arg3_rtx))
3979 {
3980 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3981 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3982 }
3983
3984 if (HAVE_cmpmemsi)
3985 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3986 GEN_INT (MIN (arg1_align, arg2_align)));
3987 else
3988 gcc_unreachable ();
3989
3990 if (insn)
3991 emit_insn (insn);
3992 else
3993 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3994 TYPE_MODE (integer_type_node), 3,
3995 XEXP (arg1_rtx, 0), Pmode,
3996 XEXP (arg2_rtx, 0), Pmode,
3997 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3998 TYPE_UNSIGNED (sizetype)),
3999 TYPE_MODE (sizetype));
4000
4001 /* Return the value in the proper mode for this function. */
4002 mode = TYPE_MODE (TREE_TYPE (exp));
4003 if (GET_MODE (result) == mode)
4004 return result;
4005 else if (target != 0)
4006 {
4007 convert_move (target, result, 0);
4008 return target;
4009 }
4010 else
4011 return convert_to_mode (mode, result, 0);
4012 }
4013 #endif /* HAVE_cmpmemsi. */
4014
4015 return NULL_RTX;
4016 }
4017
4018 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4019 NULL_RTX if we failed; the caller should emit a normal call; otherwise
4020 try to get the result in TARGET, if convenient. */
4021
4022 static rtx
4023 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4024 {
4025 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4026 return NULL_RTX;
4027
4028 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4029 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4030 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4031 {
4032 rtx arg1_rtx, arg2_rtx;
4033 rtx result, insn = NULL_RTX;
4034 tree fndecl, fn;
4035 tree arg1 = CALL_EXPR_ARG (exp, 0);
4036 tree arg2 = CALL_EXPR_ARG (exp, 1);
4037
4038 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4039 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4040
4041 /* If we can't determine the alignment of either argument, call the function. */
4042 if (arg1_align == 0 || arg2_align == 0)
4043 return NULL_RTX;
4044
4045 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4046 arg1 = builtin_save_expr (arg1);
4047 arg2 = builtin_save_expr (arg2);
4048
4049 arg1_rtx = get_memory_rtx (arg1, NULL);
4050 arg2_rtx = get_memory_rtx (arg2, NULL);
4051
4052 #ifdef HAVE_cmpstrsi
4053 /* Try to call cmpstrsi. */
4054 if (HAVE_cmpstrsi)
4055 {
4056 machine_mode insn_mode
4057 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4058
4059 /* Make a place to write the result of the instruction. */
4060 result = target;
4061 if (! (result != 0
4062 && REG_P (result) && GET_MODE (result) == insn_mode
4063 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4064 result = gen_reg_rtx (insn_mode);
4065
4066 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4067 GEN_INT (MIN (arg1_align, arg2_align)));
4068 }
4069 #endif
4070 #ifdef HAVE_cmpstrnsi
4071 /* Try to determine at least one length and call cmpstrnsi. */
4072 if (!insn && HAVE_cmpstrnsi)
4073 {
4074 tree len;
4075 rtx arg3_rtx;
4076
4077 machine_mode insn_mode
4078 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4079 tree len1 = c_strlen (arg1, 1);
4080 tree len2 = c_strlen (arg2, 1);
4081
4082 if (len1)
4083 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4084 if (len2)
4085 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4086
4087 /* If we don't have a constant length for the first, use the length
4088 of the second, if we know it. We don't require a constant for
4089 this case; some cost analysis could be done if both are available
4090 but neither is constant. For now, assume they're equally cheap,
4091 unless one has side effects. If both strings have constant lengths,
4092 use the smaller. */
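/* E.g. comparing a constant "abc" (LEN1 == 4 after the increment)
against a string of unknown length uses LEN1; with two constants
the smaller incremented length wins. */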
4093
4094 if (!len1)
4095 len = len2;
4096 else if (!len2)
4097 len = len1;
4098 else if (TREE_SIDE_EFFECTS (len1))
4099 len = len2;
4100 else if (TREE_SIDE_EFFECTS (len2))
4101 len = len1;
4102 else if (TREE_CODE (len1) != INTEGER_CST)
4103 len = len2;
4104 else if (TREE_CODE (len2) != INTEGER_CST)
4105 len = len1;
4106 else if (tree_int_cst_lt (len1, len2))
4107 len = len1;
4108 else
4109 len = len2;
4110
4111 /* If both arguments have side effects, we cannot optimize. */
4112 if (!len || TREE_SIDE_EFFECTS (len))
4113 goto do_libcall;
4114
4115 arg3_rtx = expand_normal (len);
4116
4117 /* Make a place to write the result of the instruction. */
4118 result = target;
4119 if (! (result != 0
4120 && REG_P (result) && GET_MODE (result) == insn_mode
4121 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4122 result = gen_reg_rtx (insn_mode);
4123
4124 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4125 GEN_INT (MIN (arg1_align, arg2_align)));
4126 }
4127 #endif
4128
4129 if (insn)
4130 {
4131 machine_mode mode;
4132 emit_insn (insn);
4133
4134 /* Return the value in the proper mode for this function. */
4135 mode = TYPE_MODE (TREE_TYPE (exp));
4136 if (GET_MODE (result) == mode)
4137 return result;
4138 if (target == 0)
4139 return convert_to_mode (mode, result, 0);
4140 convert_move (target, result, 0);
4141 return target;
4142 }
4143
4144 /* Expand the library call ourselves using a stabilized argument
4145 list to avoid re-evaluating the function's arguments twice. */
4146 #ifdef HAVE_cmpstrnsi
4147 do_libcall:
4148 #endif
4149 fndecl = get_callee_fndecl (exp);
4150 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4151 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4152 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4153 return expand_call (fn, target, target == const0_rtx);
4154 }
4155 #endif
4156 return NULL_RTX;
4157 }
4158
4159 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4160 NULL_RTX if we failed; the caller should emit a normal call; otherwise
4161 try to get the result in TARGET, if convenient. */
4162
4163 static rtx
4164 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4165 ATTRIBUTE_UNUSED machine_mode mode)
4166 {
4167 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4168
4169 if (!validate_arglist (exp,
4170 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4171 return NULL_RTX;
4172
4173 /* If c_strlen can determine an expression for one of the string
4174 lengths, and it doesn't have side effects, then emit cmpstrnsi
4175 using length MIN(strlen(string)+1, arg3). */
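/* E.g. for strncmp (s, "hi", 100) at most strlen ("hi") + 1 == 3 bytes
can decide the result, so the expansion compares MIN (3, 100) bytes. */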
4176 #ifdef HAVE_cmpstrnsi
4177 if (HAVE_cmpstrnsi)
4178 {
4179 tree len, len1, len2;
4180 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4181 rtx result, insn;
4182 tree fndecl, fn;
4183 tree arg1 = CALL_EXPR_ARG (exp, 0);
4184 tree arg2 = CALL_EXPR_ARG (exp, 1);
4185 tree arg3 = CALL_EXPR_ARG (exp, 2);
4186
4187 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4188 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4189 machine_mode insn_mode
4190 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4191
4192 len1 = c_strlen (arg1, 1);
4193 len2 = c_strlen (arg2, 1);
4194
4195 if (len1)
4196 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4197 if (len2)
4198 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4199
4200 /* If we don't have a constant length for the first, use the length
4201 of the second, if we know it. We don't require a constant for
4202 this case; some cost analysis could be done if both are available
4203 but neither is constant. For now, assume they're equally cheap,
4204 unless one has side effects. If both strings have constant lengths,
4205 use the smaller. */
4206
4207 if (!len1)
4208 len = len2;
4209 else if (!len2)
4210 len = len1;
4211 else if (TREE_SIDE_EFFECTS (len1))
4212 len = len2;
4213 else if (TREE_SIDE_EFFECTS (len2))
4214 len = len1;
4215 else if (TREE_CODE (len1) != INTEGER_CST)
4216 len = len2;
4217 else if (TREE_CODE (len2) != INTEGER_CST)
4218 len = len1;
4219 else if (tree_int_cst_lt (len1, len2))
4220 len = len1;
4221 else
4222 len = len2;
4223
4224 /* If both arguments have side effects, we cannot optimize. */
4225 if (!len || TREE_SIDE_EFFECTS (len))
4226 return NULL_RTX;
4227
4228 /* The actual new length parameter is MIN(len,arg3). */
4229 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4230 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4231
4232 /* If we can't determine the alignment of either argument, call the function. */
4233 if (arg1_align == 0 || arg2_align == 0)
4234 return NULL_RTX;
4235
4236 /* Make a place to write the result of the instruction. */
4237 result = target;
4238 if (! (result != 0
4239 && REG_P (result) && GET_MODE (result) == insn_mode
4240 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4241 result = gen_reg_rtx (insn_mode);
4242
4243 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4244 arg1 = builtin_save_expr (arg1);
4245 arg2 = builtin_save_expr (arg2);
4246 len = builtin_save_expr (len);
4247
4248 arg1_rtx = get_memory_rtx (arg1, len);
4249 arg2_rtx = get_memory_rtx (arg2, len);
4250 arg3_rtx = expand_normal (len);
4251 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4252 GEN_INT (MIN (arg1_align, arg2_align)));
4253 if (insn)
4254 {
4255 emit_insn (insn);
4256
4257 /* Return the value in the proper mode for this function. */
4258 mode = TYPE_MODE (TREE_TYPE (exp));
4259 if (GET_MODE (result) == mode)
4260 return result;
4261 if (target == 0)
4262 return convert_to_mode (mode, result, 0);
4263 convert_move (target, result, 0);
4264 return target;
4265 }
4266
4267 /* Expand the library call ourselves using a stabilized argument
4268 list to avoid re-evaluating the function's arguments twice. */
4269 fndecl = get_callee_fndecl (exp);
4270 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4271 arg1, arg2, len);
4272 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4273 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4274 return expand_call (fn, target, target == const0_rtx);
4275 }
4276 #endif
4277 return NULL_RTX;
4278 }
4279
4280 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4281 if that's convenient. */
4282
4283 rtx
4284 expand_builtin_saveregs (void)
4285 {
4286 rtx val;
4287 rtx_insn *seq;
4288
4289 /* Don't do __builtin_saveregs more than once in a function.
4290 Save the result of the first call and reuse it. */
4291 if (saveregs_value != 0)
4292 return saveregs_value;
4293
4294 /* When this function is called, it means that registers must be
4295 saved on entry to this function. So we migrate the call to the
4296 first insn of this function. */
4297
4298 start_sequence ();
4299
4300 /* Do whatever the machine needs done in this case. */
4301 val = targetm.calls.expand_builtin_saveregs ();
4302
4303 seq = get_insns ();
4304 end_sequence ();
4305
4306 saveregs_value = val;
4307
4308 /* Put the insns after the NOTE that starts the function. If this
4309 is inside a start_sequence, make the outer-level insn chain current, so
4310 the code is placed at the start of the function. */
4311 push_topmost_sequence ();
4312 emit_insn_after (seq, entry_of_function ());
4313 pop_topmost_sequence ();
4314
4315 return val;
4316 }
4317
4318 /* Expand a call to __builtin_next_arg. */
4319
4320 static rtx
4321 expand_builtin_next_arg (void)
4322 {
4323 /* Checking arguments is already done in fold_builtin_next_arg
4324 that must be called before this function. */
4325 return expand_binop (ptr_mode, add_optab,
4326 crtl->args.internal_arg_pointer,
4327 crtl->args.arg_offset_rtx,
4328 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4329 }
4330
4331 /* Make it easier for the backends by protecting the valist argument
4332 from multiple evaluations. */
4333
4334 static tree
4335 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4336 {
4337 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4338
4339 /* The current way of determining the type of valist is completely
4340 bogus. We should have the information on the va builtin instead. */
4341 if (!vatype)
4342 vatype = targetm.fn_abi_va_list (cfun->decl);
4343
4344 if (TREE_CODE (vatype) == ARRAY_TYPE)
4345 {
4346 if (TREE_SIDE_EFFECTS (valist))
4347 valist = save_expr (valist);
4348
4349 /* For this case, the backends will be expecting a pointer to
4350 vatype, but it's possible we've actually been given an array
4351 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4352 So fix it. */
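/* E.g. on targets where va_list is struct __va_list_tag[1], VALIST may
arrive either as the array itself or, after decay, as a pointer to its
element; the ADDR_EXPR below yields the pointer form the backends
expect. */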
4353 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4354 {
4355 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4356 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4357 }
4358 }
4359 else
4360 {
4361 tree pt = build_pointer_type (vatype);
4362
4363 if (! needs_lvalue)
4364 {
4365 if (! TREE_SIDE_EFFECTS (valist))
4366 return valist;
4367
4368 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4369 TREE_SIDE_EFFECTS (valist) = 1;
4370 }
4371
4372 if (TREE_SIDE_EFFECTS (valist))
4373 valist = save_expr (valist);
4374 valist = fold_build2_loc (loc, MEM_REF,
4375 vatype, valist, build_int_cst (pt, 0));
4376 }
4377
4378 return valist;
4379 }
4380
4381 /* The "standard" definition of va_list is void*. */
4382
4383 tree
4384 std_build_builtin_va_list (void)
4385 {
4386 return ptr_type_node;
4387 }
4388
4389 /* The "standard" abi va_list is va_list_type_node. */
4390
4391 tree
4392 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4393 {
4394 return va_list_type_node;
4395 }
4396
4397 /* The "standard" type of va_list is va_list_type_node. */
4398
4399 tree
4400 std_canonical_va_list_type (tree type)
4401 {
4402 tree wtype, htype;
4403
4404 if (INDIRECT_REF_P (type))
4405 type = TREE_TYPE (type);
4406 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4407 type = TREE_TYPE (type);
4408 wtype = va_list_type_node;
4409 htype = type;
4410 /* Handle structure va_list types: look through a pointer to the record. */
4411 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4412 htype = TREE_TYPE (htype);
4413 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4414 {
4415 /* If va_list is an array type, the argument may have decayed
4416 to a pointer type, e.g. by being passed to another function.
4417 In that case, unwrap both types so that we can compare the
4418 underlying records. */
4419 if (TREE_CODE (htype) == ARRAY_TYPE
4420 || POINTER_TYPE_P (htype))
4421 {
4422 wtype = TREE_TYPE (wtype);
4423 htype = TREE_TYPE (htype);
4424 }
4425 }
4426 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4427 return va_list_type_node;
4428
4429 return NULL_TREE;
4430 }
4431
4432 /* The "standard" implementation of va_start: just assign `nextarg' to
4433 the variable. */
4434
4435 void
4436 std_expand_builtin_va_start (tree valist, rtx nextarg)
4437 {
4438 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4439 convert_move (va_r, nextarg, 0);
4440
4441 /* We do not have any valid bounds for the pointer, so
4442 just store zero bounds for it. */
4443 if (chkp_function_instrumented_p (current_function_decl))
4444 chkp_expand_bounds_reset_for_mem (valist,
4445 make_tree (TREE_TYPE (valist),
4446 nextarg));
4447 }
4448
4449 /* Expand EXP, a call to __builtin_va_start. */
4450
4451 static rtx
4452 expand_builtin_va_start (tree exp)
4453 {
4454 rtx nextarg;
4455 tree valist;
4456 location_t loc = EXPR_LOCATION (exp);
4457
4458 if (call_expr_nargs (exp) < 2)
4459 {
4460 error_at (loc, "too few arguments to function %<va_start%>");
4461 return const0_rtx;
4462 }
4463
4464 if (fold_builtin_next_arg (exp, true))
4465 return const0_rtx;
4466
4467 nextarg = expand_builtin_next_arg ();
4468 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4469
4470 if (targetm.expand_builtin_va_start)
4471 targetm.expand_builtin_va_start (valist, nextarg);
4472 else
4473 std_expand_builtin_va_start (valist, nextarg);
4474
4475 return const0_rtx;
4476 }
4477
4478 /* Expand EXP, a call to __builtin_va_end. */
4479
4480 static rtx
4481 expand_builtin_va_end (tree exp)
4482 {
4483 tree valist = CALL_EXPR_ARG (exp, 0);
4484
4485 /* Evaluate for side effects, if needed. I hate macros that don't
4486 do that. */
4487 if (TREE_SIDE_EFFECTS (valist))
4488 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4489
4490 return const0_rtx;
4491 }
4492
4493 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4494 builtin rather than just as an assignment in stdarg.h because of the
4495 nastiness of array-type va_list types. */
4496
4497 static rtx
4498 expand_builtin_va_copy (tree exp)
4499 {
4500 tree dst, src, t;
4501 location_t loc = EXPR_LOCATION (exp);
4502
4503 dst = CALL_EXPR_ARG (exp, 0);
4504 src = CALL_EXPR_ARG (exp, 1);
4505
4506 dst = stabilize_va_list_loc (loc, dst, 1);
4507 src = stabilize_va_list_loc (loc, src, 0);
4508
4509 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4510
4511 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4512 {
4513 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4514 TREE_SIDE_EFFECTS (t) = 1;
4515 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4516 }
4517 else
4518 {
4519 rtx dstb, srcb, size;
4520
4521 /* Evaluate to pointers. */
4522 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4523 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4524 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4525 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4526
4527 dstb = convert_memory_address (Pmode, dstb);
4528 srcb = convert_memory_address (Pmode, srcb);
4529
4530 /* "Dereference" to BLKmode memories. */
4531 dstb = gen_rtx_MEM (BLKmode, dstb);
4532 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4533 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4534 srcb = gen_rtx_MEM (BLKmode, srcb);
4535 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4536 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4537
4538 /* Copy. */
4539 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4540 }
4541
4542 return const0_rtx;
4543 }
4544
4545 /* Expand a call to one of the builtin functions __builtin_frame_address or
4546 __builtin_return_address. */
4547
4548 static rtx
4549 expand_builtin_frame_address (tree fndecl, tree exp)
4550 {
4551 /* The argument must be a nonnegative integer constant.
4552 It counts the number of frames to scan up the stack.
4553 The value is the frame address or return address saved in that frame,
depending on which builtin was called. */
4554 if (call_expr_nargs (exp) == 0)
4555 /* Warning about missing arg was already issued. */
4556 return const0_rtx;
4557 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4558 {
4559 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4560 error ("invalid argument to %<__builtin_frame_address%>");
4561 else
4562 error ("invalid argument to %<__builtin_return_address%>");
4563 return const0_rtx;
4564 }
4565 else
4566 {
4567 rtx tem
4568 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4569 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4570
4571 /* Some ports cannot access arbitrary stack frames. */
4572 if (tem == NULL)
4573 {
4574 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4575 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4576 else
4577 warning (0, "unsupported argument to %<__builtin_return_address%>");
4578 return const0_rtx;
4579 }
4580
4581 /* For __builtin_frame_address, return what we've got. */
4582 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4583 return tem;
4584
4585 if (!REG_P (tem)
4586 && ! CONSTANT_P (tem))
4587 tem = copy_addr_to_reg (tem);
4588 return tem;
4589 }
4590 }
4591
4592 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4593 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4594 is the same as for allocate_dynamic_stack_space. */
4595
4596 static rtx
4597 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4598 {
4599 rtx op0;
4600 rtx result;
4601 bool valid_arglist;
4602 unsigned int align;
4603 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4604 == BUILT_IN_ALLOCA_WITH_ALIGN);
4605
4606 valid_arglist
4607 = (alloca_with_align
4608 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4609 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4610
4611 if (!valid_arglist)
4612 return NULL_RTX;
4613
4614 /* Compute the argument. */
4615 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4616
4617 /* Compute the alignment. */
4618 align = (alloca_with_align
4619 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4620 : BIGGEST_ALIGNMENT);
4621
4622 /* Allocate the desired space. */
4623 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4624 result = convert_memory_address (ptr_mode, result);
4625
4626 return result;
4627 }
4628
4629 /* Expand a call to bswap builtin in EXP.
4630 Return NULL_RTX if a normal call should be emitted rather than expanding the
4631 function in-line. If convenient, the result should be placed in TARGET.
4632 SUBTARGET may be used as the target for computing one of EXP's operands. */
4633
4634 static rtx
4635 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4636 rtx subtarget)
4637 {
4638 tree arg;
4639 rtx op0;
4640
4641 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4642 return NULL_RTX;
4643
4644 arg = CALL_EXPR_ARG (exp, 0);
4645 op0 = expand_expr (arg,
4646 subtarget && GET_MODE (subtarget) == target_mode
4647 ? subtarget : NULL_RTX,
4648 target_mode, EXPAND_NORMAL);
4649 if (GET_MODE (op0) != target_mode)
4650 op0 = convert_to_mode (target_mode, op0, 1);
4651
4652 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4653
4654 gcc_assert (target);
4655
4656 return convert_to_mode (target_mode, target, 1);
4657 }
4658
4659 /* Expand a call to a unary builtin in EXP.
4660 Return NULL_RTX if a normal call should be emitted rather than expanding the
4661 function in-line. If convenient, the result should be placed in TARGET.
4662 SUBTARGET may be used as the target for computing one of EXP's operands. */
4663
4664 static rtx
4665 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4666 rtx subtarget, optab op_optab)
4667 {
4668 rtx op0;
4669
4670 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4671 return NULL_RTX;
4672
4673 /* Compute the argument. */
4674 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4675 (subtarget
4676 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4677 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4678 VOIDmode, EXPAND_NORMAL);
4679 /* Compute op, into TARGET if possible.
4680 Set TARGET to wherever the result comes back. */
4681 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4682 op_optab, op0, target, op_optab != clrsb_optab);
4683 gcc_assert (target);
4684
4685 return convert_to_mode (target_mode, target, 0);
4686 }
4687
4688 /* Expand a call to __builtin_expect. We just return our argument
4689 as the builtin_expect semantics should already have been applied by
4690 the tree branch-prediction pass. */
4691
4692 static rtx
4693 expand_builtin_expect (tree exp, rtx target)
4694 {
4695 tree arg;
4696
4697 if (call_expr_nargs (exp) < 2)
4698 return const0_rtx;
4699 arg = CALL_EXPR_ARG (exp, 0);
4700
4701 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4702 /* When guessing was done, the hints should already have been stripped away. */
4703 gcc_assert (!flag_guess_branch_prob
4704 || optimize == 0 || seen_error ());
4705 return target;
4706 }
4707
4708 /* Expand a call to __builtin_assume_aligned. We just return our first
4709 argument, as the builtin_assume_aligned semantics should already have
4710 been applied by CCP. */
4711
4712 static rtx
4713 expand_builtin_assume_aligned (tree exp, rtx target)
4714 {
4715 if (call_expr_nargs (exp) < 2)
4716 return const0_rtx;
4717 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4718 EXPAND_NORMAL);
4719 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4720 && (call_expr_nargs (exp) < 3
4721 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4722 return target;
4723 }
4724
4725 void
4726 expand_builtin_trap (void)
4727 {
4728 if (targetm.have_trap ())
4729 {
4730 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4731 /* For trap insns when not accumulating outgoing args force
4732 REG_ARGS_SIZE note to prevent crossjumping of calls with
4733 different args sizes. */
4734 if (!ACCUMULATE_OUTGOING_ARGS)
4735 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4736 }
4737 else
4738 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4739 emit_barrier ();
4740 }
4741
4742 /* Expand a call to __builtin_unreachable. We do nothing except emit
4743 a barrier saying that control flow will not pass here.
4744
4745 It is the responsibility of the program being compiled to ensure
4746 that control flow never reaches __builtin_unreachable. */
4747 static void
4748 expand_builtin_unreachable (void)
4749 {
4750 emit_barrier ();
4751 }
4752
4753 /* Expand EXP, a call to fabs, fabsf or fabsl.
4754 Return NULL_RTX if a normal call should be emitted rather than expanding
4755 the function inline. If convenient, the result should be placed
4756 in TARGET. SUBTARGET may be used as the target for computing
4757 the operand. */
4758
4759 static rtx
4760 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4761 {
4762 machine_mode mode;
4763 tree arg;
4764 rtx op0;
4765
4766 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4767 return NULL_RTX;
4768
4769 arg = CALL_EXPR_ARG (exp, 0);
4770 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4771 mode = TYPE_MODE (TREE_TYPE (arg));
4772 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4773 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4774 }
4775
4776 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4777 Return NULL if a normal call should be emitted rather than expanding the
4778 function inline. If convenient, the result should be placed in TARGET.
4779 SUBTARGET may be used as the target for computing the operand. */
4780
4781 static rtx
4782 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4783 {
4784 rtx op0, op1;
4785 tree arg;
4786
4787 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4788 return NULL_RTX;
4789
4790 arg = CALL_EXPR_ARG (exp, 0);
4791 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4792
4793 arg = CALL_EXPR_ARG (exp, 1);
4794 op1 = expand_normal (arg);
4795
4796 return expand_copysign (op0, op1, target);
4797 }
4798
4799 /* Expand a call to __builtin___clear_cache. */
4800
4801 static rtx
4802 expand_builtin___clear_cache (tree exp)
4803 {
4804 if (!targetm.code_for_clear_cache)
4805 {
4806 #ifdef CLEAR_INSN_CACHE
4807 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4808 does something. Just do the default expansion to a call to
4809 __clear_cache(). */
4810 return NULL_RTX;
4811 #else
4812 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4813 does nothing. There is no need to call it. Do nothing. */
4814 return const0_rtx;
4815 #endif /* CLEAR_INSN_CACHE */
4816 }
4817
4818 /* We have a "clear_cache" insn, and it will handle everything. */
4819 tree begin, end;
4820 rtx begin_rtx, end_rtx;
4821
4822 /* We must not expand to a library call. If we did, any
4823 fallback library function in libgcc that might contain a call to
4824 __builtin___clear_cache() would recurse infinitely. */
4825 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4826 {
4827 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4828 return const0_rtx;
4829 }
4830
4831 if (targetm.have_clear_cache ())
4832 {
4833 struct expand_operand ops[2];
4834
4835 begin = CALL_EXPR_ARG (exp, 0);
4836 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4837
4838 end = CALL_EXPR_ARG (exp, 1);
4839 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4840
4841 create_address_operand (&ops[0], begin_rtx);
4842 create_address_operand (&ops[1], end_rtx);
4843 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4844 return const0_rtx;
4845 }
4846 return const0_rtx;
4847 }
4848
4849 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4850
4851 static rtx
4852 round_trampoline_addr (rtx tramp)
4853 {
4854 rtx temp, addend, mask;
4855
4856 /* If we don't need too much alignment, we'll have been guaranteed
4857 proper alignment by get_trampoline_type. */
4858 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4859 return tramp;
4860
4861 /* Round address up to desired boundary. */
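/* I.e. compute (tramp + align - 1) & -align in bytes; e.g. for a
64-bit (8-byte) TRAMPOLINE_ALIGNMENT, ADDEND is 7 and MASK is -8. */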
4862 temp = gen_reg_rtx (Pmode);
4863 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4864 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4865
4866 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4867 temp, 0, OPTAB_LIB_WIDEN);
4868 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4869 temp, 0, OPTAB_LIB_WIDEN);
4870
4871 return tramp;
4872 }
4873
4874 static rtx
4875 expand_builtin_init_trampoline (tree exp, bool onstack)
4876 {
4877 tree t_tramp, t_func, t_chain;
4878 rtx m_tramp, r_tramp, r_chain, tmp;
4879
4880 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4881 POINTER_TYPE, VOID_TYPE))
4882 return NULL_RTX;
4883
4884 t_tramp = CALL_EXPR_ARG (exp, 0);
4885 t_func = CALL_EXPR_ARG (exp, 1);
4886 t_chain = CALL_EXPR_ARG (exp, 2);
4887
4888 r_tramp = expand_normal (t_tramp);
4889 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4890 MEM_NOTRAP_P (m_tramp) = 1;
4891
4892 /* If ONSTACK, the TRAMP argument should be the address of a field
4893 within the local function's FRAME decl. Either way, let's see if
4894 we can fill in the MEM_ATTRs for this memory. */
4895 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4896 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4897
4898 /* The creator of a heap trampoline is responsible for making sure the
4899 address is aligned to at least STACK_BOUNDARY. Normally malloc
4900 will ensure this anyhow. */
4901 tmp = round_trampoline_addr (r_tramp);
4902 if (tmp != r_tramp)
4903 {
4904 m_tramp = change_address (m_tramp, BLKmode, tmp);
4905 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4906 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4907 }
4908
4909 /* The FUNC argument should be the address of the nested function.
4910 Extract the actual function decl to pass to the hook. */
4911 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4912 t_func = TREE_OPERAND (t_func, 0);
4913 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4914
4915 r_chain = expand_normal (t_chain);
4916
4917 /* Generate insns to initialize the trampoline. */
4918 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4919
4920 if (onstack)
4921 {
4922 trampolines_created = 1;
4923
4924 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4925 "trampoline generated for nested function %qD", t_func);
4926 }
4927
4928 return const0_rtx;
4929 }
4930
4931 static rtx
4932 expand_builtin_adjust_trampoline (tree exp)
4933 {
4934 rtx tramp;
4935
4936 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4937 return NULL_RTX;
4938
4939 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4940 tramp = round_trampoline_addr (tramp);
4941 if (targetm.calls.trampoline_adjust_address)
4942 tramp = targetm.calls.trampoline_adjust_address (tramp);
4943
4944 return tramp;
4945 }
4946
4947 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4948 function. The function first checks whether the back end provides
4949 an insn to implement signbit for the respective mode. If not, it
4950 checks whether the floating point format of the value is such that
4951 the sign bit can be extracted. If that is not the case, the
4952 function returns NULL_RTX to indicate that a normal call should be
4953 emitted rather than expanding the function in-line. EXP is the
4954 expression that is a call to the builtin function; if convenient,
4955 the result should be placed in TARGET. */
4956 static rtx
4957 expand_builtin_signbit (tree exp, rtx target)
4958 {
4959 const struct real_format *fmt;
4960 machine_mode fmode, imode, rmode;
4961 tree arg;
4962 int word, bitpos;
4963 enum insn_code icode;
4964 rtx temp;
4965 location_t loc = EXPR_LOCATION (exp);
4966
4967 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4968 return NULL_RTX;
4969
4970 arg = CALL_EXPR_ARG (exp, 0);
4971 fmode = TYPE_MODE (TREE_TYPE (arg));
4972 rmode = TYPE_MODE (TREE_TYPE (exp));
4973 fmt = REAL_MODE_FORMAT (fmode);
4974
4975 arg = builtin_save_expr (arg);
4976
4977 /* Expand the argument yielding a RTX expression. */
4978 temp = expand_normal (arg);
4979
4980 /* Check if the back end provides an insn that handles signbit for the
4981 argument's mode. */
4982 icode = optab_handler (signbit_optab, fmode);
4983 if (icode != CODE_FOR_nothing)
4984 {
4985 rtx_insn *last = get_last_insn ();
4986 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4987 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4988 return target;
4989 delete_insns_since (last);
4990 }
4991
4992 /* For floating point formats without a sign bit, implement signbit
4993 as "ARG < 0.0". */
4994 bitpos = fmt->signbit_ro;
4995 if (bitpos < 0)
4996 {
4997 /* But we can't do this if the format supports signed zero. */
4998 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4999 return NULL_RTX;
5000
5001 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5002 build_real (TREE_TYPE (arg), dconst0));
5003 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5004 }
5005
5006 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5007 {
5008 imode = int_mode_for_mode (fmode);
5009 if (imode == BLKmode)
5010 return NULL_RTX;
5011 temp = gen_lowpart (imode, temp);
5012 }
5013 else
5014 {
5015 imode = word_mode;
5016 /* Handle targets with different FP word orders. */
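/* E.g. for a 64-bit DFmode value on a 32-bit-word target with
bitpos == 63, big-endian FP word order selects word
(64 - 63) / 32 == 0 and little-endian selects word 63 / 32 == 1;
either way bitpos reduces to 31 below. */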
5017 if (FLOAT_WORDS_BIG_ENDIAN)
5018 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5019 else
5020 word = bitpos / BITS_PER_WORD;
5021 temp = operand_subword_force (temp, word, fmode);
5022 bitpos = bitpos % BITS_PER_WORD;
5023 }
5024
5025 /* Force the intermediate word_mode (or narrower) result into a
5026 register. This avoids attempting to create paradoxical SUBREGs
5027 of floating point modes below. */
5028 temp = force_reg (imode, temp);
5029
5030 /* If the bitpos is within the "result mode" lowpart, the operation
5031 can be implemented with a single bitwise AND. Otherwise, we need
5032 a right shift and an AND. */
5033
5034 if (bitpos < GET_MODE_BITSIZE (rmode))
5035 {
5036 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5037
5038 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5039 temp = gen_lowpart (rmode, temp);
5040 temp = expand_binop (rmode, and_optab, temp,
5041 immed_wide_int_const (mask, rmode),
5042 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5043 }
5044 else
5045 {
5046 /* Perform a logical right shift to place the signbit in the least
5047 significant bit, then truncate the result to the desired mode
5048 and mask just this bit. */
5049 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5050 temp = gen_lowpart (rmode, temp);
5051 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5052 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5053 }
5054
5055 return temp;
5056 }
5057
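/* Illustrative sketch (not part of the compiler, hypothetical helper):
   for IEEE binary64, signbit_ro is 63, so on a 64-bit-word target the
   shift-and-mask expansion above is equivalent to:  */
#if 0
#include <stdint.h>
#include <string.h>

static int
signbit_double_sketch (double x)
{
  uint64_t bits;
  memcpy (&bits, &x, sizeof bits);	/* The gen_lowpart step.  */
  return (int) ((bits >> 63) & 1);	/* RSHIFT_EXPR, then AND with 1.  */
}
#endif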
5058 /* Expand fork or exec calls. TARGET is the desired target of the
5059 call. EXP is the call. FN is the decl of the
5060 actual function. IGNORE is nonzero if the
5061 value is to be ignored. */
5062
5063 static rtx
5064 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5065 {
5066 tree id, decl;
5067 tree call;
5068
5069 /* If we are not profiling, just call the function. */
5070 if (!profile_arc_flag)
5071 return NULL_RTX;
5072
5073 /* Otherwise call the wrapper. This should be equivalent for the rest of
5074 the compiler, so the generated code does not diverge, and the wrapper
5075 can run the code necessary to keep the profiling sane. */
5076
5077 switch (DECL_FUNCTION_CODE (fn))
5078 {
5079 case BUILT_IN_FORK:
5080 id = get_identifier ("__gcov_fork");
5081 break;
5082
5083 case BUILT_IN_EXECL:
5084 id = get_identifier ("__gcov_execl");
5085 break;
5086
5087 case BUILT_IN_EXECV:
5088 id = get_identifier ("__gcov_execv");
5089 break;
5090
5091 case BUILT_IN_EXECLP:
5092 id = get_identifier ("__gcov_execlp");
5093 break;
5094
5095 case BUILT_IN_EXECLE:
5096 id = get_identifier ("__gcov_execle");
5097 break;
5098
5099 case BUILT_IN_EXECVP:
5100 id = get_identifier ("__gcov_execvp");
5101 break;
5102
5103 case BUILT_IN_EXECVE:
5104 id = get_identifier ("__gcov_execve");
5105 break;
5106
5107 default:
5108 gcc_unreachable ();
5109 }
5110
5111 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5112 FUNCTION_DECL, id, TREE_TYPE (fn));
5113 DECL_EXTERNAL (decl) = 1;
5114 TREE_PUBLIC (decl) = 1;
5115 DECL_ARTIFICIAL (decl) = 1;
5116 TREE_NOTHROW (decl) = 1;
5117 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5118 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5119 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5120 return expand_call (call, target, ignore);
5121 }
5122
5123
5124 \f
5125 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5126 the pointer in these functions is void*, the tree optimizers may remove
5127 casts. The mode computed in expand_builtin isn't reliable either, due
5128 to __sync_bool_compare_and_swap.
5129
5130 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5131 group of builtins. This gives us log2 of the mode size. */
5132
5133 static inline machine_mode
5134 get_builtin_sync_mode (int fcode_diff)
5135 {
5136 /* The size is not negotiable, so ask not to get BLKmode in return
5137 if the target indicates that a smaller size would be better. */
5138 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5139 }
5140
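/* Worked example of the mapping above, assuming BITS_PER_UNIT == 8:
   for __sync_fetch_and_add_4, FCODE_DIFF is 2, so the size is
   8 << 2 == 32 bits and the result is SImode; the _1/_2/_8/_16
   variants map to QImode, HImode, DImode and TImode in the same way.  */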
5141 /* Expand the memory expression LOC and return the appropriate memory operand
5142 for the builtin_sync operations. */
5143
5144 static rtx
5145 get_builtin_sync_mem (tree loc, machine_mode mode)
5146 {
5147 rtx addr, mem;
5148
5149 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5150 addr = convert_memory_address (Pmode, addr);
5151
5152 /* Note that we explicitly do not want any alias information for this
5153 memory, so that we kill all other live memories. Otherwise we don't
5154 satisfy the full barrier semantics of the intrinsic. */
5155 mem = validize_mem (gen_rtx_MEM (mode, addr));
5156
5157 /* The alignment needs to be at least that of the mode. */
5158 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5159 get_pointer_alignment (loc)));
5160 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5161 MEM_VOLATILE_P (mem) = 1;
5162
5163 return mem;
5164 }
5165
5166 /* Make sure an argument is in the right mode.
5167 EXP is the tree argument.
5168 MODE is the mode it should be in. */
5169
5170 static rtx
5171 expand_expr_force_mode (tree exp, machine_mode mode)
5172 {
5173 rtx val;
5174 machine_mode old_mode;
5175
5176 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5177 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5178 of CONST_INTs, where we know the old_mode only from the call argument. */
5179
5180 old_mode = GET_MODE (val);
5181 if (old_mode == VOIDmode)
5182 old_mode = TYPE_MODE (TREE_TYPE (exp));
5183 val = convert_modes (mode, old_mode, val, 1);
5184 return val;
5185 }
5186
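/* Worked example: on a target that promotes QImode arguments to SImode,
   the value of a "char" argument arrives here in SImode and is
   converted back down to QImode; a bare constant such as 1 arrives as
   a VOIDmode CONST_INT, so OLD_MODE is taken from the tree type.  */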
5187
5188 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5189 EXP is the CALL_EXPR. CODE is the rtx code
5190 that corresponds to the arithmetic or logical operation from the name;
5191 an exception here is that NOT actually means NAND. TARGET is an optional
5192 place for us to store the results; AFTER is true if this is the
5193 fetch_and_xxx form. */
5194
5195 static rtx
5196 expand_builtin_sync_operation (machine_mode mode, tree exp,
5197 enum rtx_code code, bool after,
5198 rtx target)
5199 {
5200 rtx val, mem;
5201 location_t loc = EXPR_LOCATION (exp);
5202
5203 if (code == NOT && warn_sync_nand)
5204 {
5205 tree fndecl = get_callee_fndecl (exp);
5206 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5207
5208 static bool warned_f_a_n, warned_n_a_f;
5209
5210 switch (fcode)
5211 {
5212 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5213 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5214 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5215 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5216 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5217 if (warned_f_a_n)
5218 break;
5219
5220 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5221 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5222 warned_f_a_n = true;
5223 break;
5224
5225 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5226 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5227 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5228 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5229 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5230 if (warned_n_a_f)
5231 break;
5232
5233 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5234 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5235 warned_n_a_f = true;
5236 break;
5237
5238 default:
5239 gcc_unreachable ();
5240 }
5241 }
5242
5243 /* Expand the operands. */
5244 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5245 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5246
5247 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5248 after);
5249 }
5250
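/* Illustrative sketch of the NAND semantics the warning above refers
   to (hypothetical helper; the real operation is a single atomic step):  */
#if 0
unsigned int
sync_nand_and_fetch_sketch (unsigned int *ptr, unsigned int val)
{
  /* Before GCC 4.4: *ptr = ~*ptr & val;
     Since GCC 4.4:  *ptr = ~(*ptr & val);  (a true NAND).  */
  unsigned int newval = ~(*ptr & val);
  *ptr = newval;
  return newval;
}
#endif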
5251 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5252 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5253 true if this is the boolean form. TARGET is a place for us to store the
5254 results; this is NOT optional if IS_BOOL is true. */
5255
5256 static rtx
5257 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5258 bool is_bool, rtx target)
5259 {
5260 rtx old_val, new_val, mem;
5261 rtx *pbool, *poval;
5262
5263 /* Expand the operands. */
5264 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5265 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5266 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5267
5268 pbool = poval = NULL;
5269 if (target != const0_rtx)
5270 {
5271 if (is_bool)
5272 pbool = &target;
5273 else
5274 poval = &target;
5275 }
5276 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5277 false, MEMMODEL_SYNC_SEQ_CST,
5278 MEMMODEL_SYNC_SEQ_CST))
5279 return NULL_RTX;
5280
5281 return target;
5282 }
5283
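/* Illustrative usage of the two forms expanded above (a sketch,
   assuming int):  */
#if 0
static int flag;

static void
cas_sketch (void)
{
  /* Boolean form: returns whether the swap happened.  */
  if (__sync_bool_compare_and_swap (&flag, 0, 1))
    { /* We installed the new value.  */ }
  /* Value form: returns the prior contents of the location.  */
  int old = __sync_val_compare_and_swap (&flag, 1, 2);
  (void) old;
}
#endif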
5284 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5285 general form is actually an atomic exchange, and some targets only
5286 support a reduced form with the second argument being a constant 1.
5287 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5288 the results. */
5289
5290 static rtx
5291 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5292 rtx target)
5293 {
5294 rtx val, mem;
5295
5296 /* Expand the operands. */
5297 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5298 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5299
5300 return expand_sync_lock_test_and_set (target, mem, val);
5301 }
5302
5303 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5304
5305 static void
5306 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5307 {
5308 rtx mem;
5309
5310 /* Expand the operands. */
5311 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5312
5313 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5314 }
5315
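/* Illustrative sketch: the classic spinlock these two intrinsics were
   designed for (hypothetical helpers, assuming int):  */
#if 0
static int lock_word;

static void
spin_acquire (void)
{
  while (__sync_lock_test_and_set (&lock_word, 1))
    ;	/* Spin until the previous value was 0.  */
}

static void
spin_release (void)
{
  __sync_lock_release (&lock_word);	/* Stores 0 with release semantics.  */
}
#endif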
5316 /* Given an integer representing an ``enum memmodel'', verify its
5317 correctness and return the memory model enum. */
5318
5319 static enum memmodel
5320 get_memmodel (tree exp)
5321 {
5322 rtx op;
5323 unsigned HOST_WIDE_INT val;
5324
5325 /* If the parameter is not a constant, it's a run time value so we'll just
5326 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5327 if (TREE_CODE (exp) != INTEGER_CST)
5328 return MEMMODEL_SEQ_CST;
5329
5330 op = expand_normal (exp);
5331
5332 val = INTVAL (op);
5333 if (targetm.memmodel_check)
5334 val = targetm.memmodel_check (val);
5335 else if (val & ~MEMMODEL_MASK)
5336 {
5337 warning (OPT_Winvalid_memory_model,
5338 "unknown architecture specifier in memory model to builtin");
5339 return MEMMODEL_SEQ_CST;
5340 }
5341
5342 /* We should never see a user-supplied SYNC memory model, so >= LAST works. */
5343 if (memmodel_base (val) >= MEMMODEL_LAST)
5344 {
5345 warning (OPT_Winvalid_memory_model,
5346 "invalid memory model argument to builtin");
5347 return MEMMODEL_SEQ_CST;
5348 }
5349
5350 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5351 be conservative and promote consume to acquire. */
5352 if (val == MEMMODEL_CONSUME)
5353 val = MEMMODEL_ACQUIRE;
5354
5355 return (enum memmodel) val;
5356 }
5357
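/* For reference, the constants this parses match the __ATOMIC_* macros:
   __ATOMIC_RELAXED 0, __ATOMIC_CONSUME 1, __ATOMIC_ACQUIRE 2,
   __ATOMIC_RELEASE 3, __ATOMIC_ACQ_REL 4, __ATOMIC_SEQ_CST 5.  So e.g.
   __atomic_load_n (p, __ATOMIC_CONSUME) arrives here as the constant 1
   and, per the workaround above, is promoted to MEMMODEL_ACQUIRE.  */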
5358 /* Expand the __atomic_exchange intrinsic:
5359 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5360 EXP is the CALL_EXPR.
5361 TARGET is an optional place for us to store the results. */
5362
5363 static rtx
5364 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5365 {
5366 rtx val, mem;
5367 enum memmodel model;
5368
5369 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5370
5371 if (!flag_inline_atomics)
5372 return NULL_RTX;
5373
5374 /* Expand the operands. */
5375 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5376 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5377
5378 return expand_atomic_exchange (target, mem, val, model);
5379 }
5380
5381 /* Expand the __atomic_compare_exchange intrinsic:
5382 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5383 TYPE desired, BOOL weak,
5384 enum memmodel success,
5385 enum memmodel failure)
5386 EXP is the CALL_EXPR.
5387 TARGET is an optional place for us to store the results. */
5388
5389 static rtx
5390 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5391 rtx target)
5392 {
5393 rtx expect, desired, mem, oldval;
5394 rtx_code_label *label;
5395 enum memmodel success, failure;
5396 tree weak;
5397 bool is_weak;
5398
5399 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5400 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5401
5402 if (failure > success)
5403 {
5404 warning (OPT_Winvalid_memory_model,
5405 "failure memory model cannot be stronger than success memory "
5406 "model for %<__atomic_compare_exchange%>");
5407 success = MEMMODEL_SEQ_CST;
5408 }
5409
5410 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5411 {
5412 warning (OPT_Winvalid_memory_model,
5413 "invalid failure memory model for "
5414 "%<__atomic_compare_exchange%>");
5415 failure = MEMMODEL_SEQ_CST;
5416 success = MEMMODEL_SEQ_CST;
5417 }
5418
5419
5420 if (!flag_inline_atomics)
5421 return NULL_RTX;
5422
5423 /* Expand the operands. */
5424 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5425
5426 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5427 expect = convert_memory_address (Pmode, expect);
5428 expect = gen_rtx_MEM (mode, expect);
5429 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5430
5431 weak = CALL_EXPR_ARG (exp, 3);
5432 is_weak = false;
5433 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5434 is_weak = true;
5435
5436 if (target == const0_rtx)
5437 target = NULL;
5438
5439 /* Lest the rtl backend create a race condition with an improper store
5440 to memory, always create a new pseudo for OLDVAL. */
5441 oldval = NULL;
5442
5443 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5444 is_weak, success, failure))
5445 return NULL_RTX;
5446
5447 /* Conditionally store back to EXPECT, lest we create a race condition
5448 with an improper store to memory. */
5449 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5450 the normal case where EXPECT is totally private, i.e. a register, at
5451 which point the store can be unconditional. */
5452 label = gen_label_rtx ();
5453 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5454 GET_MODE (target), 1, label);
5455 emit_move_insn (expect, oldval);
5456 emit_label (label);
5457
5458 return target;
5459 }
5460
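/* Illustrative caller-side loop (a sketch, assuming int); the
   conditional store back to EXPECT above is what refreshes V on
   failure:  */
#if 0
static int counter;

static void
increment_sketch (void)
{
  int v = __atomic_load_n (&counter, __ATOMIC_RELAXED);
  while (!__atomic_compare_exchange_n (&counter, &v, v + 1, /*weak=*/1,
				       __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
    ;	/* On failure, V now holds the value seen in memory.  */
}
#endif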
5461 /* Expand the __atomic_load intrinsic:
5462 TYPE __atomic_load (TYPE *object, enum memmodel)
5463 EXP is the CALL_EXPR.
5464 TARGET is an optional place for us to store the results. */
5465
5466 static rtx
5467 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5468 {
5469 rtx mem;
5470 enum memmodel model;
5471
5472 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5473 if (is_mm_release (model) || is_mm_acq_rel (model))
5474 {
5475 warning (OPT_Winvalid_memory_model,
5476 "invalid memory model for %<__atomic_load%>");
5477 model = MEMMODEL_SEQ_CST;
5478 }
5479
5480 if (!flag_inline_atomics)
5481 return NULL_RTX;
5482
5483 /* Expand the operand. */
5484 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5485
5486 return expand_atomic_load (target, mem, model);
5487 }
5488
5489
5490 /* Expand the __atomic_store intrinsic:
5491 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5492 EXP is the CALL_EXPR.
5493 TARGET is an optional place for us to store the results. */
5494
5495 static rtx
5496 expand_builtin_atomic_store (machine_mode mode, tree exp)
5497 {
5498 rtx mem, val;
5499 enum memmodel model;
5500
5501 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5502 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5503 || is_mm_release (model)))
5504 {
5505 warning (OPT_Winvalid_memory_model,
5506 "invalid memory model for %<__atomic_store%>");
5507 model = MEMMODEL_SEQ_CST;
5508 }
5509
5510 if (!flag_inline_atomics)
5511 return NULL_RTX;
5512
5513 /* Expand the operands. */
5514 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5515 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5516
5517 return expand_atomic_store (mem, val, model, false);
5518 }
5519
5520 /* Expand the __atomic_fetch_XXX intrinsic:
5521 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5522 EXP is the CALL_EXPR.
5523 TARGET is an optional place for us to store the results.
5524 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
5525 FETCH_AFTER is true if returning the result of the operation.
5526 FETCH_AFTER is false if returning the value before the operation.
5527 IGNORE is true if the result is not used.
5528 EXT_CALL is the correct builtin for an external call if this cannot be
5529 resolved to an instruction sequence. */
5530
5531 static rtx
5532 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5533 enum rtx_code code, bool fetch_after,
5534 bool ignore, enum built_in_function ext_call)
5535 {
5536 rtx val, mem, ret;
5537 enum memmodel model;
5538 tree fndecl;
5539 tree addr;
5540
5541 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5542
5543 /* Expand the operands. */
5544 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5545 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5546
5547 /* Only try generating instructions if inlining is turned on. */
5548 if (flag_inline_atomics)
5549 {
5550 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5551 if (ret)
5552 return ret;
5553 }
5554
5555 /* Give up if there is no alternate library routine to fall back on. */
5556 if (ext_call == BUILT_IN_NONE)
5557 return NULL_RTX;
5558
5559 /* Change the call to the specified function. */
5560 fndecl = get_callee_fndecl (exp);
5561 addr = CALL_EXPR_FN (exp);
5562 STRIP_NOPS (addr);
5563
5564 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5565 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5566
5567 /* Expand the call here so we can emit trailing code. */
5568 ret = expand_call (exp, target, ignore);
5569
5570 /* Replace the original function just in case it matters. */
5571 TREE_OPERAND (addr, 0) = fndecl;
5572
5573 /* Then issue the arithmetic correction to return the right result. */
5574 if (!ignore)
5575 {
5576 if (code == NOT)
5577 {
5578 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5579 OPTAB_LIB_WIDEN);
5580 ret = expand_simple_unop (mode, NOT, ret, target, true);
5581 }
5582 else
5583 ret = expand_simple_binop (mode, code, ret, val, target, true,
5584 OPTAB_LIB_WIDEN);
5585 }
5586 return ret;
5587 }
5588
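/* Worked example of the arithmetic correction above: the library
   fallback for __atomic_nand_fetch_4 is __atomic_fetch_nand_4, which
   returns the OLD value, so the desired result is recomputed as
   ~(old & val) -- exactly the AND-then-NOT pair emitted for CODE == NOT;
   for PLUS the correction is simply old + val, and so on.  */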
5589
5590 #ifndef HAVE_atomic_clear
5591 # define HAVE_atomic_clear 0
5592 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5593 #endif
5594
5595 /* Expand an atomic clear operation.
5596 void __atomic_clear (BOOL *obj, enum memmodel)
5597 EXP is the call expression. */
5598
5599 static rtx
5600 expand_builtin_atomic_clear (tree exp)
5601 {
5602 machine_mode mode;
5603 rtx mem, ret;
5604 enum memmodel model;
5605
5606 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5607 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5608 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5609
5610 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5611 {
5612 warning (OPT_Winvalid_memory_model,
5613 "invalid memory model for %<__atomic_clear%>");
5614 model = MEMMODEL_SEQ_CST;
5615 }
5616
5617 if (HAVE_atomic_clear)
5618 {
5619 emit_insn (gen_atomic_clear (mem, model));
5620 return const0_rtx;
5621 }
5622
5623 /* Try issuing an __atomic_store pattern, allowing a fallback to a
5624 __sync_lock_release pattern. The only way this can fail is if the
5625 bool type is larger than a word size. Unlikely, but handle it anyway
5626 for completeness. Assume a single threaded model in that case, since
5627 there is no atomic support and no barriers are required. */
5628 ret = expand_atomic_store (mem, const0_rtx, model, true);
5629 if (!ret)
5630 emit_move_insn (mem, const0_rtx);
5631 return const0_rtx;
5632 }
5633
5634 /* Expand an atomic test_and_set operation.
5635 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5636 EXP is the call expression. */
5637
5638 static rtx
5639 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5640 {
5641 rtx mem;
5642 enum memmodel model;
5643 machine_mode mode;
5644
5645 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5646 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5647 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5648
5649 return expand_atomic_test_and_set (target, mem, model);
5650 }
5651
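/* Illustrative sketch: together these two builtins implement a
   C11-style atomic_flag (hypothetical helpers, assuming _Bool):  */
#if 0
static _Bool guard;

static void
flag_lock (void)
{
  while (__atomic_test_and_set (&guard, __ATOMIC_ACQUIRE))
    ;	/* Spin while the previous value was "set".  */
}

static void
flag_unlock (void)
{
  __atomic_clear (&guard, __ATOMIC_RELEASE);
}
#endif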
5652
5653 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5654 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5655
5656 static tree
5657 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5658 {
5659 int size;
5660 machine_mode mode;
5661 unsigned int mode_align, type_align;
5662
5663 if (TREE_CODE (arg0) != INTEGER_CST)
5664 return NULL_TREE;
5665
5666 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5667 mode = mode_for_size (size, MODE_INT, 0);
5668 mode_align = GET_MODE_ALIGNMENT (mode);
5669
5670 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5671 type_align = mode_align;
5672 else
5673 {
5674 tree ttype = TREE_TYPE (arg1);
5675
5676 /* This function is usually invoked and folded immediately by the front
5677 end before anything else has a chance to look at it. The pointer
5678 parameter at this point is usually cast to a void *, so check for that
5679 and look past the cast. */
5680 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5681 && VOID_TYPE_P (TREE_TYPE (ttype)))
5682 arg1 = TREE_OPERAND (arg1, 0);
5683
5684 ttype = TREE_TYPE (arg1);
5685 gcc_assert (POINTER_TYPE_P (ttype));
5686
5687 /* Get the underlying type of the object. */
5688 ttype = TREE_TYPE (ttype);
5689 type_align = TYPE_ALIGN (ttype);
5690 }
5691
5692 /* If the object has smaller alignment, the lock free routines cannot
5693 be used. */
5694 if (type_align < mode_align)
5695 return boolean_false_node;
5696
5697 /* Check if a compare_and_swap pattern exists for the mode which represents
5698 the required size. The pattern is not allowed to fail, so the existence
5699 of the pattern indicates support is present. */
5700 if (can_compare_and_swap_p (mode, true))
5701 return boolean_true_node;
5702 else
5703 return boolean_false_node;
5704 }
5705
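/* Worked examples of the folding above on a typical 64-bit target:
   __atomic_always_lock_free (4, 0) uses the mode alignment and folds
   to true when a 32-bit compare-and-swap pattern exists; given
       struct { char pad; char c; } x;
   __atomic_always_lock_free (4, &x.c) folds to false, because the
   pointed-to char is only byte-aligned while SImode wants 32 bits.  */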
5706 /* Return true if the parameters to call EXP represent an object which will
5707 always generate lock free instructions. The first argument represents the
5708 size of the object, and the second parameter is a pointer to the object
5709 itself. If NULL is passed for the object, then the result is based on
5710 typical alignment for an object of the specified size. Otherwise return
5711 false. */
5712
5713 static rtx
5714 expand_builtin_atomic_always_lock_free (tree exp)
5715 {
5716 tree size;
5717 tree arg0 = CALL_EXPR_ARG (exp, 0);
5718 tree arg1 = CALL_EXPR_ARG (exp, 1);
5719
5720 if (TREE_CODE (arg0) != INTEGER_CST)
5721 {
5722 error ("non-constant argument 1 to __atomic_always_lock_free");
5723 return const0_rtx;
5724 }
5725
5726 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5727 if (size == boolean_true_node)
5728 return const1_rtx;
5729 return const0_rtx;
5730 }
5731
5732 /* Return boolean_true_node if it can be determined that object ARG1 of
5733 size ARG0 is always lock free on this architecture; NULL_TREE otherwise. */
5734
5735 static tree
5736 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5737 {
5738 if (!flag_inline_atomics)
5739 return NULL_TREE;
5740
5741 /* If it isn't always lock free, don't generate a result. */
5742 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5743 return boolean_true_node;
5744
5745 return NULL_TREE;
5746 }
5747
5748 /* Return const1_rtx if the parameters to call EXP represent an object
5749 which can be determined at compile time to always be lock free. The
5750 first argument is the size of the object, and the second is a pointer
5751 to the object itself. If NULL is passed for the object, then the
5752 result is based on typical alignment for an object of the specified
5753 size. Otherwise return NULL_RTX. */
5754
5755 static rtx
5756 expand_builtin_atomic_is_lock_free (tree exp)
5757 {
5758 tree size;
5759 tree arg0 = CALL_EXPR_ARG (exp, 0);
5760 tree arg1 = CALL_EXPR_ARG (exp, 1);
5761
5762 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5763 {
5764 error ("non-integer argument 1 to __atomic_is_lock_free");
5765 return NULL_RTX;
5766 }
5767
5768 if (!flag_inline_atomics)
5769 return NULL_RTX;
5770
5771 /* If the value is known at compile time, return the RTX for it. */
5772 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5773 if (size == boolean_true_node)
5774 return const1_rtx;
5775
5776 return NULL_RTX;
5777 }
5778
5779 /* Expand the __atomic_thread_fence intrinsic:
5780 void __atomic_thread_fence (enum memmodel)
5781 EXP is the CALL_EXPR. */
5782
5783 static void
5784 expand_builtin_atomic_thread_fence (tree exp)
5785 {
5786 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5787 expand_mem_thread_fence (model);
5788 }
5789
5790 /* Expand the __atomic_signal_fence intrinsic:
5791 void __atomic_signal_fence (enum memmodel)
5792 EXP is the CALL_EXPR. */
5793
5794 static void
5795 expand_builtin_atomic_signal_fence (tree exp)
5796 {
5797 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5798 expand_mem_signal_fence (model);
5799 }
5800
5801 /* Expand the __sync_synchronize intrinsic. */
5802
5803 static void
5804 expand_builtin_sync_synchronize (void)
5805 {
5806 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5807 }
5808
5809 static rtx
5810 expand_builtin_thread_pointer (tree exp, rtx target)
5811 {
5812 enum insn_code icode;
5813 if (!validate_arglist (exp, VOID_TYPE))
5814 return const0_rtx;
5815 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5816 if (icode != CODE_FOR_nothing)
5817 {
5818 struct expand_operand op;
5819 /* If the target is not suitable then create a new target. */
5820 if (target == NULL_RTX
5821 || !REG_P (target)
5822 || GET_MODE (target) != Pmode)
5823 target = gen_reg_rtx (Pmode);
5824 create_output_operand (&op, target, Pmode);
5825 expand_insn (icode, 1, &op);
5826 return target;
5827 }
5828 error ("__builtin_thread_pointer is not supported on this target");
5829 return const0_rtx;
5830 }
5831
5832 static void
5833 expand_builtin_set_thread_pointer (tree exp)
5834 {
5835 enum insn_code icode;
5836 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5837 return;
5838 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5839 if (icode != CODE_FOR_nothing)
5840 {
5841 struct expand_operand op;
5842 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5843 Pmode, EXPAND_NORMAL);
5844 create_input_operand (&op, val, Pmode);
5845 expand_insn (icode, 1, &op);
5846 return;
5847 }
5848 error ("__builtin_set_thread_pointer is not supported on this target");
5849 }
5850
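/* Illustrative usage, valid only on targets providing the two optabs
   (e.g. ports with a dedicated TLS register); a sketch, not a
   recommendation:  */
#if 0
static void
thread_pointer_sketch (void)
{
  void *tp = __builtin_thread_pointer ();
  __builtin_set_thread_pointer (tp);	/* Restore the same value.  */
}
#endif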
5851 \f
5852 /* Emit code to restore the current value of stack. */
5853
5854 static void
5855 expand_stack_restore (tree var)
5856 {
5857 rtx_insn *prev;
5858 rtx sa = expand_normal (var);
5859
5860 sa = convert_memory_address (Pmode, sa);
5861
5862 prev = get_last_insn ();
5863 emit_stack_restore (SAVE_BLOCK, sa);
5864
5865 record_new_stack_level ();
5866
5867 fixup_args_size_notes (prev, get_last_insn (), 0);
5868 }
5869
5870 /* Emit code to save the current value of stack. */
5871
5872 static rtx
5873 expand_stack_save (void)
5874 {
5875 rtx ret = NULL_RTX;
5876
5877 emit_stack_save (SAVE_BLOCK, &ret);
5878 return ret;
5879 }
5880
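/* These two are typically emitted as a pair around variable-sized
   allocations; for a C99 VLA in an inner block the gimplifier
   produces, roughly:
       saved = __builtin_stack_save ();
       p = __builtin_alloca (n);
       ... use p ...
       __builtin_stack_restore (saved);  */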
5881
5882 /* Expand OpenACC acc_on_device.
5883
5884 This has to happen late (that is, not in early folding; expand_builtin_*,
5885 rather than fold_builtin_*), as we have to act differently for host and
5886 acceleration device (ACCEL_COMPILER conditional). */
5887
5888 static rtx
5889 expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
5890 rtx target ATTRIBUTE_UNUSED)
5891 {
5892 #ifdef ACCEL_COMPILER
5893 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5894 return NULL_RTX;
5895
5896 tree arg = CALL_EXPR_ARG (exp, 0);
5897
5898 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5899 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5900 rtx v = expand_normal (arg), v1, v2;
5901 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5902 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5903 machine_mode target_mode = TYPE_MODE (integer_type_node);
5904 if (!target || !register_operand (target, target_mode))
5905 target = gen_reg_rtx (target_mode);
5906 emit_move_insn (target, const1_rtx);
5907 rtx_code_label *done_label = gen_label_rtx ();
5908 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5909 NULL, done_label, PROB_EVEN);
5910 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5911 NULL, done_label, PROB_EVEN);
5912 emit_move_insn (target, const0_rtx);
5913 emit_label (done_label);
5914
5915 return target;
5916 #else
5917 return NULL;
5918 #endif
5919 }
5920
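/* Worked example: in device code a call such as
   acc_on_device (acc_device_not_host) expands inline to the pair of
   comparisons above, (arg == GOMP_DEVICE_NOT_HOST
   || arg == ACCEL_COMPILER_acc_device), and so yields 1, while the
   same source compiled for the host takes the library-call path.  */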
5921
5922 /* Expand an expression EXP that calls a built-in function,
5923 with result going to TARGET if that's convenient
5924 (and in mode MODE if that's convenient).
5925 SUBTARGET may be used as the target for computing one of EXP's operands.
5926 IGNORE is nonzero if the value is to be ignored. */
5927
5928 rtx
5929 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5930 int ignore)
5931 {
5932 tree fndecl = get_callee_fndecl (exp);
5933 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5934 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5935 int flags;
5936
5937 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5938 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5939
5940 /* When ASan is enabled, we don't want to expand some memory/string
5941 builtins and rely on libsanitizer's hooks. This allows us to avoid
5942 redundant checks and be sure that possible overflows will be detected
5943 by ASan. */
5944
5945 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5946 return expand_call (exp, target, ignore);
5947
5948 /* When not optimizing, generate calls to library functions for a certain
5949 set of builtins. */
5950 if (!optimize
5951 && !called_as_built_in (fndecl)
5952 && fcode != BUILT_IN_FORK
5953 && fcode != BUILT_IN_EXECL
5954 && fcode != BUILT_IN_EXECV
5955 && fcode != BUILT_IN_EXECLP
5956 && fcode != BUILT_IN_EXECLE
5957 && fcode != BUILT_IN_EXECVP
5958 && fcode != BUILT_IN_EXECVE
5959 && fcode != BUILT_IN_ALLOCA
5960 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5961 && fcode != BUILT_IN_FREE
5962 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5963 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5964 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5965 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5966 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5967 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5968 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5969 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5970 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5971 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5972 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5973 && fcode != BUILT_IN_CHKP_BNDRET)
5974 return expand_call (exp, target, ignore);
5975
5976 /* The built-in function expanders test for target == const0_rtx
5977 to determine whether the function's result will be ignored. */
5978 if (ignore)
5979 target = const0_rtx;
5980
5981 /* If the result of a pure or const built-in function is ignored, and
5982 none of its arguments are volatile, we can avoid expanding the
5983 built-in call and just evaluate the arguments for side-effects. */
5984 if (target == const0_rtx
5985 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5986 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5987 {
5988 bool volatilep = false;
5989 tree arg;
5990 call_expr_arg_iterator iter;
5991
5992 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5993 if (TREE_THIS_VOLATILE (arg))
5994 {
5995 volatilep = true;
5996 break;
5997 }
5998
5999 if (! volatilep)
6000 {
6001 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6002 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6003 return const0_rtx;
6004 }
6005 }
6006
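/* Worked example: with the result ignored, a statement such as
   "(void) __builtin_labs (i++);" never expands the const call itself;
   only the side effect i++ is evaluated, via the loop above.  */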
6007 /* expand_builtin_with_bounds is supposed to be used for
6008 instrumented builtin calls. */
6009 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6010
6011 switch (fcode)
6012 {
6013 CASE_FLT_FN (BUILT_IN_FABS):
6014 case BUILT_IN_FABSD32:
6015 case BUILT_IN_FABSD64:
6016 case BUILT_IN_FABSD128:
6017 target = expand_builtin_fabs (exp, target, subtarget);
6018 if (target)
6019 return target;
6020 break;
6021
6022 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6023 target = expand_builtin_copysign (exp, target, subtarget);
6024 if (target)
6025 return target;
6026 break;
6027
6028 /* Just do a normal library call if we were unable to fold
6029 the values. */
6030 CASE_FLT_FN (BUILT_IN_CABS):
6031 break;
6032
6033 CASE_FLT_FN (BUILT_IN_EXP):
6034 CASE_FLT_FN (BUILT_IN_EXP10):
6035 CASE_FLT_FN (BUILT_IN_POW10):
6036 CASE_FLT_FN (BUILT_IN_EXP2):
6037 CASE_FLT_FN (BUILT_IN_EXPM1):
6038 CASE_FLT_FN (BUILT_IN_LOGB):
6039 CASE_FLT_FN (BUILT_IN_LOG):
6040 CASE_FLT_FN (BUILT_IN_LOG10):
6041 CASE_FLT_FN (BUILT_IN_LOG2):
6042 CASE_FLT_FN (BUILT_IN_LOG1P):
6043 CASE_FLT_FN (BUILT_IN_TAN):
6044 CASE_FLT_FN (BUILT_IN_ASIN):
6045 CASE_FLT_FN (BUILT_IN_ACOS):
6046 CASE_FLT_FN (BUILT_IN_ATAN):
6047 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6048 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6049 because of possible accuracy problems. */
6050 if (! flag_unsafe_math_optimizations)
6051 break;
6052 CASE_FLT_FN (BUILT_IN_SQRT):
6053 CASE_FLT_FN (BUILT_IN_FLOOR):
6054 CASE_FLT_FN (BUILT_IN_CEIL):
6055 CASE_FLT_FN (BUILT_IN_TRUNC):
6056 CASE_FLT_FN (BUILT_IN_ROUND):
6057 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6058 CASE_FLT_FN (BUILT_IN_RINT):
6059 target = expand_builtin_mathfn (exp, target, subtarget);
6060 if (target)
6061 return target;
6062 break;
6063
6064 CASE_FLT_FN (BUILT_IN_FMA):
6065 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6066 if (target)
6067 return target;
6068 break;
6069
6070 CASE_FLT_FN (BUILT_IN_ILOGB):
6071 if (! flag_unsafe_math_optimizations)
6072 break;
6073 CASE_FLT_FN (BUILT_IN_ISINF):
6074 CASE_FLT_FN (BUILT_IN_FINITE):
6075 case BUILT_IN_ISFINITE:
6076 case BUILT_IN_ISNORMAL:
6077 target = expand_builtin_interclass_mathfn (exp, target);
6078 if (target)
6079 return target;
6080 break;
6081
6082 CASE_FLT_FN (BUILT_IN_ICEIL):
6083 CASE_FLT_FN (BUILT_IN_LCEIL):
6084 CASE_FLT_FN (BUILT_IN_LLCEIL):
6085 CASE_FLT_FN (BUILT_IN_LFLOOR):
6086 CASE_FLT_FN (BUILT_IN_IFLOOR):
6087 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6088 target = expand_builtin_int_roundingfn (exp, target);
6089 if (target)
6090 return target;
6091 break;
6092
6093 CASE_FLT_FN (BUILT_IN_IRINT):
6094 CASE_FLT_FN (BUILT_IN_LRINT):
6095 CASE_FLT_FN (BUILT_IN_LLRINT):
6096 CASE_FLT_FN (BUILT_IN_IROUND):
6097 CASE_FLT_FN (BUILT_IN_LROUND):
6098 CASE_FLT_FN (BUILT_IN_LLROUND):
6099 target = expand_builtin_int_roundingfn_2 (exp, target);
6100 if (target)
6101 return target;
6102 break;
6103
6104 CASE_FLT_FN (BUILT_IN_POWI):
6105 target = expand_builtin_powi (exp, target);
6106 if (target)
6107 return target;
6108 break;
6109
6110 CASE_FLT_FN (BUILT_IN_ATAN2):
6111 CASE_FLT_FN (BUILT_IN_LDEXP):
6112 CASE_FLT_FN (BUILT_IN_SCALB):
6113 CASE_FLT_FN (BUILT_IN_SCALBN):
6114 CASE_FLT_FN (BUILT_IN_SCALBLN):
6115 if (! flag_unsafe_math_optimizations)
6116 break;
6117
6118 CASE_FLT_FN (BUILT_IN_FMOD):
6119 CASE_FLT_FN (BUILT_IN_REMAINDER):
6120 CASE_FLT_FN (BUILT_IN_DREM):
6121 CASE_FLT_FN (BUILT_IN_POW):
6122 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6123 if (target)
6124 return target;
6125 break;
6126
6127 CASE_FLT_FN (BUILT_IN_CEXPI):
6128 target = expand_builtin_cexpi (exp, target);
6129 gcc_assert (target);
6130 return target;
6131
6132 CASE_FLT_FN (BUILT_IN_SIN):
6133 CASE_FLT_FN (BUILT_IN_COS):
6134 if (! flag_unsafe_math_optimizations)
6135 break;
6136 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6137 if (target)
6138 return target;
6139 break;
6140
6141 CASE_FLT_FN (BUILT_IN_SINCOS):
6142 if (! flag_unsafe_math_optimizations)
6143 break;
6144 target = expand_builtin_sincos (exp);
6145 if (target)
6146 return target;
6147 break;
6148
6149 case BUILT_IN_APPLY_ARGS:
6150 return expand_builtin_apply_args ();
6151
6152 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6153 FUNCTION with a copy of the parameters described by
6154 ARGUMENTS, and ARGSIZE. It returns a block of memory
6155 allocated on the stack into which is stored all the registers
6156 that might possibly be used for returning the result of a
6157 function. ARGUMENTS is the value returned by
6158 __builtin_apply_args. ARGSIZE is the number of bytes of
6159 arguments that must be copied. ??? How should this value be
6160 computed? We'll also need a safe worst case value for varargs
6161 functions. */
6162 case BUILT_IN_APPLY:
6163 if (!validate_arglist (exp, POINTER_TYPE,
6164 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6165 && !validate_arglist (exp, REFERENCE_TYPE,
6166 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6167 return const0_rtx;
6168 else
6169 {
6170 rtx ops[3];
6171
6172 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6173 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6174 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6175
6176 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6177 }
6178
6179 /* __builtin_return (RESULT) causes the function to return the
6180 value described by RESULT. RESULT is address of the block of
6181 memory returned by __builtin_apply. */
6182 case BUILT_IN_RETURN:
6183 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6184 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6185 return const0_rtx;
6186
6187 case BUILT_IN_SAVEREGS:
6188 return expand_builtin_saveregs ();
6189
6190 case BUILT_IN_VA_ARG_PACK:
6191 /* All valid uses of __builtin_va_arg_pack () are removed during
6192 inlining. */
6193 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6194 return const0_rtx;
6195
6196 case BUILT_IN_VA_ARG_PACK_LEN:
6197 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6198 inlining. */
6199 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6200 return const0_rtx;
6201
6202 /* Return the address of the first anonymous stack arg. */
6203 case BUILT_IN_NEXT_ARG:
6204 if (fold_builtin_next_arg (exp, false))
6205 return const0_rtx;
6206 return expand_builtin_next_arg ();
6207
6208 case BUILT_IN_CLEAR_CACHE:
6209 target = expand_builtin___clear_cache (exp);
6210 if (target)
6211 return target;
6212 break;
6213
6214 case BUILT_IN_CLASSIFY_TYPE:
6215 return expand_builtin_classify_type (exp);
6216
6217 case BUILT_IN_CONSTANT_P:
6218 return const0_rtx;
6219
6220 case BUILT_IN_FRAME_ADDRESS:
6221 case BUILT_IN_RETURN_ADDRESS:
6222 return expand_builtin_frame_address (fndecl, exp);
6223
6224 /* Returns the address of the area where the structure is returned.
6225 0 otherwise. */
6226 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6227 if (call_expr_nargs (exp) != 0
6228 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6229 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6230 return const0_rtx;
6231 else
6232 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6233
6234 case BUILT_IN_ALLOCA:
6235 case BUILT_IN_ALLOCA_WITH_ALIGN:
6236 /* If the allocation stems from the declaration of a variable-sized
6237 object, it cannot accumulate. */
6238 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6239 if (target)
6240 return target;
6241 break;
6242
6243 case BUILT_IN_STACK_SAVE:
6244 return expand_stack_save ();
6245
6246 case BUILT_IN_STACK_RESTORE:
6247 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6248 return const0_rtx;
6249
6250 case BUILT_IN_BSWAP16:
6251 case BUILT_IN_BSWAP32:
6252 case BUILT_IN_BSWAP64:
6253 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6254 if (target)
6255 return target;
6256 break;
6257
6258 CASE_INT_FN (BUILT_IN_FFS):
6259 target = expand_builtin_unop (target_mode, exp, target,
6260 subtarget, ffs_optab);
6261 if (target)
6262 return target;
6263 break;
6264
6265 CASE_INT_FN (BUILT_IN_CLZ):
6266 target = expand_builtin_unop (target_mode, exp, target,
6267 subtarget, clz_optab);
6268 if (target)
6269 return target;
6270 break;
6271
6272 CASE_INT_FN (BUILT_IN_CTZ):
6273 target = expand_builtin_unop (target_mode, exp, target,
6274 subtarget, ctz_optab);
6275 if (target)
6276 return target;
6277 break;
6278
6279 CASE_INT_FN (BUILT_IN_CLRSB):
6280 target = expand_builtin_unop (target_mode, exp, target,
6281 subtarget, clrsb_optab);
6282 if (target)
6283 return target;
6284 break;
6285
6286 CASE_INT_FN (BUILT_IN_POPCOUNT):
6287 target = expand_builtin_unop (target_mode, exp, target,
6288 subtarget, popcount_optab);
6289 if (target)
6290 return target;
6291 break;
6292
6293 CASE_INT_FN (BUILT_IN_PARITY):
6294 target = expand_builtin_unop (target_mode, exp, target,
6295 subtarget, parity_optab);
6296 if (target)
6297 return target;
6298 break;
6299
6300 case BUILT_IN_STRLEN:
6301 target = expand_builtin_strlen (exp, target, target_mode);
6302 if (target)
6303 return target;
6304 break;
6305
6306 case BUILT_IN_STRCPY:
6307 target = expand_builtin_strcpy (exp, target);
6308 if (target)
6309 return target;
6310 break;
6311
6312 case BUILT_IN_STRNCPY:
6313 target = expand_builtin_strncpy (exp, target);
6314 if (target)
6315 return target;
6316 break;
6317
6318 case BUILT_IN_STPCPY:
6319 target = expand_builtin_stpcpy (exp, target, mode);
6320 if (target)
6321 return target;
6322 break;
6323
6324 case BUILT_IN_MEMCPY:
6325 target = expand_builtin_memcpy (exp, target);
6326 if (target)
6327 return target;
6328 break;
6329
6330 case BUILT_IN_MEMPCPY:
6331 target = expand_builtin_mempcpy (exp, target, mode);
6332 if (target)
6333 return target;
6334 break;
6335
6336 case BUILT_IN_MEMSET:
6337 target = expand_builtin_memset (exp, target, mode);
6338 if (target)
6339 return target;
6340 break;
6341
6342 case BUILT_IN_BZERO:
6343 target = expand_builtin_bzero (exp);
6344 if (target)
6345 return target;
6346 break;
6347
6348 case BUILT_IN_STRCMP:
6349 target = expand_builtin_strcmp (exp, target);
6350 if (target)
6351 return target;
6352 break;
6353
6354 case BUILT_IN_STRNCMP:
6355 target = expand_builtin_strncmp (exp, target, mode);
6356 if (target)
6357 return target;
6358 break;
6359
6360 case BUILT_IN_BCMP:
6361 case BUILT_IN_MEMCMP:
6362 target = expand_builtin_memcmp (exp, target, mode);
6363 if (target)
6364 return target;
6365 break;
6366
6367 case BUILT_IN_SETJMP:
6368 /* This should have been lowered to the builtins below. */
6369 gcc_unreachable ();
6370
6371 case BUILT_IN_SETJMP_SETUP:
6372 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6373 and the receiver label. */
6374 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6375 {
6376 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6377 VOIDmode, EXPAND_NORMAL);
6378 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6379 rtx_insn *label_r = label_rtx (label);
6380
6381 /* This is copied from the handling of non-local gotos. */
6382 expand_builtin_setjmp_setup (buf_addr, label_r);
6383 nonlocal_goto_handler_labels
6384 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6385 nonlocal_goto_handler_labels);
6386 /* ??? Do not let expand_label treat us as such since we would
6387 not want to be both on the list of non-local labels and on
6388 the list of forced labels. */
6389 FORCED_LABEL (label) = 0;
6390 return const0_rtx;
6391 }
6392 break;
6393
6394 case BUILT_IN_SETJMP_RECEIVER:
6395 /* __builtin_setjmp_receiver is passed the receiver label. */
6396 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6397 {
6398 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6399 rtx_insn *label_r = label_rtx (label);
6400
6401 expand_builtin_setjmp_receiver (label_r);
6402 return const0_rtx;
6403 }
6404 break;
6405
6406 /* __builtin_longjmp is passed a pointer to an array of five words.
6407 It's similar to the C library longjmp function but works with
6408 __builtin_setjmp above. */
6409 case BUILT_IN_LONGJMP:
6410 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6411 {
6412 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6413 VOIDmode, EXPAND_NORMAL);
6414 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6415
6416 if (value != const1_rtx)
6417 {
6418 error ("%<__builtin_longjmp%> second argument must be 1");
6419 return const0_rtx;
6420 }
6421
6422 expand_builtin_longjmp (buf_addr, value);
6423 return const0_rtx;
6424 }
6425 break;
6426
6427 case BUILT_IN_NONLOCAL_GOTO:
6428 target = expand_builtin_nonlocal_goto (exp);
6429 if (target)
6430 return target;
6431 break;
6432
6433 /* This updates the setjmp buffer that is its argument with the value
6434 of the current stack pointer. */
6435 case BUILT_IN_UPDATE_SETJMP_BUF:
6436 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6437 {
6438 rtx buf_addr
6439 = expand_normal (CALL_EXPR_ARG (exp, 0));
6440
6441 expand_builtin_update_setjmp_buf (buf_addr);
6442 return const0_rtx;
6443 }
6444 break;
6445
6446 case BUILT_IN_TRAP:
6447 expand_builtin_trap ();
6448 return const0_rtx;
6449
6450 case BUILT_IN_UNREACHABLE:
6451 expand_builtin_unreachable ();
6452 return const0_rtx;
6453
6454 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6455 case BUILT_IN_SIGNBITD32:
6456 case BUILT_IN_SIGNBITD64:
6457 case BUILT_IN_SIGNBITD128:
6458 target = expand_builtin_signbit (exp, target);
6459 if (target)
6460 return target;
6461 break;
6462
6463 /* Various hooks for the DWARF 2 __throw routine. */
6464 case BUILT_IN_UNWIND_INIT:
6465 expand_builtin_unwind_init ();
6466 return const0_rtx;
6467 case BUILT_IN_DWARF_CFA:
6468 return virtual_cfa_rtx;
6469 #ifdef DWARF2_UNWIND_INFO
6470 case BUILT_IN_DWARF_SP_COLUMN:
6471 return expand_builtin_dwarf_sp_column ();
6472 case BUILT_IN_INIT_DWARF_REG_SIZES:
6473 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6474 return const0_rtx;
6475 #endif
6476 case BUILT_IN_FROB_RETURN_ADDR:
6477 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6478 case BUILT_IN_EXTRACT_RETURN_ADDR:
6479 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6480 case BUILT_IN_EH_RETURN:
6481 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6482 CALL_EXPR_ARG (exp, 1));
6483 return const0_rtx;
6484 case BUILT_IN_EH_RETURN_DATA_REGNO:
6485 return expand_builtin_eh_return_data_regno (exp);
6486 case BUILT_IN_EXTEND_POINTER:
6487 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6488 case BUILT_IN_EH_POINTER:
6489 return expand_builtin_eh_pointer (exp);
6490 case BUILT_IN_EH_FILTER:
6491 return expand_builtin_eh_filter (exp);
6492 case BUILT_IN_EH_COPY_VALUES:
6493 return expand_builtin_eh_copy_values (exp);
6494
6495 case BUILT_IN_VA_START:
6496 return expand_builtin_va_start (exp);
6497 case BUILT_IN_VA_END:
6498 return expand_builtin_va_end (exp);
6499 case BUILT_IN_VA_COPY:
6500 return expand_builtin_va_copy (exp);
6501 case BUILT_IN_EXPECT:
6502 return expand_builtin_expect (exp, target);
6503 case BUILT_IN_ASSUME_ALIGNED:
6504 return expand_builtin_assume_aligned (exp, target);
6505 case BUILT_IN_PREFETCH:
6506 expand_builtin_prefetch (exp);
6507 return const0_rtx;
6508
6509 case BUILT_IN_INIT_TRAMPOLINE:
6510 return expand_builtin_init_trampoline (exp, true);
6511 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6512 return expand_builtin_init_trampoline (exp, false);
6513 case BUILT_IN_ADJUST_TRAMPOLINE:
6514 return expand_builtin_adjust_trampoline (exp);
6515
6516 case BUILT_IN_FORK:
6517 case BUILT_IN_EXECL:
6518 case BUILT_IN_EXECV:
6519 case BUILT_IN_EXECLP:
6520 case BUILT_IN_EXECLE:
6521 case BUILT_IN_EXECVP:
6522 case BUILT_IN_EXECVE:
6523 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6524 if (target)
6525 return target;
6526 break;
6527
6528 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6529 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6530 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6531 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6532 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6533 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6534 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6535 if (target)
6536 return target;
6537 break;
6538
6539 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6540 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6541 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6542 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6543 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6544 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6545 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6546 if (target)
6547 return target;
6548 break;
6549
6550 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6551 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6552 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6553 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6554 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6555 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6556 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6557 if (target)
6558 return target;
6559 break;
6560
6561 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6562 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6563 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6564 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6565 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6566 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6567 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6568 if (target)
6569 return target;
6570 break;
6571
6572 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6573 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6574 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6575 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6576 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6577 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6578 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6579 if (target)
6580 return target;
6581 break;
6582
6583 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6584 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6585 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6586 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6587 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6588 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6589 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6590 if (target)
6591 return target;
6592 break;
6593
6594 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6595 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6596 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6597 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6598 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6600 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6601 if (target)
6602 return target;
6603 break;
6604
6605 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6606 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6607 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6608 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6609 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6610 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6611 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6612 if (target)
6613 return target;
6614 break;
6615
6616 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6617 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6618 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6619 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6620 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6621 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6622 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6623 if (target)
6624 return target;
6625 break;
6626
6627 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6628 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6629 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6630 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6631 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6632 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6633 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6634 if (target)
6635 return target;
6636 break;
6637
6638 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6639 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6640 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6641 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6642 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6643 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6644 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6645 if (target)
6646 return target;
6647 break;
6648
6649 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6650 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6651 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6652 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6653 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6654 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6655 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6656 if (target)
6657 return target;
6658 break;
6659
6660 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6661 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6662 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6663 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6664 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6665 if (mode == VOIDmode)
6666 mode = TYPE_MODE (boolean_type_node);
6667 if (!target || !register_operand (target, mode))
6668 target = gen_reg_rtx (mode);
6669
6670 mode = get_builtin_sync_mode
6671 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6672 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6673 if (target)
6674 return target;
6675 break;
6676
6677 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6678 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6679 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6680 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6681 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6682 mode = get_builtin_sync_mode
6683 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6684 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6685 if (target)
6686 return target;
6687 break;
6688
6689 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6690 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6691 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6692 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6693 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6694 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6695 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6696 if (target)
6697 return target;
6698 break;
6699
6700 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6701 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6702 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6703 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6704 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6706 expand_builtin_sync_lock_release (mode, exp);
6707 return const0_rtx;
6708
6709 case BUILT_IN_SYNC_SYNCHRONIZE:
6710 expand_builtin_sync_synchronize ();
6711 return const0_rtx;
6712
6713 case BUILT_IN_ATOMIC_EXCHANGE_1:
6714 case BUILT_IN_ATOMIC_EXCHANGE_2:
6715 case BUILT_IN_ATOMIC_EXCHANGE_4:
6716 case BUILT_IN_ATOMIC_EXCHANGE_8:
6717 case BUILT_IN_ATOMIC_EXCHANGE_16:
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6719 target = expand_builtin_atomic_exchange (mode, exp, target);
6720 if (target)
6721 return target;
6722 break;
6723
6724 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6725 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6726 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6727 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6728 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6729 {
6730 unsigned int nargs, z;
6731 vec<tree, va_gc> *vec;
6732
6733 mode =
6734 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6735 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6736 if (target)
6737 return target;
6738
6739 /* If this is turned into an external library call, the weak parameter
6740 must be dropped to match the expected parameter list. */
6741 nargs = call_expr_nargs (exp);
6742 vec_alloc (vec, nargs - 1);
6743 for (z = 0; z < 3; z++)
6744 vec->quick_push (CALL_EXPR_ARG (exp, z));
6745 /* Skip the boolean weak parameter. */
6746 for (z = 4; z < 6; z++)
6747 vec->quick_push (CALL_EXPR_ARG (exp, z));
6748 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6749 break;
6750 }
6751
6752 case BUILT_IN_ATOMIC_LOAD_1:
6753 case BUILT_IN_ATOMIC_LOAD_2:
6754 case BUILT_IN_ATOMIC_LOAD_4:
6755 case BUILT_IN_ATOMIC_LOAD_8:
6756 case BUILT_IN_ATOMIC_LOAD_16:
6757 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6758 target = expand_builtin_atomic_load (mode, exp, target);
6759 if (target)
6760 return target;
6761 break;
6762
6763 case BUILT_IN_ATOMIC_STORE_1:
6764 case BUILT_IN_ATOMIC_STORE_2:
6765 case BUILT_IN_ATOMIC_STORE_4:
6766 case BUILT_IN_ATOMIC_STORE_8:
6767 case BUILT_IN_ATOMIC_STORE_16:
6768 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6769 target = expand_builtin_atomic_store (mode, exp);
6770 if (target)
6771 return const0_rtx;
6772 break;
6773
6774 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6775 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6776 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6777 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6778 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6779 {
6780 enum built_in_function lib;
6781 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6782 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6783 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6784 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6785 ignore, lib);
6786 if (target)
6787 return target;
6788 break;
6789 }
6790 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6791 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6792 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6793 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6794 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6795 {
6796 enum built_in_function lib;
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6798 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6799 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6800 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6801 ignore, lib);
6802 if (target)
6803 return target;
6804 break;
6805 }
6806 case BUILT_IN_ATOMIC_AND_FETCH_1:
6807 case BUILT_IN_ATOMIC_AND_FETCH_2:
6808 case BUILT_IN_ATOMIC_AND_FETCH_4:
6809 case BUILT_IN_ATOMIC_AND_FETCH_8:
6810 case BUILT_IN_ATOMIC_AND_FETCH_16:
6811 {
6812 enum built_in_function lib;
6813 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6814 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6815 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6816 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6817 ignore, lib);
6818 if (target)
6819 return target;
6820 break;
6821 }
6822 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6823 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6824 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6825 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6826 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6827 {
6828 enum built_in_function lib;
6829 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6830 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6831 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6832 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6833 ignore, lib);
6834 if (target)
6835 return target;
6836 break;
6837 }
6838 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6839 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6840 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6841 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6842 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6843 {
6844 enum built_in_function lib;
6845 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6846 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6847 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6848 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6849 ignore, lib);
6850 if (target)
6851 return target;
6852 break;
6853 }
6854 case BUILT_IN_ATOMIC_OR_FETCH_1:
6855 case BUILT_IN_ATOMIC_OR_FETCH_2:
6856 case BUILT_IN_ATOMIC_OR_FETCH_4:
6857 case BUILT_IN_ATOMIC_OR_FETCH_8:
6858 case BUILT_IN_ATOMIC_OR_FETCH_16:
6859 {
6860 enum built_in_function lib;
6861 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6862 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6863 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6864 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6865 ignore, lib);
6866 if (target)
6867 return target;
6868 break;
6869 }
6870 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6871 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6872 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6873 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6874 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6875 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6876 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6877 ignore, BUILT_IN_NONE);
6878 if (target)
6879 return target;
6880 break;
6881
6882 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6883 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6884 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6885 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6886 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6887 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6888 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6889 ignore, BUILT_IN_NONE);
6890 if (target)
6891 return target;
6892 break;
6893
6894 case BUILT_IN_ATOMIC_FETCH_AND_1:
6895 case BUILT_IN_ATOMIC_FETCH_AND_2:
6896 case BUILT_IN_ATOMIC_FETCH_AND_4:
6897 case BUILT_IN_ATOMIC_FETCH_AND_8:
6898 case BUILT_IN_ATOMIC_FETCH_AND_16:
6899 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6900 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6901 ignore, BUILT_IN_NONE);
6902 if (target)
6903 return target;
6904 break;
6905
6906 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6907 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6908 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6909 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6910 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6911 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6912 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6913 ignore, BUILT_IN_NONE);
6914 if (target)
6915 return target;
6916 break;
6917
6918 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6919 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6920 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6921 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6922 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6923 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6924 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6925 ignore, BUILT_IN_NONE);
6926 if (target)
6927 return target;
6928 break;
6929
6930 case BUILT_IN_ATOMIC_FETCH_OR_1:
6931 case BUILT_IN_ATOMIC_FETCH_OR_2:
6932 case BUILT_IN_ATOMIC_FETCH_OR_4:
6933 case BUILT_IN_ATOMIC_FETCH_OR_8:
6934 case BUILT_IN_ATOMIC_FETCH_OR_16:
6935 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6936 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6937 ignore, BUILT_IN_NONE);
6938 if (target)
6939 return target;
6940 break;
6941
6942 case BUILT_IN_ATOMIC_TEST_AND_SET:
6943 return expand_builtin_atomic_test_and_set (exp, target);
6944
6945 case BUILT_IN_ATOMIC_CLEAR:
6946 return expand_builtin_atomic_clear (exp);
6947
6948 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6949 return expand_builtin_atomic_always_lock_free (exp);
6950
6951 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6952 target = expand_builtin_atomic_is_lock_free (exp);
6953 if (target)
6954 return target;
6955 break;
6956
6957 case BUILT_IN_ATOMIC_THREAD_FENCE:
6958 expand_builtin_atomic_thread_fence (exp);
6959 return const0_rtx;
6960
6961 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6962 expand_builtin_atomic_signal_fence (exp);
6963 return const0_rtx;
6964
6965 case BUILT_IN_OBJECT_SIZE:
6966 return expand_builtin_object_size (exp);
6967
6968 case BUILT_IN_MEMCPY_CHK:
6969 case BUILT_IN_MEMPCPY_CHK:
6970 case BUILT_IN_MEMMOVE_CHK:
6971 case BUILT_IN_MEMSET_CHK:
6972 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6973 if (target)
6974 return target;
6975 break;
6976
6977 case BUILT_IN_STRCPY_CHK:
6978 case BUILT_IN_STPCPY_CHK:
6979 case BUILT_IN_STRNCPY_CHK:
6980 case BUILT_IN_STPNCPY_CHK:
6981 case BUILT_IN_STRCAT_CHK:
6982 case BUILT_IN_STRNCAT_CHK:
6983 case BUILT_IN_SNPRINTF_CHK:
6984 case BUILT_IN_VSNPRINTF_CHK:
6985 maybe_emit_chk_warning (exp, fcode);
6986 break;
6987
6988 case BUILT_IN_SPRINTF_CHK:
6989 case BUILT_IN_VSPRINTF_CHK:
6990 maybe_emit_sprintf_chk_warning (exp, fcode);
6991 break;
6992
6993 case BUILT_IN_FREE:
6994 if (warn_free_nonheap_object)
6995 maybe_emit_free_warning (exp);
6996 break;
6997
6998 case BUILT_IN_THREAD_POINTER:
6999 return expand_builtin_thread_pointer (exp, target);
7000
7001 case BUILT_IN_SET_THREAD_POINTER:
7002 expand_builtin_set_thread_pointer (exp);
7003 return const0_rtx;
7004
7005 case BUILT_IN_CILK_DETACH:
7006 expand_builtin_cilk_detach (exp);
7007 return const0_rtx;
7008
7009 case BUILT_IN_CILK_POP_FRAME:
7010 expand_builtin_cilk_pop_frame (exp);
7011 return const0_rtx;
7012
7013 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7014 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7015 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7016 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7017 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7018 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7019 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7020 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7021 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7022 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7023 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7024 /* We allow user CHKP builtins if Pointer Bounds
7025 Checker is off. */
7026 if (!chkp_function_instrumented_p (current_function_decl))
7027 {
7028 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7029 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7030 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7031 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7032 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7033 return expand_normal (CALL_EXPR_ARG (exp, 0));
7034 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7035 return expand_normal (size_zero_node);
7036 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7037 return expand_normal (size_int (-1));
7038 else
7039 return const0_rtx;
7040 }
7041 /* FALLTHROUGH */
7042
7043 case BUILT_IN_CHKP_BNDMK:
7044 case BUILT_IN_CHKP_BNDSTX:
7045 case BUILT_IN_CHKP_BNDCL:
7046 case BUILT_IN_CHKP_BNDCU:
7047 case BUILT_IN_CHKP_BNDLDX:
7048 case BUILT_IN_CHKP_BNDRET:
7049 case BUILT_IN_CHKP_INTERSECT:
7050 case BUILT_IN_CHKP_NARROW:
7051 case BUILT_IN_CHKP_EXTRACT_LOWER:
7052 case BUILT_IN_CHKP_EXTRACT_UPPER:
7053 /* A software implementation of the Pointer Bounds Checker is not yet
7054 implemented; target support is required. */
7055 error ("Your target platform does not support -fcheck-pointer-bounds");
7056 break;
7057
7058 case BUILT_IN_ACC_ON_DEVICE:
7059 target = expand_builtin_acc_on_device (exp, target);
7060 if (target)
7061 return target;
7062 break;
7063
7064 default: /* just do library call, if unknown builtin */
7065 break;
7066 }
7067
7068 /* The switch statement above can drop through to cause the function
7069 to be called normally. */
7070 return expand_call (exp, target, ignore);
7071 }
7072
7073 /* Similar to expand_builtin but is used for instrumented calls. */
7074
7075 rtx
7076 expand_builtin_with_bounds (tree exp, rtx target,
7077 rtx subtarget ATTRIBUTE_UNUSED,
7078 machine_mode mode, int ignore)
7079 {
7080 tree fndecl = get_callee_fndecl (exp);
7081 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7082
7083 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7084
7085 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7086 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7087
7088 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7089 && fcode < END_CHKP_BUILTINS);
7090
7091 switch (fcode)
7092 {
7093 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7094 target = expand_builtin_memcpy_with_bounds (exp, target);
7095 if (target)
7096 return target;
7097 break;
7098
7099 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7100 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7101 if (target)
7102 return target;
7103 break;
7104
7105 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7106 target = expand_builtin_memset_with_bounds (exp, target, mode);
7107 if (target)
7108 return target;
7109 break;
7110
7111 default:
7112 break;
7113 }
7114
7115 /* The switch statement above can drop through to cause the function
7116 to be called normally. */
7117 return expand_call (exp, target, ignore);
7118 }
7119
7120 /* Determine whether a tree node represents a call to a built-in
7121 function. If the tree T is a call to a built-in function with
7122 the right number of arguments of the appropriate types, return
7123 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7124 Otherwise the return value is END_BUILTINS. */
7125
7126 enum built_in_function
7127 builtin_mathfn_code (const_tree t)
7128 {
7129 const_tree fndecl, arg, parmlist;
7130 const_tree argtype, parmtype;
7131 const_call_expr_arg_iterator iter;
7132
7133 if (TREE_CODE (t) != CALL_EXPR
7134 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7135 return END_BUILTINS;
7136
7137 fndecl = get_callee_fndecl (t);
7138 if (fndecl == NULL_TREE
7139 || TREE_CODE (fndecl) != FUNCTION_DECL
7140 || ! DECL_BUILT_IN (fndecl)
7141 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7142 return END_BUILTINS;
7143
7144 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7145 init_const_call_expr_arg_iterator (t, &iter);
7146 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7147 {
7148 /* If a function doesn't take a variable number of arguments,
7149 the last element in the list will have type `void'. */
7150 parmtype = TREE_VALUE (parmlist);
7151 if (VOID_TYPE_P (parmtype))
7152 {
7153 if (more_const_call_expr_args_p (&iter))
7154 return END_BUILTINS;
7155 return DECL_FUNCTION_CODE (fndecl);
7156 }
7157
7158 if (! more_const_call_expr_args_p (&iter))
7159 return END_BUILTINS;
7160
7161 arg = next_const_call_expr_arg (&iter);
7162 argtype = TREE_TYPE (arg);
7163
7164 if (SCALAR_FLOAT_TYPE_P (parmtype))
7165 {
7166 if (! SCALAR_FLOAT_TYPE_P (argtype))
7167 return END_BUILTINS;
7168 }
7169 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7170 {
7171 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7172 return END_BUILTINS;
7173 }
7174 else if (POINTER_TYPE_P (parmtype))
7175 {
7176 if (! POINTER_TYPE_P (argtype))
7177 return END_BUILTINS;
7178 }
7179 else if (INTEGRAL_TYPE_P (parmtype))
7180 {
7181 if (! INTEGRAL_TYPE_P (argtype))
7182 return END_BUILTINS;
7183 }
7184 else
7185 return END_BUILTINS;
7186 }
7187
7188 /* Variable-length argument list. */
7189 return DECL_FUNCTION_CODE (fndecl);
7190 }
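/* For example, a well-formed call sqrt (x) with X of real type yields
   BUILT_IN_SQRT here, while sqrt applied to, say, a pointer argument
   fails the parameter/argument type walk above and yields
   END_BUILTINS.  */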
7191
7192 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7193 evaluate to a constant. */
7194
7195 static tree
7196 fold_builtin_constant_p (tree arg)
7197 {
7198 /* We return 1 for a numeric type that's known to be a constant
7199 value at compile-time or for an aggregate type that's a
7200 literal constant. */
7201 STRIP_NOPS (arg);
7202
7203 /* If we know this is a constant, return the constant one. */
7204 if (CONSTANT_CLASS_P (arg)
7205 || (TREE_CODE (arg) == CONSTRUCTOR
7206 && TREE_CONSTANT (arg)))
7207 return integer_one_node;
7208 if (TREE_CODE (arg) == ADDR_EXPR)
7209 {
7210 tree op = TREE_OPERAND (arg, 0);
7211 if (TREE_CODE (op) == STRING_CST
7212 || (TREE_CODE (op) == ARRAY_REF
7213 && integer_zerop (TREE_OPERAND (op, 1))
7214 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7215 return integer_one_node;
7216 }
7217
7218 /* If this expression has side effects, show we don't know it to be a
7219 constant. Likewise if it's a pointer or aggregate type, since in
7220 those cases we only want literals, as those are only optimized
7221 when generating RTL, not later.
7222 And finally, if we are compiling an initializer, not code, we
7223 need to return a definite result now; there's not going to be any
7224 more optimization done. */
7225 if (TREE_SIDE_EFFECTS (arg)
7226 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7227 || POINTER_TYPE_P (TREE_TYPE (arg))
7228 || cfun == 0
7229 || folding_initializer
7230 || force_folding_builtin_constant_p)
7231 return integer_zero_node;
7232
7233 return NULL_TREE;
7234 }
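/* Some illustrative results:
     __builtin_constant_p (42)    -> 1
     __builtin_constant_p ("abc") -> 1
     __builtin_constant_p (ptr)   -> 0  (pointer type: only literals count)
   and __builtin_constant_p (n) for a plain integer variable stays
   unfolded (NULL_TREE) until a definite answer is forced.  */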
7235
7236 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7237 return it as a truthvalue. */
7238
7239 static tree
7240 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7241 tree predictor)
7242 {
7243 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7244
7245 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7246 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7247 ret_type = TREE_TYPE (TREE_TYPE (fn));
7248 pred_type = TREE_VALUE (arg_types);
7249 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7250
7251 pred = fold_convert_loc (loc, pred_type, pred);
7252 expected = fold_convert_loc (loc, expected_type, expected);
7253 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7254 predictor);
7255
7256 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7257 build_int_cst (ret_type, 0));
7258 }
7259
7260 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7261 NULL_TREE if no simplification is possible. */
7262
7263 tree
7264 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7265 {
7266 tree inner, fndecl, inner_arg0;
7267 enum tree_code code;
7268
7269 /* Distribute the expected value over short-circuiting operators.
7270 See through the cast from truthvalue_type_node to long. */
7271 inner_arg0 = arg0;
7272 while (CONVERT_EXPR_P (inner_arg0)
7273 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7274 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7275 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7276
7277 /* If this is a builtin_expect within a builtin_expect, keep the
7278 inner one. See through a comparison against a constant. It
7279 might have been added to create a truthvalue. */
7280 inner = inner_arg0;
7281
7282 if (COMPARISON_CLASS_P (inner)
7283 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7284 inner = TREE_OPERAND (inner, 0);
7285
7286 if (TREE_CODE (inner) == CALL_EXPR
7287 && (fndecl = get_callee_fndecl (inner))
7288 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7289 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7290 return arg0;
7291
7292 inner = inner_arg0;
7293 code = TREE_CODE (inner);
7294 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7295 {
7296 tree op0 = TREE_OPERAND (inner, 0);
7297 tree op1 = TREE_OPERAND (inner, 1);
7298
7299 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7300 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7301 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7302
7303 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7304 }
7305
7306 /* If the argument isn't invariant then there's nothing else we can do. */
7307 if (!TREE_CONSTANT (inner_arg0))
7308 return NULL_TREE;
7309
7310 /* If we expect that a comparison against the argument will fold to
7311 a constant, return the constant. In practice, this means a true
7312 constant or the address of a non-weak symbol. */
7313 inner = inner_arg0;
7314 STRIP_NOPS (inner);
7315 if (TREE_CODE (inner) == ADDR_EXPR)
7316 {
7317 do
7318 {
7319 inner = TREE_OPERAND (inner, 0);
7320 }
7321 while (TREE_CODE (inner) == COMPONENT_REF
7322 || TREE_CODE (inner) == ARRAY_REF);
7323 if ((TREE_CODE (inner) == VAR_DECL
7324 || TREE_CODE (inner) == FUNCTION_DECL)
7325 && DECL_WEAK (inner))
7326 return NULL_TREE;
7327 }
7328
7329 /* Otherwise, ARG0 already has the proper type for the return value. */
7330 return arg0;
7331 }
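/* For example, the expectation distributes over the short-circuit
   operators, so (ignoring the truthvalue <-> long conversions)
     __builtin_expect (a && b, 1)
   folds to
     __builtin_expect (a, 1) && __builtin_expect (b, 1).  */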
7332
7333 /* Fold a call to __builtin_classify_type with argument ARG. */
7334
7335 static tree
7336 fold_builtin_classify_type (tree arg)
7337 {
7338 if (arg == 0)
7339 return build_int_cst (integer_type_node, no_type_class);
7340
7341 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7342 }
7343
7344 /* Fold a call to __builtin_strlen with argument ARG. */
7345
7346 static tree
7347 fold_builtin_strlen (location_t loc, tree type, tree arg)
7348 {
7349 if (!validate_arg (arg, POINTER_TYPE))
7350 return NULL_TREE;
7351 else
7352 {
7353 tree len = c_strlen (arg, 0);
7354
7355 if (len)
7356 return fold_convert_loc (loc, type, len);
7357
7358 return NULL_TREE;
7359 }
7360 }
7361
7362 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7363
7364 static tree
7365 fold_builtin_inf (location_t loc, tree type, int warn)
7366 {
7367 REAL_VALUE_TYPE real;
7368
7369 /* __builtin_inff is intended to be usable to define INFINITY on all
7370 targets. If an infinity is not available, INFINITY expands "to a
7371 positive constant of type float that overflows at translation
7372 time", footnote "In this case, using INFINITY will violate the
7373 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7374 Thus we pedwarn to ensure this constraint violation is
7375 diagnosed. */
7376 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7377 pedwarn (loc, 0, "target format does not support infinity");
7378
7379 real_inf (&real);
7380 return build_real (type, real);
7381 }
7382
7383 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7384
7385 static tree
7386 fold_builtin_nan (tree arg, tree type, int quiet)
7387 {
7388 REAL_VALUE_TYPE real;
7389 const char *str;
7390
7391 if (!validate_arg (arg, POINTER_TYPE))
7392 return NULL_TREE;
7393 str = c_getstr (arg);
7394 if (!str)
7395 return NULL_TREE;
7396
7397 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7398 return NULL_TREE;
7399
7400 return build_real (type, real);
7401 }
7402
7403 /* Return true if the floating point expression T has an integer value.
7404 We also allow +Inf, -Inf and NaN to be considered integer values. */
7405
7406 static bool
7407 integer_valued_real_p (tree t)
7408 {
7409 switch (TREE_CODE (t))
7410 {
7411 case FLOAT_EXPR:
7412 return true;
7413
7414 case ABS_EXPR:
7415 case SAVE_EXPR:
7416 return integer_valued_real_p (TREE_OPERAND (t, 0));
7417
7418 case COMPOUND_EXPR:
7419 case MODIFY_EXPR:
7420 case BIND_EXPR:
7421 return integer_valued_real_p (TREE_OPERAND (t, 1));
7422
7423 case PLUS_EXPR:
7424 case MINUS_EXPR:
7425 case MULT_EXPR:
7426 case MIN_EXPR:
7427 case MAX_EXPR:
7428 return integer_valued_real_p (TREE_OPERAND (t, 0))
7429 && integer_valued_real_p (TREE_OPERAND (t, 1));
7430
7431 case COND_EXPR:
7432 return integer_valued_real_p (TREE_OPERAND (t, 1))
7433 && integer_valued_real_p (TREE_OPERAND (t, 2));
7434
7435 case REAL_CST:
7436 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7437
7438 CASE_CONVERT:
7439 {
7440 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7441 if (TREE_CODE (type) == INTEGER_TYPE)
7442 return true;
7443 if (TREE_CODE (type) == REAL_TYPE)
7444 return integer_valued_real_p (TREE_OPERAND (t, 0));
7445 break;
7446 }
7447
7448 case CALL_EXPR:
7449 switch (builtin_mathfn_code (t))
7450 {
7451 CASE_FLT_FN (BUILT_IN_CEIL):
7452 CASE_FLT_FN (BUILT_IN_FLOOR):
7453 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7454 CASE_FLT_FN (BUILT_IN_RINT):
7455 CASE_FLT_FN (BUILT_IN_ROUND):
7456 CASE_FLT_FN (BUILT_IN_TRUNC):
7457 return true;
7458
7459 CASE_FLT_FN (BUILT_IN_FMIN):
7460 CASE_FLT_FN (BUILT_IN_FMAX):
7461 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7462 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7463
7464 default:
7465 break;
7466 }
7467 break;
7468
7469 default:
7470 break;
7471 }
7472 return false;
7473 }
7474
7475 /* FNDECL is assumed to be a builtin where truncation can be propagated
7476 across (for instance floor((double)f) == (double)floorf (f)).
7477 Do the transformation for a call with argument ARG. */
7478
7479 static tree
7480 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7481 {
7482 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7483
7484 if (!validate_arg (arg, REAL_TYPE))
7485 return NULL_TREE;
7486
7487 /* Integer rounding functions are idempotent. */
7488 if (fcode == builtin_mathfn_code (arg))
7489 return arg;
7490
7491 /* If argument is already integer valued, and we don't need to worry
7492 about setting errno, there's no need to perform rounding. */
7493 if (! flag_errno_math && integer_valued_real_p (arg))
7494 return arg;
7495
7496 if (optimize)
7497 {
7498 tree arg0 = strip_float_extensions (arg);
7499 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7500 tree newtype = TREE_TYPE (arg0);
7501 tree decl;
7502
7503 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7504 && (decl = mathfn_built_in (newtype, fcode)))
7505 return fold_convert_loc (loc, ftype,
7506 build_call_expr_loc (loc, decl, 1,
7507 fold_convert_loc (loc,
7508 newtype,
7509 arg0)));
7510 }
7511 return NULL_TREE;
7512 }
7513
7514 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7515 the argument, for instance lround((double)f) -> lroundf (f).
7516 Do the transformation for a call with argument ARG. */
7517
7518 static tree
7519 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7520 {
7521 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7522
7523 if (!validate_arg (arg, REAL_TYPE))
7524 return NULL_TREE;
7525
7526 /* If argument is already integer valued, and we don't need to worry
7527 about setting errno, there's no need to perform rounding. */
7528 if (! flag_errno_math && integer_valued_real_p (arg))
7529 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7530 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7531
7532 if (optimize)
7533 {
7534 tree ftype = TREE_TYPE (arg);
7535 tree arg0 = strip_float_extensions (arg);
7536 tree newtype = TREE_TYPE (arg0);
7537 tree decl;
7538
7539 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7540 && (decl = mathfn_built_in (newtype, fcode)))
7541 return build_call_expr_loc (loc, decl, 1,
7542 fold_convert_loc (loc, newtype, arg0));
7543 }
7544
7545 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7546 sizeof (int) == sizeof (long). */
7547 if (TYPE_PRECISION (integer_type_node)
7548 == TYPE_PRECISION (long_integer_type_node))
7549 {
7550 tree newfn = NULL_TREE;
7551 switch (fcode)
7552 {
7553 CASE_FLT_FN (BUILT_IN_ICEIL):
7554 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7555 break;
7556
7557 CASE_FLT_FN (BUILT_IN_IFLOOR):
7558 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7559 break;
7560
7561 CASE_FLT_FN (BUILT_IN_IROUND):
7562 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7563 break;
7564
7565 CASE_FLT_FN (BUILT_IN_IRINT):
7566 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7567 break;
7568
7569 default:
7570 break;
7571 }
7572
7573 if (newfn)
7574 {
7575 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7576 return fold_convert_loc (loc,
7577 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7578 }
7579 }
7580
7581 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7582 sizeof (long long) == sizeof (long). */
7583 if (TYPE_PRECISION (long_long_integer_type_node)
7584 == TYPE_PRECISION (long_integer_type_node))
7585 {
7586 tree newfn = NULL_TREE;
7587 switch (fcode)
7588 {
7589 CASE_FLT_FN (BUILT_IN_LLCEIL):
7590 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7591 break;
7592
7593 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7594 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7595 break;
7596
7597 CASE_FLT_FN (BUILT_IN_LLROUND):
7598 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7599 break;
7600
7601 CASE_FLT_FN (BUILT_IN_LLRINT):
7602 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7603 break;
7604
7605 default:
7606 break;
7607 }
7608
7609 if (newfn)
7610 {
7611 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7612 return fold_convert_loc (loc,
7613 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7614 }
7615 }
7616
7617 return NULL_TREE;
7618 }
7619
7620 /* Fold a call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7621 return type. Return NULL_TREE if no simplification can be made. */
7622
7623 static tree
7624 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7625 {
7626 tree res;
7627
7628 if (!validate_arg (arg, COMPLEX_TYPE)
7629 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7630 return NULL_TREE;
7631
7632 /* Calculate the result when the argument is a constant. */
7633 if (TREE_CODE (arg) == COMPLEX_CST
7634 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7635 type, mpfr_hypot)))
7636 return res;
7637
7638 if (TREE_CODE (arg) == COMPLEX_EXPR)
7639 {
7640 tree real = TREE_OPERAND (arg, 0);
7641 tree imag = TREE_OPERAND (arg, 1);
7642
7643 /* If either part is zero, cabs is fabs of the other. */
7644 if (real_zerop (real))
7645 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7646 if (real_zerop (imag))
7647 return fold_build1_loc (loc, ABS_EXPR, type, real);
7648
7649 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7650 if (flag_unsafe_math_optimizations
7651 && operand_equal_p (real, imag, OEP_PURE_SAME))
7652 {
7653 const REAL_VALUE_TYPE sqrt2_trunc
7654 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7655 STRIP_NOPS (real);
7656 return fold_build2_loc (loc, MULT_EXPR, type,
7657 fold_build1_loc (loc, ABS_EXPR, type, real),
7658 build_real (type, sqrt2_trunc));
7659 }
7660 }
7661
7662 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7663 if (TREE_CODE (arg) == NEGATE_EXPR
7664 || TREE_CODE (arg) == CONJ_EXPR)
7665 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7666
7667 /* Don't do this when optimizing for size. */
7668 if (flag_unsafe_math_optimizations
7669 && optimize && optimize_function_for_speed_p (cfun))
7670 {
7671 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7672
7673 if (sqrtfn != NULL_TREE)
7674 {
7675 tree rpart, ipart, result;
7676
7677 arg = builtin_save_expr (arg);
7678
7679 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7680 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7681
7682 rpart = builtin_save_expr (rpart);
7683 ipart = builtin_save_expr (ipart);
7684
7685 result = fold_build2_loc (loc, PLUS_EXPR, type,
7686 fold_build2_loc (loc, MULT_EXPR, type,
7687 rpart, rpart),
7688 fold_build2_loc (loc, MULT_EXPR, type,
7689 ipart, ipart));
7690
7691 return build_call_expr_loc (loc, sqrtfn, 1, result);
7692 }
7693 }
7694
7695 return NULL_TREE;
7696 }
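/* Under -funsafe-math-optimizations, when optimizing for speed the
   generic expansion above amounts to, roughly,
     cabs (z) -> sqrt (creal (z) * creal (z) + cimag (z) * cimag (z))
   with Z evaluated only once thanks to builtin_save_expr.  */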
7697
7698 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7699 complex tree type of the result. If NEG is true, the imaginary
7700 zero is negative. */
7701
7702 static tree
7703 build_complex_cproj (tree type, bool neg)
7704 {
7705 REAL_VALUE_TYPE rinf, rzero = dconst0;
7706
7707 real_inf (&rinf);
7708 rzero.sign = neg;
7709 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7710 build_real (TREE_TYPE (type), rzero));
7711 }
7712
7713 /* Fold a call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7714 return type. Return NULL_TREE if no simplification can be made. */
7715
7716 static tree
7717 fold_builtin_cproj (location_t loc, tree arg, tree type)
7718 {
7719 if (!validate_arg (arg, COMPLEX_TYPE)
7720 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7721 return NULL_TREE;
7722
7723 /* If there are no infinities, return arg. */
7724 if (! HONOR_INFINITIES (type))
7725 return non_lvalue_loc (loc, arg);
7726
7727 /* Calculate the result when the argument is a constant. */
7728 if (TREE_CODE (arg) == COMPLEX_CST)
7729 {
7730 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7731 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7732
7733 if (real_isinf (real) || real_isinf (imag))
7734 return build_complex_cproj (type, imag->sign);
7735 else
7736 return arg;
7737 }
7738 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7739 {
7740 tree real = TREE_OPERAND (arg, 0);
7741 tree imag = TREE_OPERAND (arg, 1);
7742
7743 STRIP_NOPS (real);
7744 STRIP_NOPS (imag);
7745
7746 /* If the real part is inf and the imag part is known to be
7747 nonnegative, return (inf + 0i). Remember side-effects are
7748 possible in the imag part. */
7749 if (TREE_CODE (real) == REAL_CST
7750 && real_isinf (TREE_REAL_CST_PTR (real))
7751 && tree_expr_nonnegative_p (imag))
7752 return omit_one_operand_loc (loc, type,
7753 build_complex_cproj (type, false),
7754 arg);
7755
7756 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7757 Remember side-effects are possible in the real part. */
7758 if (TREE_CODE (imag) == REAL_CST
7759 && real_isinf (TREE_REAL_CST_PTR (imag)))
7760 return
7761 omit_one_operand_loc (loc, type,
7762 build_complex_cproj (type, TREE_REAL_CST_PTR
7763 (imag)->sign), arg);
7764 }
7765
7766 return NULL_TREE;
7767 }
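/* E.g. cproj (2.0 + 3.0i) is returned unchanged, while
   cproj (INFINITY - 1.0i) folds to (inf - 0.0i): any infinite input
   projects onto the point at infinity, preserving the sign of the
   imaginary part on the resulting zero.  */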
7768
7769 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7770 Return NULL_TREE if no simplification can be made. */
7771
7772 static tree
7773 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7774 {
7775
7776 enum built_in_function fcode;
7777 tree res;
7778
7779 if (!validate_arg (arg, REAL_TYPE))
7780 return NULL_TREE;
7781
7782 /* Calculate the result when the argument is a constant. */
7783 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7784 return res;
7785
7786 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7787 fcode = builtin_mathfn_code (arg);
7788 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7789 {
7790 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7791 arg = fold_build2_loc (loc, MULT_EXPR, type,
7792 CALL_EXPR_ARG (arg, 0),
7793 build_real (type, dconsthalf));
7794 return build_call_expr_loc (loc, expfn, 1, arg);
7795 }
7796
7797 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7798 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7799 {
7800 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7801
7802 if (powfn)
7803 {
7804 tree arg0 = CALL_EXPR_ARG (arg, 0);
7805 tree tree_root;
7806 /* The inner root was either sqrt or cbrt. */
7807 /* This was a conditional expression but it triggered a bug
7808 in Sun C 5.5. */
7809 REAL_VALUE_TYPE dconstroot;
7810 if (BUILTIN_SQRT_P (fcode))
7811 dconstroot = dconsthalf;
7812 else
7813 dconstroot = dconst_third ();
7814
7815 /* Adjust for the outer root. */
7816 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7817 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7818 tree_root = build_real (type, dconstroot);
7819 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7820 }
7821 }
7822
7823 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7824 if (flag_unsafe_math_optimizations
7825 && (fcode == BUILT_IN_POW
7826 || fcode == BUILT_IN_POWF
7827 || fcode == BUILT_IN_POWL))
7828 {
7829 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7830 tree arg0 = CALL_EXPR_ARG (arg, 0);
7831 tree arg1 = CALL_EXPR_ARG (arg, 1);
7832 tree narg1;
7833 if (!tree_expr_nonnegative_p (arg0))
7834 arg0 = build1 (ABS_EXPR, type, arg0);
7835 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7836 build_real (type, dconsthalf));
7837 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7838 }
7839
7840 return NULL_TREE;
7841 }
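/* Note how the "adjust for the outer root" step halves the inner
   exponent by decrementing its binary exponent field, so e.g.
     sqrt (sqrt (x)) -> pow (x, 1.0/4.0)
     sqrt (cbrt (x)) -> pow (x, 1.0/6.0).  */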
7842
7843 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7844 Return NULL_TREE if no simplification can be made. */
7845
7846 static tree
7847 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7848 {
7849 const enum built_in_function fcode = builtin_mathfn_code (arg);
7850 tree res;
7851
7852 if (!validate_arg (arg, REAL_TYPE))
7853 return NULL_TREE;
7854
7855 /* Calculate the result when the argument is a constant. */
7856 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7857 return res;
7858
7859 if (flag_unsafe_math_optimizations)
7860 {
7861 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7862 if (BUILTIN_EXPONENT_P (fcode))
7863 {
7864 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7865 const REAL_VALUE_TYPE third_trunc =
7866 real_value_truncate (TYPE_MODE (type), dconst_third ());
7867 arg = fold_build2_loc (loc, MULT_EXPR, type,
7868 CALL_EXPR_ARG (arg, 0),
7869 build_real (type, third_trunc));
7870 return build_call_expr_loc (loc, expfn, 1, arg);
7871 }
7872
7873 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7874 if (BUILTIN_SQRT_P (fcode))
7875 {
7876 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7877
7878 if (powfn)
7879 {
7880 tree arg0 = CALL_EXPR_ARG (arg, 0);
7881 tree tree_root;
7882 REAL_VALUE_TYPE dconstroot = dconst_third ();
7883
7884 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7885 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7886 tree_root = build_real (type, dconstroot);
7887 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7888 }
7889 }
7890
7891 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7892 if (BUILTIN_CBRT_P (fcode))
7893 {
7894 tree arg0 = CALL_EXPR_ARG (arg, 0);
7895 if (tree_expr_nonnegative_p (arg0))
7896 {
7897 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7898
7899 if (powfn)
7900 {
7901 tree tree_root;
7902 REAL_VALUE_TYPE dconstroot;
7903
7904 real_arithmetic (&dconstroot, MULT_EXPR,
7905 dconst_third_ptr (), dconst_third_ptr ());
7906 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7907 tree_root = build_real (type, dconstroot);
7908 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7909 }
7910 }
7911 }
7912
7913 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7914 if (fcode == BUILT_IN_POW
7915 || fcode == BUILT_IN_POWF
7916 || fcode == BUILT_IN_POWL)
7917 {
7918 tree arg00 = CALL_EXPR_ARG (arg, 0);
7919 tree arg01 = CALL_EXPR_ARG (arg, 1);
7920 if (tree_expr_nonnegative_p (arg00))
7921 {
7922 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7923 const REAL_VALUE_TYPE dconstroot
7924 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7925 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7926 build_real (type, dconstroot));
7927 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7928 }
7929 }
7930 }
7931 return NULL_TREE;
7932 }
7933
7934 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7935 TYPE is the type of the return value. Return NULL_TREE if no
7936 simplification can be made. */
7937
7938 static tree
7939 fold_builtin_cos (location_t loc,
7940 tree arg, tree type, tree fndecl)
7941 {
7942 tree res, narg;
7943
7944 if (!validate_arg (arg, REAL_TYPE))
7945 return NULL_TREE;
7946
7947 /* Calculate the result when the argument is a constant. */
7948 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7949 return res;
7950
7951 /* Optimize cos(-x) into cos (x). */
7952 if ((narg = fold_strip_sign_ops (arg)))
7953 return build_call_expr_loc (loc, fndecl, 1, narg);
7954
7955 return NULL_TREE;
7956 }
7957
7958 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7959 Return NULL_TREE if no simplification can be made. */
7960
7961 static tree
7962 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7963 {
7964 if (validate_arg (arg, REAL_TYPE))
7965 {
7966 tree res, narg;
7967
7968 /* Calculate the result when the argument is a constant. */
7969 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7970 return res;
7971
7972 /* Optimize cosh(-x) into cosh (x). */
7973 if ((narg = fold_strip_sign_ops (arg)))
7974 return build_call_expr_loc (loc, fndecl, 1, narg);
7975 }
7976
7977 return NULL_TREE;
7978 }
7979
7980 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7981 argument ARG. TYPE is the type of the return value. Return
7982 NULL_TREE if no simplification can be made. */
7983
7984 static tree
7985 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7986 bool hyper)
7987 {
7988 if (validate_arg (arg, COMPLEX_TYPE)
7989 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7990 {
7991 tree tmp;
7992
7993 /* Calculate the result when the argument is a constant. */
7994 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7995 return tmp;
7996
7997 /* Optimize fn(-x) into fn(x). */
7998 if ((tmp = fold_strip_sign_ops (arg)))
7999 return build_call_expr_loc (loc, fndecl, 1, tmp);
8000 }
8001
8002 return NULL_TREE;
8003 }
8004
8005 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8006 Return NULL_TREE if no simplification can be made. */
8007
8008 static tree
8009 fold_builtin_tan (tree arg, tree type)
8010 {
8011 enum built_in_function fcode;
8012 tree res;
8013
8014 if (!validate_arg (arg, REAL_TYPE))
8015 return NULL_TREE;
8016
8017 /* Calculate the result when the argument is a constant. */
8018 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8019 return res;
8020
8021 /* Optimize tan(atan(x)) = x. */
8022 fcode = builtin_mathfn_code (arg);
8023 if (flag_unsafe_math_optimizations
8024 && (fcode == BUILT_IN_ATAN
8025 || fcode == BUILT_IN_ATANF
8026 || fcode == BUILT_IN_ATANL))
8027 return CALL_EXPR_ARG (arg, 0);
8028
8029 return NULL_TREE;
8030 }
8031
8032 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8033 NULL_TREE if no simplification can be made. */
8034
8035 static tree
8036 fold_builtin_sincos (location_t loc,
8037 tree arg0, tree arg1, tree arg2)
8038 {
8039 tree type;
8040 tree res, fn, call;
8041
8042 if (!validate_arg (arg0, REAL_TYPE)
8043 || !validate_arg (arg1, POINTER_TYPE)
8044 || !validate_arg (arg2, POINTER_TYPE))
8045 return NULL_TREE;
8046
8047 type = TREE_TYPE (arg0);
8048
8049 /* Calculate the result when the argument is a constant. */
8050 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8051 return res;
8052
8053 /* Canonicalize sincos to cexpi. */
8054 if (!targetm.libc_has_function (function_c99_math_complex))
8055 return NULL_TREE;
8056 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8057 if (!fn)
8058 return NULL_TREE;
8059
8060 call = build_call_expr_loc (loc, fn, 1, arg0);
8061 call = builtin_save_expr (call);
8062
8063 return build2 (COMPOUND_EXPR, void_type_node,
8064 build2 (MODIFY_EXPR, void_type_node,
8065 build_fold_indirect_ref_loc (loc, arg1),
8066 build1 (IMAGPART_EXPR, type, call)),
8067 build2 (MODIFY_EXPR, void_type_node,
8068 build_fold_indirect_ref_loc (loc, arg2),
8069 build1 (REALPART_EXPR, type, call)));
8070 }
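/* The cexpi canonicalization rewrites, roughly,
     sincos (x, &s, &c);
   into
     tmp = cexpi (x); *s = __imag__ tmp; *c = __real__ tmp;
   so later passes see a single call with an ordinary return value.  */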
8071
8072 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8073 NULL_TREE if no simplification can be made. */
8074
8075 static tree
8076 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8077 {
8078 tree rtype;
8079 tree realp, imagp, ifn;
8080 tree res;
8081
8082 if (!validate_arg (arg0, COMPLEX_TYPE)
8083 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8084 return NULL_TREE;
8085
8086 /* Calculate the result when the argument is a constant. */
8087 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8088 return res;
8089
8090 rtype = TREE_TYPE (TREE_TYPE (arg0));
8091
8092 /* If we can determine the real part of arg0 and it is constant zero,
8093 fold to cexpi. */
8094 if (!targetm.libc_has_function (function_c99_math_complex))
8095 return NULL_TREE;
8096 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8097 if (!ifn)
8098 return NULL_TREE;
8099
8100 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8101 && real_zerop (realp))
8102 {
8103 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8104 return build_call_expr_loc (loc, ifn, 1, narg);
8105 }
8106
8107 /* If we can easily decompose the real and imaginary parts, split cexp
8108 into exp (r) * cexpi (i). */
8109 if (flag_unsafe_math_optimizations
8110 && realp)
8111 {
8112 tree rfn, rcall, icall;
8113
8114 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8115 if (!rfn)
8116 return NULL_TREE;
8117
8118 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8119 if (!imagp)
8120 return NULL_TREE;
8121
8122 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8123 icall = builtin_save_expr (icall);
8124 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8125 rcall = builtin_save_expr (rcall);
8126 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8127 fold_build2_loc (loc, MULT_EXPR, rtype,
8128 rcall,
8129 fold_build1_loc (loc, REALPART_EXPR,
8130 rtype, icall)),
8131 fold_build2_loc (loc, MULT_EXPR, rtype,
8132 rcall,
8133 fold_build1_loc (loc, IMAGPART_EXPR,
8134 rtype, icall)));
8135 }
8136
8137 return NULL_TREE;
8138 }
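/* E.g. with a provably zero real part
     cexp (0.0 + yi) -> cexpi (y)
   and, under -funsafe-math-optimizations, in the general case
     cexp (r + yi) -> exp (r) * cexpi (y)
   rebuilt as a COMPLEX_EXPR of the two scaled parts.  */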
8139
8140 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8141 Return NULL_TREE if no simplification can be made. */
8142
8143 static tree
8144 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8145 {
8146 if (!validate_arg (arg, REAL_TYPE))
8147 return NULL_TREE;
8148
8149 /* Optimize trunc of constant value. */
8150 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8151 {
8152 REAL_VALUE_TYPE r, x;
8153 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8154
8155 x = TREE_REAL_CST (arg);
8156 real_trunc (&r, TYPE_MODE (type), &x);
8157 return build_real (type, r);
8158 }
8159
8160 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8161 }
8162
8163 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8164 Return NULL_TREE if no simplification can be made. */
8165
8166 static tree
8167 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8168 {
8169 if (!validate_arg (arg, REAL_TYPE))
8170 return NULL_TREE;
8171
8172 /* Optimize floor of constant value. */
8173 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8174 {
8175 REAL_VALUE_TYPE x;
8176
8177 x = TREE_REAL_CST (arg);
8178 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8179 {
8180 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8181 REAL_VALUE_TYPE r;
8182
8183 real_floor (&r, TYPE_MODE (type), &x);
8184 return build_real (type, r);
8185 }
8186 }
8187
8188 /* Fold floor (x) where x is nonnegative to trunc (x). */
8189 if (tree_expr_nonnegative_p (arg))
8190 {
8191 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8192 if (truncfn)
8193 return build_call_expr_loc (loc, truncfn, 1, arg);
8194 }
8195
8196 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8197 }
8198
8199 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8200 Return NULL_TREE if no simplification can be made. */
8201
8202 static tree
8203 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8204 {
8205 if (!validate_arg (arg, REAL_TYPE))
8206 return NULL_TREE;
8207
8208 /* Optimize ceil of constant value. */
8209 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8210 {
8211 REAL_VALUE_TYPE x;
8212
8213 x = TREE_REAL_CST (arg);
8214 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8215 {
8216 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8217 REAL_VALUE_TYPE r;
8218
8219 real_ceil (&r, TYPE_MODE (type), &x);
8220 return build_real (type, r);
8221 }
8222 }
8223
8224 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8225 }
8226
8227 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8228 Return NULL_TREE if no simplification can be made. */
8229
8230 static tree
8231 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8232 {
8233 if (!validate_arg (arg, REAL_TYPE))
8234 return NULL_TREE;
8235
8236 /* Optimize round of constant value. */
8237 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8238 {
8239 REAL_VALUE_TYPE x;
8240
8241 x = TREE_REAL_CST (arg);
8242 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8243 {
8244 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8245 REAL_VALUE_TYPE r;
8246
8247 real_round (&r, TYPE_MODE (type), &x);
8248 return build_real (type, r);
8249 }
8250 }
8251
8252 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8253 }
8254
8255 /* Fold function call to builtin lround, lroundf or lroundl (or the
8256 corresponding long long versions) and other rounding functions. ARG
8257 is the argument to the call. Return NULL_TREE if no simplification
8258 can be made. */
8259
8260 static tree
8261 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8262 {
8263 if (!validate_arg (arg, REAL_TYPE))
8264 return NULL_TREE;
8265
8266 /* Optimize lround of constant value. */
8267 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8268 {
8269 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8270
8271 if (real_isfinite (&x))
8272 {
8273 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8274 tree ftype = TREE_TYPE (arg);
8275 REAL_VALUE_TYPE r;
8276 bool fail = false;
8277
8278 switch (DECL_FUNCTION_CODE (fndecl))
8279 {
8280 CASE_FLT_FN (BUILT_IN_IFLOOR):
8281 CASE_FLT_FN (BUILT_IN_LFLOOR):
8282 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8283 real_floor (&r, TYPE_MODE (ftype), &x);
8284 break;
8285
8286 CASE_FLT_FN (BUILT_IN_ICEIL):
8287 CASE_FLT_FN (BUILT_IN_LCEIL):
8288 CASE_FLT_FN (BUILT_IN_LLCEIL):
8289 real_ceil (&r, TYPE_MODE (ftype), &x);
8290 break;
8291
8292 CASE_FLT_FN (BUILT_IN_IROUND):
8293 CASE_FLT_FN (BUILT_IN_LROUND):
8294 CASE_FLT_FN (BUILT_IN_LLROUND):
8295 real_round (&r, TYPE_MODE (ftype), &x);
8296 break;
8297
8298 default:
8299 gcc_unreachable ();
8300 }
8301
8302 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8303 if (!fail)
8304 return wide_int_to_tree (itype, val);
8305 }
8306 }
8307
8308 switch (DECL_FUNCTION_CODE (fndecl))
8309 {
8310 CASE_FLT_FN (BUILT_IN_LFLOOR):
8311 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8312 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8313 if (tree_expr_nonnegative_p (arg))
8314 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8315 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8316 break;
8317 default:;
8318 }
8319
8320 return fold_fixed_mathfn (loc, fndecl, arg);
8321 }
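/* E.g. lround (2.6) folds to the integer constant 3 at compile time,
   and lfloor (x) with X known nonnegative becomes the plain
   conversion (long) x via FIX_TRUNC_EXPR.  */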
8322
8323 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8324 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8325 the argument to the call. Return NULL_TREE if no simplification can
8326 be made. */
8327
8328 static tree
8329 fold_builtin_bitop (tree fndecl, tree arg)
8330 {
8331 if (!validate_arg (arg, INTEGER_TYPE))
8332 return NULL_TREE;
8333
8334 /* Optimize for constant argument. */
8335 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8336 {
8337 tree type = TREE_TYPE (arg);
8338 int result;
8339
8340 switch (DECL_FUNCTION_CODE (fndecl))
8341 {
8342 CASE_INT_FN (BUILT_IN_FFS):
8343 result = wi::ffs (arg);
8344 break;
8345
8346 CASE_INT_FN (BUILT_IN_CLZ):
8347 if (wi::ne_p (arg, 0))
8348 result = wi::clz (arg);
8349 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8350 result = TYPE_PRECISION (type);
8351 break;
8352
8353 CASE_INT_FN (BUILT_IN_CTZ):
8354 if (wi::ne_p (arg, 0))
8355 result = wi::ctz (arg);
8356 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8357 result = TYPE_PRECISION (type);
8358 break;
8359
8360 CASE_INT_FN (BUILT_IN_CLRSB):
8361 result = wi::clrsb (arg);
8362 break;
8363
8364 CASE_INT_FN (BUILT_IN_POPCOUNT):
8365 result = wi::popcount (arg);
8366 break;
8367
8368 CASE_INT_FN (BUILT_IN_PARITY):
8369 result = wi::parity (arg);
8370 break;
8371
8372 default:
8373 gcc_unreachable ();
8374 }
8375
8376 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8377 }
8378
8379 return NULL_TREE;
8380 }
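/* E.g. __builtin_popcount (0xf0) folds to 4 and __builtin_ffs (8)
   folds to 4; for clz/ctz of zero the target's
   CLZ/CTZ_DEFINED_VALUE_AT_ZERO macro supplies the value, with the
   type precision as the fallback.  */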
8381
8382 /* Fold function call to builtin_bswap and the short, long and long long
8383 variants. Return NULL_TREE if no simplification can be made. */
8384 static tree
8385 fold_builtin_bswap (tree fndecl, tree arg)
8386 {
8387 if (! validate_arg (arg, INTEGER_TYPE))
8388 return NULL_TREE;
8389
8390 /* Optimize constant value. */
8391 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8392 {
8393 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8394
8395 switch (DECL_FUNCTION_CODE (fndecl))
8396 {
8397 case BUILT_IN_BSWAP16:
8398 case BUILT_IN_BSWAP32:
8399 case BUILT_IN_BSWAP64:
8400 {
8401 signop sgn = TYPE_SIGN (type);
8402 tree result =
8403 wide_int_to_tree (type,
8404 wide_int::from (arg, TYPE_PRECISION (type),
8405 sgn).bswap ());
8406 return result;
8407 }
8408 default:
8409 gcc_unreachable ();
8410 }
8411 }
8412
8413 return NULL_TREE;
8414 }
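/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412.  */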
8415
8416 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8417 NULL_TREE if no simplification can be made. */
8418
8419 static tree
8420 fold_builtin_hypot (location_t loc, tree fndecl,
8421 tree arg0, tree arg1, tree type)
8422 {
8423 tree res, narg0, narg1;
8424
8425 if (!validate_arg (arg0, REAL_TYPE)
8426 || !validate_arg (arg1, REAL_TYPE))
8427 return NULL_TREE;
8428
8429 /* Calculate the result when the argument is a constant. */
8430 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8431 return res;
8432
8433 /* If either argument to hypot has a negate or abs, strip that off.
8434 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8435 narg0 = fold_strip_sign_ops (arg0);
8436 narg1 = fold_strip_sign_ops (arg1);
8437 if (narg0 || narg1)
8438 {
8439 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8440 narg1 ? narg1 : arg1);
8441 }
8442
8443 /* If either argument is zero, hypot is fabs of the other. */
8444 if (real_zerop (arg0))
8445 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8446 else if (real_zerop (arg1))
8447 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8448
8449 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8450 if (flag_unsafe_math_optimizations
8451 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8452 {
8453 const REAL_VALUE_TYPE sqrt2_trunc
8454 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8455 return fold_build2_loc (loc, MULT_EXPR, type,
8456 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8457 build_real (type, sqrt2_trunc));
8458 }
8459
8460 return NULL_TREE;
8461 }
8462
8463
8464 /* Fold a builtin function call to pow, powf, or powl. Return
8465 NULL_TREE if no simplification can be made. */
8466 static tree
8467 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8468 {
8469 tree res;
8470
8471 if (!validate_arg (arg0, REAL_TYPE)
8472 || !validate_arg (arg1, REAL_TYPE))
8473 return NULL_TREE;
8474
8475 /* Calculate the result when the argument is a constant. */
8476 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8477 return res;
8478
8479 /* Optimize pow(1.0,y) = 1.0. */
8480 if (real_onep (arg0))
8481 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8482
8483 if (TREE_CODE (arg1) == REAL_CST
8484 && !TREE_OVERFLOW (arg1))
8485 {
8486 REAL_VALUE_TYPE cint;
8487 REAL_VALUE_TYPE c;
8488 HOST_WIDE_INT n;
8489
8490 c = TREE_REAL_CST (arg1);
8491
8492 /* Optimize pow(x,0.0) = 1.0. */
8493 if (REAL_VALUES_EQUAL (c, dconst0))
8494 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8495 arg0);
8496
8497 /* Optimize pow(x,1.0) = x. */
8498 if (REAL_VALUES_EQUAL (c, dconst1))
8499 return arg0;
8500
8501 /* Optimize pow(x,-1.0) = 1.0/x. */
8502 if (REAL_VALUES_EQUAL (c, dconstm1))
8503 return fold_build2_loc (loc, RDIV_EXPR, type,
8504 build_real (type, dconst1), arg0);
8505
8506 /* Optimize pow(x,0.5) = sqrt(x). */
8507 if (flag_unsafe_math_optimizations
8508 && REAL_VALUES_EQUAL (c, dconsthalf))
8509 {
8510 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8511
8512 if (sqrtfn != NULL_TREE)
8513 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8514 }
8515
8516 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8517 if (flag_unsafe_math_optimizations)
8518 {
8519 const REAL_VALUE_TYPE dconstroot
8520 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8521
8522 if (REAL_VALUES_EQUAL (c, dconstroot))
8523 {
8524 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8525 if (cbrtfn != NULL_TREE)
8526 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8527 }
8528 }
8529
8530 /* Check for an integer exponent. */
8531 n = real_to_integer (&c);
8532 real_from_integer (&cint, VOIDmode, n, SIGNED);
8533 if (real_identical (&c, &cint))
8534 {
8535 /* Attempt to evaluate pow at compile-time, unless this should
8536 raise an exception. */
8537 if (TREE_CODE (arg0) == REAL_CST
8538 && !TREE_OVERFLOW (arg0)
8539 && (n > 0
8540 || (!flag_trapping_math && !flag_errno_math)
8541 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8542 {
8543 REAL_VALUE_TYPE x;
8544 bool inexact;
8545
8546 x = TREE_REAL_CST (arg0);
8547 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8548 if (flag_unsafe_math_optimizations || !inexact)
8549 return build_real (type, x);
8550 }
8551
8552 /* Strip sign ops from even integer powers. */
8553 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8554 {
8555 tree narg0 = fold_strip_sign_ops (arg0);
8556 if (narg0)
8557 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8558 }
8559 }
8560 }
8561
8562 if (flag_unsafe_math_optimizations)
8563 {
8564 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8565
8566 /* Optimize pow(expN(x),y) = expN(x*y). */
8567 if (BUILTIN_EXPONENT_P (fcode))
8568 {
8569 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8570 tree arg = CALL_EXPR_ARG (arg0, 0);
8571 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8572 return build_call_expr_loc (loc, expfn, 1, arg);
8573 }
8574
8575 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8576 if (BUILTIN_SQRT_P (fcode))
8577 {
8578 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8579 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8580 build_real (type, dconsthalf));
8581 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8582 }
8583
8584 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8585 if (BUILTIN_CBRT_P (fcode))
8586 {
8587 tree arg = CALL_EXPR_ARG (arg0, 0);
8588 if (tree_expr_nonnegative_p (arg))
8589 {
8590 const REAL_VALUE_TYPE dconstroot
8591 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8592 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8593 build_real (type, dconstroot));
8594 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8595 }
8596 }
8597
8598 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8599 if (fcode == BUILT_IN_POW
8600 || fcode == BUILT_IN_POWF
8601 || fcode == BUILT_IN_POWL)
8602 {
8603 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8604 if (tree_expr_nonnegative_p (arg00))
8605 {
8606 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8607 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8608 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8609 }
8610 }
8611 }
8612
8613 return NULL_TREE;
8614 }
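/* Examples of the constant-exponent rules above:
     pow (x, -1.0)     -> 1.0 / x
     pow (-x, 4.0)     -> pow (x, 4.0)   (even integer power, unsafe math)
     pow (sqrt (x), y) -> pow (x, y * 0.5)  (unsafe math).  */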
8615
8616 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8617 Return NULL_TREE if no simplification can be made. */
8618 static tree
8619 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8620 tree arg0, tree arg1, tree type)
8621 {
8622 if (!validate_arg (arg0, REAL_TYPE)
8623 || !validate_arg (arg1, INTEGER_TYPE))
8624 return NULL_TREE;
8625
8626 /* Optimize pow(1.0,y) = 1.0. */
8627 if (real_onep (arg0))
8628 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8629
8630 if (tree_fits_shwi_p (arg1))
8631 {
8632 HOST_WIDE_INT c = tree_to_shwi (arg1);
8633
8634 /* Evaluate powi at compile-time. */
8635 if (TREE_CODE (arg0) == REAL_CST
8636 && !TREE_OVERFLOW (arg0))
8637 {
8638 REAL_VALUE_TYPE x;
8639 x = TREE_REAL_CST (arg0);
8640 real_powi (&x, TYPE_MODE (type), &x, c);
8641 return build_real (type, x);
8642 }
8643
8644 /* Optimize powi(x,0) = 1.0. */
8645 if (c == 0)
8646 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8647 arg0);
8648
8649 /* Optimize powi(x,1) = x. */
8650 if (c == 1)
8651 return arg0;
8652
8653 /* Optimize powi(x,-1) = 1.0/x. */
8654 if (c == -1)
8655 return fold_build2_loc (loc, RDIV_EXPR, type,
8656 build_real (type, dconst1), arg0);
8657 }
8658
8659 return NULL_TREE;
8660 }
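
/* Illustrative results of the powi folds above: powi (x, 0) becomes
   1.0 while still evaluating x for its side effects, powi (x, 1)
   becomes x itself, and powi (x, -1) becomes the division 1.0 / x.  */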
8661
8662 /* A subroutine of fold_builtin to fold the various exponent
8663 functions. Return NULL_TREE if no simplification can be made.
8664 FUNC is the corresponding MPFR exponent function. */
8665
8666 static tree
8667 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8668 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8669 {
8670 if (validate_arg (arg, REAL_TYPE))
8671 {
8672 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8673 tree res;
8674
8675 /* Calculate the result when the argument is a constant. */
8676 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8677 return res;
8678
8679 /* Optimize expN(logN(x)) = x. */
8680 if (flag_unsafe_math_optimizations)
8681 {
8682 const enum built_in_function fcode = builtin_mathfn_code (arg);
8683
8684 if ((func == mpfr_exp
8685 && (fcode == BUILT_IN_LOG
8686 || fcode == BUILT_IN_LOGF
8687 || fcode == BUILT_IN_LOGL))
8688 || (func == mpfr_exp2
8689 && (fcode == BUILT_IN_LOG2
8690 || fcode == BUILT_IN_LOG2F
8691 || fcode == BUILT_IN_LOG2L))
8692 || (func == mpfr_exp10
8693 && (fcode == BUILT_IN_LOG10
8694 || fcode == BUILT_IN_LOG10F
8695 || fcode == BUILT_IN_LOG10L)))
8696 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8697 }
8698 }
8699
8700 return NULL_TREE;
8701 }
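
/* For instance, under -funsafe-math-optimizations exp (log (x)) folds
   to x, and likewise exp2 (log2 (x)) and exp10 (log10 (x)); FUNC is
   compared against the MPFR evaluators above to pair each expN with
   its matching logN.  */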
8702
8703 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8704 arguments to the call, and TYPE is its return type.
8705 Return NULL_TREE if no simplification can be made. */
8706
8707 static tree
8708 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8709 {
8710 if (!validate_arg (arg1, POINTER_TYPE)
8711 || !validate_arg (arg2, INTEGER_TYPE)
8712 || !validate_arg (len, INTEGER_TYPE))
8713 return NULL_TREE;
8714 else
8715 {
8716 const char *p1;
8717
8718 if (TREE_CODE (arg2) != INTEGER_CST
8719 || !tree_fits_uhwi_p (len))
8720 return NULL_TREE;
8721
8722 p1 = c_getstr (arg1);
8723 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8724 {
8725 char c;
8726 const char *r;
8727 tree tem;
8728
8729 if (target_char_cast (arg2, &c))
8730 return NULL_TREE;
8731
8732 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8733
8734 if (r == NULL)
8735 return build_int_cst (TREE_TYPE (arg1), 0);
8736
8737 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8738 return fold_convert_loc (loc, type, tem);
8739 }
8740 return NULL_TREE;
8741 }
8742 }
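
/* A worked example of the memchr fold above: memchr ("hello", 'l', 5)
   finds the first 'l' at offset 2, so the call folds to the constant
   pointer expression arg1 + 2; a miss folds to a null pointer.  */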
8743
8744 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8745 Return NULL_TREE if no simplification can be made. */
8746
8747 static tree
8748 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8749 {
8750 const char *p1, *p2;
8751
8752 if (!validate_arg (arg1, POINTER_TYPE)
8753 || !validate_arg (arg2, POINTER_TYPE)
8754 || !validate_arg (len, INTEGER_TYPE))
8755 return NULL_TREE;
8756
8757 /* If the LEN parameter is zero, return zero. */
8758 if (integer_zerop (len))
8759 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8760 arg1, arg2);
8761
8762 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8763 if (operand_equal_p (arg1, arg2, 0))
8764 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8765
8766 p1 = c_getstr (arg1);
8767 p2 = c_getstr (arg2);
8768
8769 /* If all arguments are constant, and the value of len is not greater
8770 than the lengths of arg1 and arg2, evaluate at compile-time. */
8771 if (tree_fits_uhwi_p (len) && p1 && p2
8772 && compare_tree_int (len, strlen (p1) + 1) <= 0
8773 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8774 {
8775 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8776
8777 if (r > 0)
8778 return integer_one_node;
8779 else if (r < 0)
8780 return integer_minus_one_node;
8781 else
8782 return integer_zero_node;
8783 }
8784
8785 /* If the len parameter is one, return an expression corresponding to
8786 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8787 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8788 {
8789 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8790 tree cst_uchar_ptr_node
8791 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8792
8793 tree ind1
8794 = fold_convert_loc (loc, integer_type_node,
8795 build1 (INDIRECT_REF, cst_uchar_node,
8796 fold_convert_loc (loc,
8797 cst_uchar_ptr_node,
8798 arg1)));
8799 tree ind2
8800 = fold_convert_loc (loc, integer_type_node,
8801 build1 (INDIRECT_REF, cst_uchar_node,
8802 fold_convert_loc (loc,
8803 cst_uchar_ptr_node,
8804 arg2)));
8805 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8806 }
8807
8808 return NULL_TREE;
8809 }
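
/* For example, memcmp (p, q, 1) folds to the byte difference
   *(const unsigned char *) p - *(const unsigned char *) q, and a
   fully constant call such as memcmp ("ab", "ac", 2) folds straight
   to -1 at compile time.  */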
8810
8811 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8812 Return NULL_TREE if no simplification can be made. */
8813
8814 static tree
8815 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8816 {
8817 const char *p1, *p2;
8818
8819 if (!validate_arg (arg1, POINTER_TYPE)
8820 || !validate_arg (arg2, POINTER_TYPE))
8821 return NULL_TREE;
8822
8823 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8824 if (operand_equal_p (arg1, arg2, 0))
8825 return integer_zero_node;
8826
8827 p1 = c_getstr (arg1);
8828 p2 = c_getstr (arg2);
8829
8830 if (p1 && p2)
8831 {
8832 const int i = strcmp (p1, p2);
8833 if (i < 0)
8834 return integer_minus_one_node;
8835 else if (i > 0)
8836 return integer_one_node;
8837 else
8838 return integer_zero_node;
8839 }
8840
8841 /* If the second arg is "", return *(const unsigned char*)arg1. */
8842 if (p2 && *p2 == '\0')
8843 {
8844 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8845 tree cst_uchar_ptr_node
8846 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8847
8848 return fold_convert_loc (loc, integer_type_node,
8849 build1 (INDIRECT_REF, cst_uchar_node,
8850 fold_convert_loc (loc,
8851 cst_uchar_ptr_node,
8852 arg1)));
8853 }
8854
8855 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8856 if (p1 && *p1 == '\0')
8857 {
8858 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8859 tree cst_uchar_ptr_node
8860 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8861
8862 tree temp
8863 = fold_convert_loc (loc, integer_type_node,
8864 build1 (INDIRECT_REF, cst_uchar_node,
8865 fold_convert_loc (loc,
8866 cst_uchar_ptr_node,
8867 arg2)));
8868 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8869 }
8870
8871 return NULL_TREE;
8872 }
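
/* For example, strcmp (x, "") folds to *(const unsigned char *) x,
   strcmp ("", x) folds to its negation, and a fully constant call
   such as strcmp ("abc", "abd") folds to -1 at compile time.  */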
8873
8874 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8875 Return NULL_TREE if no simplification can be made. */
8876
8877 static tree
8878 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8879 {
8880 const char *p1, *p2;
8881
8882 if (!validate_arg (arg1, POINTER_TYPE)
8883 || !validate_arg (arg2, POINTER_TYPE)
8884 || !validate_arg (len, INTEGER_TYPE))
8885 return NULL_TREE;
8886
8887 /* If the LEN parameter is zero, return zero. */
8888 if (integer_zerop (len))
8889 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8890 arg1, arg2);
8891
8892 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8893 if (operand_equal_p (arg1, arg2, 0))
8894 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8895
8896 p1 = c_getstr (arg1);
8897 p2 = c_getstr (arg2);
8898
8899 if (tree_fits_uhwi_p (len) && p1 && p2)
8900 {
8901 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8902 if (i > 0)
8903 return integer_one_node;
8904 else if (i < 0)
8905 return integer_minus_one_node;
8906 else
8907 return integer_zero_node;
8908 }
8909
8910 /* If the second arg is "", and the length is greater than zero,
8911 return *(const unsigned char*)arg1. */
8912 if (p2 && *p2 == '\0'
8913 && TREE_CODE (len) == INTEGER_CST
8914 && tree_int_cst_sgn (len) == 1)
8915 {
8916 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8917 tree cst_uchar_ptr_node
8918 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8919
8920 return fold_convert_loc (loc, integer_type_node,
8921 build1 (INDIRECT_REF, cst_uchar_node,
8922 fold_convert_loc (loc,
8923 cst_uchar_ptr_node,
8924 arg1)));
8925 }
8926
8927 /* If the first arg is "", and the length is greater than zero,
8928 return -*(const unsigned char*)arg2. */
8929 if (p1 && *p1 == '\0'
8930 && TREE_CODE (len) == INTEGER_CST
8931 && tree_int_cst_sgn (len) == 1)
8932 {
8933 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8934 tree cst_uchar_ptr_node
8935 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8936
8937 tree temp = fold_convert_loc (loc, integer_type_node,
8938 build1 (INDIRECT_REF, cst_uchar_node,
8939 fold_convert_loc (loc,
8940 cst_uchar_ptr_node,
8941 arg2)));
8942 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8943 }
8944
8945 /* If the len parameter is one, return an expression corresponding to
8946 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8947 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8948 {
8949 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8950 tree cst_uchar_ptr_node
8951 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8952
8953 tree ind1 = fold_convert_loc (loc, integer_type_node,
8954 build1 (INDIRECT_REF, cst_uchar_node,
8955 fold_convert_loc (loc,
8956 cst_uchar_ptr_node,
8957 arg1)));
8958 tree ind2 = fold_convert_loc (loc, integer_type_node,
8959 build1 (INDIRECT_REF, cst_uchar_node,
8960 fold_convert_loc (loc,
8961 cst_uchar_ptr_node,
8962 arg2)));
8963 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8964 }
8965
8966 return NULL_TREE;
8967 }
8968
8969 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8970 ARG. Return NULL_TREE if no simplification can be made. */
8971
8972 static tree
8973 fold_builtin_signbit (location_t loc, tree arg, tree type)
8974 {
8975 if (!validate_arg (arg, REAL_TYPE))
8976 return NULL_TREE;
8977
8978 /* If ARG is a compile-time constant, determine the result. */
8979 if (TREE_CODE (arg) == REAL_CST
8980 && !TREE_OVERFLOW (arg))
8981 {
8982 REAL_VALUE_TYPE c;
8983
8984 c = TREE_REAL_CST (arg);
8985 return (REAL_VALUE_NEGATIVE (c)
8986 ? build_one_cst (type)
8987 : build_zero_cst (type));
8988 }
8989
8990 /* If ARG is non-negative, the result is always zero. */
8991 if (tree_expr_nonnegative_p (arg))
8992 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8993
8994 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8995 if (!HONOR_SIGNED_ZEROS (arg))
8996 return fold_convert (type,
8997 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8998 build_real (TREE_TYPE (arg), dconst0)));
8999
9000 return NULL_TREE;
9001 }
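
/* For instance, signbit (-3.0) folds to 1 and signbit (2.0) folds to
   0 at compile time; on a format without signed zeros the generic
   call folds to the comparison arg < 0.0.  */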
9002
9003 /* Fold function call to builtin copysign, copysignf or copysignl with
9004 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9005 be made. */
9006
9007 static tree
9008 fold_builtin_copysign (location_t loc, tree fndecl,
9009 tree arg1, tree arg2, tree type)
9010 {
9011 tree tem;
9012
9013 if (!validate_arg (arg1, REAL_TYPE)
9014 || !validate_arg (arg2, REAL_TYPE))
9015 return NULL_TREE;
9016
9017 /* copysign(X,X) is X. */
9018 if (operand_equal_p (arg1, arg2, 0))
9019 return fold_convert_loc (loc, type, arg1);
9020
9021 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9022 if (TREE_CODE (arg1) == REAL_CST
9023 && TREE_CODE (arg2) == REAL_CST
9024 && !TREE_OVERFLOW (arg1)
9025 && !TREE_OVERFLOW (arg2))
9026 {
9027 REAL_VALUE_TYPE c1, c2;
9028
9029 c1 = TREE_REAL_CST (arg1);
9030 c2 = TREE_REAL_CST (arg2);
9031 /* c1.sign := c2.sign. */
9032 real_copysign (&c1, &c2);
9033 return build_real (type, c1);
9034 }
9035
9036 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9037 Remember to evaluate Y for side-effects. */
9038 if (tree_expr_nonnegative_p (arg2))
9039 return omit_one_operand_loc (loc, type,
9040 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9041 arg2);
9042
9043 /* Strip sign-changing operations from the first argument. */
9044 tem = fold_strip_sign_ops (arg1);
9045 if (tem)
9046 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9047
9048 return NULL_TREE;
9049 }
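
/* For example, copysign (1.0, -2.0) folds to the constant -1.0, and
   copysign (x, y) with y known to be non-negative folds to fabs (x),
   keeping y around only for its side effects.  */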
9050
9051 /* Fold a call to builtin isascii with argument ARG. */
9052
9053 static tree
9054 fold_builtin_isascii (location_t loc, tree arg)
9055 {
9056 if (!validate_arg (arg, INTEGER_TYPE))
9057 return NULL_TREE;
9058 else
9059 {
9060 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9061 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9062 build_int_cst (integer_type_node,
9063 ~ (unsigned HOST_WIDE_INT) 0x7f));
9064 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9065 arg, integer_zero_node);
9066 }
9067 }
9068
9069 /* Fold a call to builtin toascii with argument ARG. */
9070
9071 static tree
9072 fold_builtin_toascii (location_t loc, tree arg)
9073 {
9074 if (!validate_arg (arg, INTEGER_TYPE))
9075 return NULL_TREE;
9076
9077 /* Transform toascii(c) -> (c & 0x7f). */
9078 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9079 build_int_cst (integer_type_node, 0x7f));
9080 }
9081
9082 /* Fold a call to builtin isdigit with argument ARG. */
9083
9084 static tree
9085 fold_builtin_isdigit (location_t loc, tree arg)
9086 {
9087 if (!validate_arg (arg, INTEGER_TYPE))
9088 return NULL_TREE;
9089 else
9090 {
9091 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9092 /* According to the C standard, isdigit is unaffected by locale.
9093 However, it definitely is affected by the target character set. */
9094 unsigned HOST_WIDE_INT target_digit0
9095 = lang_hooks.to_target_charset ('0');
9096
9097 if (target_digit0 == 0)
9098 return NULL_TREE;
9099
9100 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9101 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9102 build_int_cst (unsigned_type_node, target_digit0));
9103 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9104 build_int_cst (unsigned_type_node, 9));
9105 }
9106 }
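
/* The single unsigned comparison above covers both bounds: assuming an
   ASCII target, c == '5' (53) gives 53 - 48 == 5 <= 9, hence 1, while
   c == ' ' (32) wraps around to a huge unsigned value, hence 0.  */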
9107
9108 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9109
9110 static tree
9111 fold_builtin_fabs (location_t loc, tree arg, tree type)
9112 {
9113 if (!validate_arg (arg, REAL_TYPE))
9114 return NULL_TREE;
9115
9116 arg = fold_convert_loc (loc, type, arg);
9117 if (TREE_CODE (arg) == REAL_CST)
9118 return fold_abs_const (arg, type);
9119 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9120 }
9121
9122 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9123
9124 static tree
9125 fold_builtin_abs (location_t loc, tree arg, tree type)
9126 {
9127 if (!validate_arg (arg, INTEGER_TYPE))
9128 return NULL_TREE;
9129
9130 arg = fold_convert_loc (loc, type, arg);
9131 if (TREE_CODE (arg) == INTEGER_CST)
9132 return fold_abs_const (arg, type);
9133 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9134 }
9135
9136 /* Fold a fma operation with arguments ARG[012]. */
9137
9138 tree
9139 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9140 tree type, tree arg0, tree arg1, tree arg2)
9141 {
9142 if (TREE_CODE (arg0) == REAL_CST
9143 && TREE_CODE (arg1) == REAL_CST
9144 && TREE_CODE (arg2) == REAL_CST)
9145 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9146
9147 return NULL_TREE;
9148 }
9149
9150 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9151
9152 static tree
9153 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9154 {
9155 if (validate_arg (arg0, REAL_TYPE)
9156 && validate_arg (arg1, REAL_TYPE)
9157 && validate_arg (arg2, REAL_TYPE))
9158 {
9159 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9160 if (tem)
9161 return tem;
9162
9163 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9164 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9165 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9166 }
9167 return NULL_TREE;
9168 }
9169
9170 /* Fold a call to builtin fmin or fmax. */
9171
9172 static tree
9173 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9174 tree type, bool max)
9175 {
9176 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9177 {
9178 /* Calculate the result when the argument is a constant. */
9179 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9180
9181 if (res)
9182 return res;
9183
9184 /* If either argument is NaN, return the other one. Avoid the
9185 transformation if we get (and honor) a signalling NaN. Using
9186 omit_one_operand() ensures we create a non-lvalue. */
9187 if (TREE_CODE (arg0) == REAL_CST
9188 && real_isnan (&TREE_REAL_CST (arg0))
9189 && (! HONOR_SNANS (arg0)
9190 || ! TREE_REAL_CST (arg0).signalling))
9191 return omit_one_operand_loc (loc, type, arg1, arg0);
9192 if (TREE_CODE (arg1) == REAL_CST
9193 && real_isnan (&TREE_REAL_CST (arg1))
9194 && (! HONOR_SNANS (arg1)
9195 || ! TREE_REAL_CST (arg1).signalling))
9196 return omit_one_operand_loc (loc, type, arg0, arg1);
9197
9198 /* Transform fmin/fmax(x,x) -> x. */
9199 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9200 return omit_one_operand_loc (loc, type, arg0, arg1);
9201
9202 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9203 functions to return the numeric arg if the other one is NaN.
9204 These tree codes don't honor that, so only transform if
9205 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9206 handled, so we don't have to worry about it either. */
9207 if (flag_finite_math_only)
9208 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9209 fold_convert_loc (loc, type, arg0),
9210 fold_convert_loc (loc, type, arg1));
9211 }
9212 return NULL_TREE;
9213 }
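
/* For instance, fmax (x, __builtin_nan ("")) folds to x because the
   quiet NaN argument is dropped, and with -ffinite-math-only
   fmax (x, y) folds to the bare MAX_EXPR <x, y>.  */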
9214
9215 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9216
9217 static tree
9218 fold_builtin_carg (location_t loc, tree arg, tree type)
9219 {
9220 if (validate_arg (arg, COMPLEX_TYPE)
9221 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9222 {
9223 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9224
9225 if (atan2_fn)
9226 {
9227 tree new_arg = builtin_save_expr (arg);
9228 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9229 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9230 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9231 }
9232 }
9233
9234 return NULL_TREE;
9235 }
9236
9237 /* Fold a call to builtin logb/ilogb. */
9238
9239 static tree
9240 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9241 {
9242 if (! validate_arg (arg, REAL_TYPE))
9243 return NULL_TREE;
9244
9245 STRIP_NOPS (arg);
9246
9247 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9248 {
9249 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9250
9251 switch (value->cl)
9252 {
9253 case rvc_nan:
9254 case rvc_inf:
9255 /* If arg is Inf or NaN and we're logb, return it. */
9256 if (TREE_CODE (rettype) == REAL_TYPE)
9257 {
9258 /* For logb(-Inf) we have to return +Inf. */
9259 if (real_isinf (value) && real_isneg (value))
9260 {
9261 REAL_VALUE_TYPE tem;
9262 real_inf (&tem);
9263 return build_real (rettype, tem);
9264 }
9265 return fold_convert_loc (loc, rettype, arg);
9266 }
9267 /* Fall through... */
9268 case rvc_zero:
9269 /* Zero may set errno and/or raise an exception for logb; for
9270 ilogb we also don't know the target's FP_ILOGB0 value. */
9271 return NULL_TREE;
9272 case rvc_normal:
9273 /* For normal numbers, proceed iff radix == 2. In GCC,
9274 normalized significands are in the range [0.5, 1.0). We
9275 want the exponent as if they were [1.0, 2.0) so get the
9276 exponent and subtract 1. */
9277 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9278 return fold_convert_loc (loc, rettype,
9279 build_int_cst (integer_type_node,
9280 REAL_EXP (value)-1));
9281 break;
9282 }
9283 }
9284
9285 return NULL_TREE;
9286 }
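
/* A worked example for the rvc_normal case: GCC stores 8.0 as the
   significand 0.5 with REAL_EXP == 4, so logb (8.0) folds to the
   constant 4 - 1 == 3, matching the [1.0, 2.0) convention logb
   expects.  */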
9287
9288 /* Fold a call to builtin significand, if radix == 2. */
9289
9290 static tree
9291 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9292 {
9293 if (! validate_arg (arg, REAL_TYPE))
9294 return NULL_TREE;
9295
9296 STRIP_NOPS (arg);
9297
9298 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9299 {
9300 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9301
9302 switch (value->cl)
9303 {
9304 case rvc_zero:
9305 case rvc_nan:
9306 case rvc_inf:
9307 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9308 return fold_convert_loc (loc, rettype, arg);
9309 case rvc_normal:
9310 /* For normal numbers, proceed iff radix == 2. */
9311 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9312 {
9313 REAL_VALUE_TYPE result = *value;
9314 /* In GCC, normalized significands are in the range [0.5,
9315 1.0). We want them to be [1.0, 2.0) so set the
9316 exponent to 1. */
9317 SET_REAL_EXP (&result, 1);
9318 return build_real (rettype, result);
9319 }
9320 break;
9321 }
9322 }
9323
9324 return NULL_TREE;
9325 }
9326
9327 /* Fold a call to builtin frexp. We can assume the base is 2. */
9328
9329 static tree
9330 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9331 {
9332 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9333 return NULL_TREE;
9334
9335 STRIP_NOPS (arg0);
9336
9337 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9338 return NULL_TREE;
9339
9340 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9341
9342 /* Proceed if a valid pointer type was passed in. */
9343 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9344 {
9345 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9346 tree frac, exp;
9347
9348 switch (value->cl)
9349 {
9350 case rvc_zero:
9351 /* For +-0, return (*exp = 0, +-0). */
9352 exp = integer_zero_node;
9353 frac = arg0;
9354 break;
9355 case rvc_nan:
9356 case rvc_inf:
9357 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9358 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9359 case rvc_normal:
9360 {
9361 /* Since the frexp function always expects base 2, and in
9362 GCC normalized significands are already in the range
9363 [0.5, 1.0), we have exactly what frexp wants. */
9364 REAL_VALUE_TYPE frac_rvt = *value;
9365 SET_REAL_EXP (&frac_rvt, 0);
9366 frac = build_real (rettype, frac_rvt);
9367 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9368 }
9369 break;
9370 default:
9371 gcc_unreachable ();
9372 }
9373
9374 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9375 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9376 TREE_SIDE_EFFECTS (arg1) = 1;
9377 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9378 }
9379
9380 return NULL_TREE;
9381 }
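
/* A worked example: for frexp (8.0, &e) the stored significand 0.5
   with REAL_EXP == 4 is already in frexp's [0.5, 1.0) range, so the
   call folds to the pair (*e = 4, 0.5), since 0.5 * 2**4 == 8.0.  */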
9382
9383 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9384 then we can assume the base is two. If it's false, then we have to
9385 check the mode of the TYPE parameter in certain cases. */
9386
9387 static tree
9388 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9389 tree type, bool ldexp)
9390 {
9391 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9392 {
9393 STRIP_NOPS (arg0);
9394 STRIP_NOPS (arg1);
9395
9396 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9397 if (real_zerop (arg0) || integer_zerop (arg1)
9398 || (TREE_CODE (arg0) == REAL_CST
9399 && !real_isfinite (&TREE_REAL_CST (arg0))))
9400 return omit_one_operand_loc (loc, type, arg0, arg1);
9401
9402 /* If both arguments are constant, then try to evaluate it. */
9403 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9404 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9405 && tree_fits_shwi_p (arg1))
9406 {
9407 /* Bound the maximum adjustment to twice the range of the
9408 mode's valid exponents. Use abs to ensure the range is
9409 positive as a sanity check. */
9410 const long max_exp_adj = 2 *
9411 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9412 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9413
9414 /* Get the user-requested adjustment. */
9415 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9416
9417 /* The requested adjustment must be inside this range. This
9418 is a preliminary cap to avoid things like overflow; we
9419 may still fail to compute the result for other reasons. */
9420 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9421 {
9422 REAL_VALUE_TYPE initial_result;
9423
9424 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9425
9426 /* Ensure we didn't overflow. */
9427 if (! real_isinf (&initial_result))
9428 {
9429 const REAL_VALUE_TYPE trunc_result
9430 = real_value_truncate (TYPE_MODE (type), initial_result);
9431
9432 /* Only proceed if the target mode can hold the
9433 resulting value. */
9434 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9435 return build_real (type, trunc_result);
9436 }
9437 }
9438 }
9439 }
9440
9441 return NULL_TREE;
9442 }
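
/* For example, ldexp (1.5, 3) folds to the constant 12.0 via
   real_ldexp, provided the requested adjustment stays within
   max_exp_adj and the result survives truncation to the target
   mode unchanged.  */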
9443
9444 /* Fold a call to builtin modf. */
9445
9446 static tree
9447 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9448 {
9449 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9450 return NULL_TREE;
9451
9452 STRIP_NOPS (arg0);
9453
9454 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9455 return NULL_TREE;
9456
9457 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9458
9459 /* Proceed if a valid pointer type was passed in. */
9460 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9461 {
9462 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9463 REAL_VALUE_TYPE trunc, frac;
9464
9465 switch (value->cl)
9466 {
9467 case rvc_nan:
9468 case rvc_zero:
9469 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9470 trunc = frac = *value;
9471 break;
9472 case rvc_inf:
9473 /* For +-Inf, return (*arg1 = arg0, +-0). */
9474 frac = dconst0;
9475 frac.sign = value->sign;
9476 trunc = *value;
9477 break;
9478 case rvc_normal:
9479 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9480 real_trunc (&trunc, VOIDmode, value);
9481 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9482 /* If the original number was negative and already
9483 integral, then the fractional part is -0.0. */
9484 if (value->sign && frac.cl == rvc_zero)
9485 frac.sign = value->sign;
9486 break;
9487 }
9488
9489 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9490 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9491 build_real (rettype, trunc));
9492 TREE_SIDE_EFFECTS (arg1) = 1;
9493 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9494 build_real (rettype, frac));
9495 }
9496
9497 return NULL_TREE;
9498 }
9499
9500 /* Given a location LOC, an interclass builtin function decl FNDECL
9501 and its single argument ARG, return a folded expression computing
9502 the same, or NULL_TREE if we either couldn't or didn't want to fold
9503 (the latter happens if there's an RTL instruction available). */
9504
9505 static tree
9506 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9507 {
9508 machine_mode mode;
9509
9510 if (!validate_arg (arg, REAL_TYPE))
9511 return NULL_TREE;
9512
9513 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9514 return NULL_TREE;
9515
9516 mode = TYPE_MODE (TREE_TYPE (arg));
9517
9518 /* If there is no optab, try generic code. */
9519 switch (DECL_FUNCTION_CODE (fndecl))
9520 {
9521 tree result;
9522
9523 CASE_FLT_FN (BUILT_IN_ISINF):
9524 {
9525 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9526 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9527 tree const type = TREE_TYPE (arg);
9528 REAL_VALUE_TYPE r;
9529 char buf[128];
9530
9531 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9532 real_from_string (&r, buf);
9533 result = build_call_expr (isgr_fn, 2,
9534 fold_build1_loc (loc, ABS_EXPR, type, arg),
9535 build_real (type, r));
9536 return result;
9537 }
9538 CASE_FLT_FN (BUILT_IN_FINITE):
9539 case BUILT_IN_ISFINITE:
9540 {
9541 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9542 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9543 tree const type = TREE_TYPE (arg);
9544 REAL_VALUE_TYPE r;
9545 char buf[128];
9546
9547 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9548 real_from_string (&r, buf);
9549 result = build_call_expr (isle_fn, 2,
9550 fold_build1_loc (loc, ABS_EXPR, type, arg),
9551 build_real (type, r));
9552 /*result = fold_build2_loc (loc, UNGT_EXPR,
9553 TREE_TYPE (TREE_TYPE (fndecl)),
9554 fold_build1_loc (loc, ABS_EXPR, type, arg),
9555 build_real (type, r));
9556 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9557 TREE_TYPE (TREE_TYPE (fndecl)),
9558 result);*/
9559 return result;
9560 }
9561 case BUILT_IN_ISNORMAL:
9562 {
9563 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9564 islessequal(fabs(x),DBL_MAX). */
9565 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9566 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9567 tree const type = TREE_TYPE (arg);
9568 REAL_VALUE_TYPE rmax, rmin;
9569 char buf[128];
9570
9571 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9572 real_from_string (&rmax, buf);
9573 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9574 real_from_string (&rmin, buf);
9575 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9576 result = build_call_expr (isle_fn, 2, arg,
9577 build_real (type, rmax));
9578 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9579 build_call_expr (isge_fn, 2, arg,
9580 build_real (type, rmin)));
9581 return result;
9582 }
9583 default:
9584 break;
9585 }
9586
9587 return NULL_TREE;
9588 }
9589
9590 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9591 ARG is the argument for the call. */
9592
9593 static tree
9594 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9595 {
9596 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9597 REAL_VALUE_TYPE r;
9598
9599 if (!validate_arg (arg, REAL_TYPE))
9600 return NULL_TREE;
9601
9602 switch (builtin_index)
9603 {
9604 case BUILT_IN_ISINF:
9605 if (!HONOR_INFINITIES (arg))
9606 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9607
9608 if (TREE_CODE (arg) == REAL_CST)
9609 {
9610 r = TREE_REAL_CST (arg);
9611 if (real_isinf (&r))
9612 return real_compare (GT_EXPR, &r, &dconst0)
9613 ? integer_one_node : integer_minus_one_node;
9614 else
9615 return integer_zero_node;
9616 }
9617
9618 return NULL_TREE;
9619
9620 case BUILT_IN_ISINF_SIGN:
9621 {
9622 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9623 /* In a boolean context, GCC will fold the inner COND_EXPR to
9624 1. So e.g. "if (isinf_sign(x))" would be folded to just
9625 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9626 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9627 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9628 tree tmp = NULL_TREE;
9629
9630 arg = builtin_save_expr (arg);
9631
9632 if (signbit_fn && isinf_fn)
9633 {
9634 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9635 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9636
9637 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9638 signbit_call, integer_zero_node);
9639 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9640 isinf_call, integer_zero_node);
9641
9642 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9643 integer_minus_one_node, integer_one_node);
9644 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9645 isinf_call, tmp,
9646 integer_zero_node);
9647 }
9648
9649 return tmp;
9650 }
9651
9652 case BUILT_IN_ISFINITE:
9653 if (!HONOR_NANS (arg)
9654 && !HONOR_INFINITIES (arg))
9655 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9656
9657 if (TREE_CODE (arg) == REAL_CST)
9658 {
9659 r = TREE_REAL_CST (arg);
9660 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9661 }
9662
9663 return NULL_TREE;
9664
9665 case BUILT_IN_ISNAN:
9666 if (!HONOR_NANS (arg))
9667 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9668
9669 if (TREE_CODE (arg) == REAL_CST)
9670 {
9671 r = TREE_REAL_CST (arg);
9672 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9673 }
9674
9675 arg = builtin_save_expr (arg);
9676 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9677
9678 default:
9679 gcc_unreachable ();
9680 }
9681 }
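
/* Note the isnan fallback above: for a non-constant argument,
   isnan (x) folds to the self-comparison x UNORDERED x, which is
   true exactly when x is a NaN; the UNORDERED comparison is quiet,
   so no floating-point exception is raised.  */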
9682
9683 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9684 This builtin will generate code to return the appropriate floating
9685 point classification depending on the value of the floating point
9686 number passed in. The possible return values must be supplied as
9687 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9688 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9689 one floating point argument, which is "type generic". */
9690
9691 static tree
9692 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9693 {
9694 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9695 arg, type, res, tmp;
9696 machine_mode mode;
9697 REAL_VALUE_TYPE r;
9698 char buf[128];
9699
9700 /* Verify the required arguments in the original call. */
9701 if (nargs != 6
9702 || !validate_arg (args[0], INTEGER_TYPE)
9703 || !validate_arg (args[1], INTEGER_TYPE)
9704 || !validate_arg (args[2], INTEGER_TYPE)
9705 || !validate_arg (args[3], INTEGER_TYPE)
9706 || !validate_arg (args[4], INTEGER_TYPE)
9707 || !validate_arg (args[5], REAL_TYPE))
9708 return NULL_TREE;
9709
9710 fp_nan = args[0];
9711 fp_infinite = args[1];
9712 fp_normal = args[2];
9713 fp_subnormal = args[3];
9714 fp_zero = args[4];
9715 arg = args[5];
9716 type = TREE_TYPE (arg);
9717 mode = TYPE_MODE (type);
9718 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9719
9720 /* fpclassify(x) ->
9721 isnan(x) ? FP_NAN :
9722 (fabs(x) == Inf ? FP_INFINITE :
9723 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9724 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9725
9726 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9727 build_real (type, dconst0));
9728 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9729 tmp, fp_zero, fp_subnormal);
9730
9731 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9732 real_from_string (&r, buf);
9733 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9734 arg, build_real (type, r));
9735 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9736
9737 if (HONOR_INFINITIES (mode))
9738 {
9739 real_inf (&r);
9740 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9741 build_real (type, r));
9742 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9743 fp_infinite, res);
9744 }
9745
9746 if (HONOR_NANS (mode))
9747 {
9748 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9749 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9750 }
9751
9752 return res;
9753 }
9754
9755 /* Fold a call to an unordered comparison function such as
9756 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9757 being called and ARG0 and ARG1 are the arguments for the call.
9758 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9759 the opposite of the desired result. UNORDERED_CODE is used
9760 for modes that can hold NaNs and ORDERED_CODE is used for
9761 the rest. */
9762
9763 static tree
9764 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9765 enum tree_code unordered_code,
9766 enum tree_code ordered_code)
9767 {
9768 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9769 enum tree_code code;
9770 tree type0, type1;
9771 enum tree_code code0, code1;
9772 tree cmp_type = NULL_TREE;
9773
9774 type0 = TREE_TYPE (arg0);
9775 type1 = TREE_TYPE (arg1);
9776
9777 code0 = TREE_CODE (type0);
9778 code1 = TREE_CODE (type1);
9779
9780 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9781 /* Choose the wider of two real types. */
9782 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9783 ? type0 : type1;
9784 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9785 cmp_type = type0;
9786 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9787 cmp_type = type1;
9788
9789 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9790 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9791
9792 if (unordered_code == UNORDERED_EXPR)
9793 {
9794 if (!HONOR_NANS (arg0))
9795 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9796 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9797 }
9798
9799 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9800 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9801 fold_build2_loc (loc, code, type, arg0, arg1));
9802 }
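
/* For example, isgreater (x, y) folds to !(x UNLE y): the UNLE
   comparison is true when the operands are unordered, so negating it
   yields false for NaN operands, as C99 requires, without raising an
   exception on quiet NaNs.  */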
9803
9804 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9805 arithmetic if it can never overflow, or into internal functions that
9806 return both the result of the arithmetic and an overflow flag packed
9807 into a complex integer result, or some other check for overflow. */
9808
9809 static tree
9810 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9811 tree arg0, tree arg1, tree arg2)
9812 {
9813 enum internal_fn ifn = IFN_LAST;
9814 tree type = TREE_TYPE (TREE_TYPE (arg2));
9815 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9816 switch (fcode)
9817 {
9818 case BUILT_IN_ADD_OVERFLOW:
9819 case BUILT_IN_SADD_OVERFLOW:
9820 case BUILT_IN_SADDL_OVERFLOW:
9821 case BUILT_IN_SADDLL_OVERFLOW:
9822 case BUILT_IN_UADD_OVERFLOW:
9823 case BUILT_IN_UADDL_OVERFLOW:
9824 case BUILT_IN_UADDLL_OVERFLOW:
9825 ifn = IFN_ADD_OVERFLOW;
9826 break;
9827 case BUILT_IN_SUB_OVERFLOW:
9828 case BUILT_IN_SSUB_OVERFLOW:
9829 case BUILT_IN_SSUBL_OVERFLOW:
9830 case BUILT_IN_SSUBLL_OVERFLOW:
9831 case BUILT_IN_USUB_OVERFLOW:
9832 case BUILT_IN_USUBL_OVERFLOW:
9833 case BUILT_IN_USUBLL_OVERFLOW:
9834 ifn = IFN_SUB_OVERFLOW;
9835 break;
9836 case BUILT_IN_MUL_OVERFLOW:
9837 case BUILT_IN_SMUL_OVERFLOW:
9838 case BUILT_IN_SMULL_OVERFLOW:
9839 case BUILT_IN_SMULLL_OVERFLOW:
9840 case BUILT_IN_UMUL_OVERFLOW:
9841 case BUILT_IN_UMULL_OVERFLOW:
9842 case BUILT_IN_UMULLL_OVERFLOW:
9843 ifn = IFN_MUL_OVERFLOW;
9844 break;
9845 default:
9846 gcc_unreachable ();
9847 }
9848 tree ctype = build_complex_type (type);
9849 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9850 2, arg0, arg1);
9851 tree tgt = save_expr (call);
9852 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9853 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9854 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9855 tree store
9856 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9857 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9858 }
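
/* Sketch of the resulting tree (illustrative): a call such as
   __builtin_add_overflow (a, b, &r) becomes, in effect,

     c = IFN_ADD_OVERFLOW (a, b);
     r = REALPART_EXPR <c>, (bool) IMAGPART_EXPR <c>;

   where c is a complex integer pair: the arithmetic result is stored
   through the pointer and the overflow flag is the value of the whole
   COMPOUND_EXPR.  */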
9859
9860 /* Fold a call to built-in function FNDECL with 0 arguments.
9861 This function returns NULL_TREE if no simplification was possible. */
9862
9863 static tree
9864 fold_builtin_0 (location_t loc, tree fndecl)
9865 {
9866 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9867 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9868 switch (fcode)
9869 {
9870 CASE_FLT_FN (BUILT_IN_INF):
9871 case BUILT_IN_INFD32:
9872 case BUILT_IN_INFD64:
9873 case BUILT_IN_INFD128:
9874 return fold_builtin_inf (loc, type, true);
9875
9876 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9877 return fold_builtin_inf (loc, type, false);
9878
9879 case BUILT_IN_CLASSIFY_TYPE:
9880 return fold_builtin_classify_type (NULL_TREE);
9881
9882 default:
9883 break;
9884 }
9885 return NULL_TREE;
9886 }
9887
9888 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9889 This function returns NULL_TREE if no simplification was possible. */
9890
9891 static tree
9892 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9893 {
9894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9895 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9896 switch (fcode)
9897 {
9898 case BUILT_IN_CONSTANT_P:
9899 {
9900 tree val = fold_builtin_constant_p (arg0);
9901
9902 /* Gimplification will pull the CALL_EXPR for the builtin out of
9903 an if condition. When not optimizing, we'll not CSE it back.
9904 To avoid regressions such as link errors, return false now. */
9905 if (!val && !optimize)
9906 val = integer_zero_node;
9907
9908 return val;
9909 }
9910
9911 case BUILT_IN_CLASSIFY_TYPE:
9912 return fold_builtin_classify_type (arg0);
9913
9914 case BUILT_IN_STRLEN:
9915 return fold_builtin_strlen (loc, type, arg0);
9916
9917 CASE_FLT_FN (BUILT_IN_FABS):
9918 case BUILT_IN_FABSD32:
9919 case BUILT_IN_FABSD64:
9920 case BUILT_IN_FABSD128:
9921 return fold_builtin_fabs (loc, arg0, type);
9922
9923 case BUILT_IN_ABS:
9924 case BUILT_IN_LABS:
9925 case BUILT_IN_LLABS:
9926 case BUILT_IN_IMAXABS:
9927 return fold_builtin_abs (loc, arg0, type);
9928
9929 CASE_FLT_FN (BUILT_IN_CONJ):
9930 if (validate_arg (arg0, COMPLEX_TYPE)
9931 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9932 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9933 break;
9934
9935 CASE_FLT_FN (BUILT_IN_CREAL):
9936 if (validate_arg (arg0, COMPLEX_TYPE)
9937 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9938 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9939 break;
9940
9941 CASE_FLT_FN (BUILT_IN_CIMAG):
9942 if (validate_arg (arg0, COMPLEX_TYPE)
9943 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9944 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9945 break;
9946
9947 CASE_FLT_FN (BUILT_IN_CCOS):
9948 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9949
9950 CASE_FLT_FN (BUILT_IN_CCOSH):
9951 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9952
9953 CASE_FLT_FN (BUILT_IN_CPROJ):
9954 return fold_builtin_cproj (loc, arg0, type);
9955
9956 CASE_FLT_FN (BUILT_IN_CSIN):
9957 if (validate_arg (arg0, COMPLEX_TYPE)
9958 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9959 return do_mpc_arg1 (arg0, type, mpc_sin);
9960 break;
9961
9962 CASE_FLT_FN (BUILT_IN_CSINH):
9963 if (validate_arg (arg0, COMPLEX_TYPE)
9964 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9965 return do_mpc_arg1 (arg0, type, mpc_sinh);
9966 break;
9967
9968 CASE_FLT_FN (BUILT_IN_CTAN):
9969 if (validate_arg (arg0, COMPLEX_TYPE)
9970 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9971 return do_mpc_arg1 (arg0, type, mpc_tan);
9972 break;
9973
9974 CASE_FLT_FN (BUILT_IN_CTANH):
9975 if (validate_arg (arg0, COMPLEX_TYPE)
9976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9977 return do_mpc_arg1 (arg0, type, mpc_tanh);
9978 break;
9979
9980 CASE_FLT_FN (BUILT_IN_CLOG):
9981 if (validate_arg (arg0, COMPLEX_TYPE)
9982 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9983 return do_mpc_arg1 (arg0, type, mpc_log);
9984 break;
9985
9986 CASE_FLT_FN (BUILT_IN_CSQRT):
9987 if (validate_arg (arg0, COMPLEX_TYPE)
9988 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9989 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9990 break;
9991
9992 CASE_FLT_FN (BUILT_IN_CASIN):
9993 if (validate_arg (arg0, COMPLEX_TYPE)
9994 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9995 return do_mpc_arg1 (arg0, type, mpc_asin);
9996 break;
9997
9998 CASE_FLT_FN (BUILT_IN_CACOS):
9999 if (validate_arg (arg0, COMPLEX_TYPE)
10000 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10001 return do_mpc_arg1 (arg0, type, mpc_acos);
10002 break;
10003
10004 CASE_FLT_FN (BUILT_IN_CATAN):
10005 if (validate_arg (arg0, COMPLEX_TYPE)
10006 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10007 return do_mpc_arg1 (arg0, type, mpc_atan);
10008 break;
10009
10010 CASE_FLT_FN (BUILT_IN_CASINH):
10011 if (validate_arg (arg0, COMPLEX_TYPE)
10012 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10013 return do_mpc_arg1 (arg0, type, mpc_asinh);
10014 break;
10015
10016 CASE_FLT_FN (BUILT_IN_CACOSH):
10017 if (validate_arg (arg0, COMPLEX_TYPE)
10018 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10019 return do_mpc_arg1 (arg0, type, mpc_acosh);
10020 break;
10021
10022 CASE_FLT_FN (BUILT_IN_CATANH):
10023 if (validate_arg (arg0, COMPLEX_TYPE)
10024 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10025 return do_mpc_arg1 (arg0, type, mpc_atanh);
10026 break;
10027
10028 CASE_FLT_FN (BUILT_IN_CABS):
10029 return fold_builtin_cabs (loc, arg0, type, fndecl);
10030
10031 CASE_FLT_FN (BUILT_IN_CARG):
10032 return fold_builtin_carg (loc, arg0, type);
10033
10034 CASE_FLT_FN (BUILT_IN_SQRT):
10035 return fold_builtin_sqrt (loc, arg0, type);
10036
10037 CASE_FLT_FN (BUILT_IN_CBRT):
10038 return fold_builtin_cbrt (loc, arg0, type);
10039
10040 CASE_FLT_FN (BUILT_IN_ASIN):
10041 if (validate_arg (arg0, REAL_TYPE))
10042 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10043 &dconstm1, &dconst1, true);
10044 break;
10045
10046 CASE_FLT_FN (BUILT_IN_ACOS):
10047 if (validate_arg (arg0, REAL_TYPE))
10048 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10049 &dconstm1, &dconst1, true);
10050 break;
10051
10052 CASE_FLT_FN (BUILT_IN_ATAN):
10053 if (validate_arg (arg0, REAL_TYPE))
10054 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10055 break;
10056
10057 CASE_FLT_FN (BUILT_IN_ASINH):
10058 if (validate_arg (arg0, REAL_TYPE))
10059 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10060 break;
10061
10062 CASE_FLT_FN (BUILT_IN_ACOSH):
10063 if (validate_arg (arg0, REAL_TYPE))
10064 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10065 &dconst1, NULL, true);
10066 break;
10067
10068 CASE_FLT_FN (BUILT_IN_ATANH):
10069 if (validate_arg (arg0, REAL_TYPE))
10070 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10071 &dconstm1, &dconst1, false);
10072 break;
10073
10074 CASE_FLT_FN (BUILT_IN_SIN):
10075 if (validate_arg (arg0, REAL_TYPE))
10076 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10077 break;
10078
10079 CASE_FLT_FN (BUILT_IN_COS):
10080 return fold_builtin_cos (loc, arg0, type, fndecl);
10081
10082 CASE_FLT_FN (BUILT_IN_TAN):
10083 return fold_builtin_tan (arg0, type);
10084
10085 CASE_FLT_FN (BUILT_IN_CEXP):
10086 return fold_builtin_cexp (loc, arg0, type);
10087
10088 CASE_FLT_FN (BUILT_IN_CEXPI):
10089 if (validate_arg (arg0, REAL_TYPE))
10090 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10091 break;
10092
10093 CASE_FLT_FN (BUILT_IN_SINH):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10096 break;
10097
10098 CASE_FLT_FN (BUILT_IN_COSH):
10099 return fold_builtin_cosh (loc, arg0, type, fndecl);
10100
10101 CASE_FLT_FN (BUILT_IN_TANH):
10102 if (validate_arg (arg0, REAL_TYPE))
10103 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10104 break;
10105
10106 CASE_FLT_FN (BUILT_IN_ERF):
10107 if (validate_arg (arg0, REAL_TYPE))
10108 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10109 break;
10110
10111 CASE_FLT_FN (BUILT_IN_ERFC):
10112 if (validate_arg (arg0, REAL_TYPE))
10113 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10114 break;
10115
10116 CASE_FLT_FN (BUILT_IN_TGAMMA):
10117 if (validate_arg (arg0, REAL_TYPE))
10118 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10119 break;
10120
10121 CASE_FLT_FN (BUILT_IN_EXP):
10122 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10123
10124 CASE_FLT_FN (BUILT_IN_EXP2):
10125 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10126
10127 CASE_FLT_FN (BUILT_IN_EXP10):
10128 CASE_FLT_FN (BUILT_IN_POW10):
10129 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10130
10131 CASE_FLT_FN (BUILT_IN_EXPM1):
10132 if (validate_arg (arg0, REAL_TYPE))
10133 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10134 break;
10135
10136 CASE_FLT_FN (BUILT_IN_LOG):
10137 if (validate_arg (arg0, REAL_TYPE))
10138 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10139 break;
10140
10141 CASE_FLT_FN (BUILT_IN_LOG2):
10142 if (validate_arg (arg0, REAL_TYPE))
10143 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10144 break;
10145
10146 CASE_FLT_FN (BUILT_IN_LOG10):
10147 if (validate_arg (arg0, REAL_TYPE))
10148 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10149 break;
10150
10151 CASE_FLT_FN (BUILT_IN_LOG1P):
10152 if (validate_arg (arg0, REAL_TYPE))
10153 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10154 &dconstm1, NULL, false);
10155 break;
10156
10157 CASE_FLT_FN (BUILT_IN_J0):
10158 if (validate_arg (arg0, REAL_TYPE))
10159 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10160 NULL, NULL, 0);
10161 break;
10162
10163 CASE_FLT_FN (BUILT_IN_J1):
10164 if (validate_arg (arg0, REAL_TYPE))
10165 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10166 NULL, NULL, 0);
10167 break;
10168
10169 CASE_FLT_FN (BUILT_IN_Y0):
10170 if (validate_arg (arg0, REAL_TYPE))
10171 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10172 &dconst0, NULL, false);
10173 break;
10174
10175 CASE_FLT_FN (BUILT_IN_Y1):
10176 if (validate_arg (arg0, REAL_TYPE))
10177 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10178 &dconst0, NULL, false);
10179 break;
10180
10181 CASE_FLT_FN (BUILT_IN_NAN):
10182 case BUILT_IN_NAND32:
10183 case BUILT_IN_NAND64:
10184 case BUILT_IN_NAND128:
10185 return fold_builtin_nan (arg0, type, true);
10186
10187 CASE_FLT_FN (BUILT_IN_NANS):
10188 return fold_builtin_nan (arg0, type, false);
10189
10190 CASE_FLT_FN (BUILT_IN_FLOOR):
10191 return fold_builtin_floor (loc, fndecl, arg0);
10192
10193 CASE_FLT_FN (BUILT_IN_CEIL):
10194 return fold_builtin_ceil (loc, fndecl, arg0);
10195
10196 CASE_FLT_FN (BUILT_IN_TRUNC):
10197 return fold_builtin_trunc (loc, fndecl, arg0);
10198
10199 CASE_FLT_FN (BUILT_IN_ROUND):
10200 return fold_builtin_round (loc, fndecl, arg0);
10201
10202 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10203 CASE_FLT_FN (BUILT_IN_RINT):
10204 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10205
10206 CASE_FLT_FN (BUILT_IN_ICEIL):
10207 CASE_FLT_FN (BUILT_IN_LCEIL):
10208 CASE_FLT_FN (BUILT_IN_LLCEIL):
10209 CASE_FLT_FN (BUILT_IN_LFLOOR):
10210 CASE_FLT_FN (BUILT_IN_IFLOOR):
10211 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10212 CASE_FLT_FN (BUILT_IN_IROUND):
10213 CASE_FLT_FN (BUILT_IN_LROUND):
10214 CASE_FLT_FN (BUILT_IN_LLROUND):
10215 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10216
10217 CASE_FLT_FN (BUILT_IN_IRINT):
10218 CASE_FLT_FN (BUILT_IN_LRINT):
10219 CASE_FLT_FN (BUILT_IN_LLRINT):
10220 return fold_fixed_mathfn (loc, fndecl, arg0);
10221
10222 case BUILT_IN_BSWAP16:
10223 case BUILT_IN_BSWAP32:
10224 case BUILT_IN_BSWAP64:
10225 return fold_builtin_bswap (fndecl, arg0);
10226
10227 CASE_INT_FN (BUILT_IN_FFS):
10228 CASE_INT_FN (BUILT_IN_CLZ):
10229 CASE_INT_FN (BUILT_IN_CTZ):
10230 CASE_INT_FN (BUILT_IN_CLRSB):
10231 CASE_INT_FN (BUILT_IN_POPCOUNT):
10232 CASE_INT_FN (BUILT_IN_PARITY):
10233 return fold_builtin_bitop (fndecl, arg0);
10234
10235 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10236 return fold_builtin_signbit (loc, arg0, type);
10237
10238 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10239 return fold_builtin_significand (loc, arg0, type);
10240
10241 CASE_FLT_FN (BUILT_IN_ILOGB):
10242 CASE_FLT_FN (BUILT_IN_LOGB):
10243 return fold_builtin_logb (loc, arg0, type);
10244
10245 case BUILT_IN_ISASCII:
10246 return fold_builtin_isascii (loc, arg0);
10247
10248 case BUILT_IN_TOASCII:
10249 return fold_builtin_toascii (loc, arg0);
10250
10251 case BUILT_IN_ISDIGIT:
10252 return fold_builtin_isdigit (loc, arg0);
10253
10254 CASE_FLT_FN (BUILT_IN_FINITE):
10255 case BUILT_IN_FINITED32:
10256 case BUILT_IN_FINITED64:
10257 case BUILT_IN_FINITED128:
10258 case BUILT_IN_ISFINITE:
10259 {
10260 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10261 if (ret)
10262 return ret;
10263 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10264 }
10265
10266 CASE_FLT_FN (BUILT_IN_ISINF):
10267 case BUILT_IN_ISINFD32:
10268 case BUILT_IN_ISINFD64:
10269 case BUILT_IN_ISINFD128:
10270 {
10271 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10272 if (ret)
10273 return ret;
10274 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10275 }
10276
10277 case BUILT_IN_ISNORMAL:
10278 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10279
10280 case BUILT_IN_ISINF_SIGN:
10281 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10282
10283 CASE_FLT_FN (BUILT_IN_ISNAN):
10284 case BUILT_IN_ISNAND32:
10285 case BUILT_IN_ISNAND64:
10286 case BUILT_IN_ISNAND128:
10287 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10288
10289 case BUILT_IN_FREE:
10290 if (integer_zerop (arg0))
10291 return build_empty_stmt (loc);
10292 break;
10293
10294 default:
10295 break;
10296 }
10297
10298 return NULL_TREE;
10300 }
10301
10302 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10303 This function returns NULL_TREE if no simplification was possible. */
10304
10305 static tree
10306 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10307 {
10308 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10309 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10310
10311 switch (fcode)
10312 {
10313 CASE_FLT_FN (BUILT_IN_JN):
10314 if (validate_arg (arg0, INTEGER_TYPE)
10315 && validate_arg (arg1, REAL_TYPE))
10316 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10317 break;
10318
10319 CASE_FLT_FN (BUILT_IN_YN):
10320 if (validate_arg (arg0, INTEGER_TYPE)
10321 && validate_arg (arg1, REAL_TYPE))
10322 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10323 &dconst0, false);
10324 break;
10325
10326 CASE_FLT_FN (BUILT_IN_DREM):
10327 CASE_FLT_FN (BUILT_IN_REMAINDER):
10328 if (validate_arg (arg0, REAL_TYPE)
10329 && validate_arg (arg1, REAL_TYPE))
10330 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10331 break;
10332
10333 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10334 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10335 if (validate_arg (arg0, REAL_TYPE)
10336 && validate_arg (arg1, POINTER_TYPE))
10337 return do_mpfr_lgamma_r (arg0, arg1, type);
10338 break;
10339
10340 CASE_FLT_FN (BUILT_IN_ATAN2):
10341 if (validate_arg (arg0, REAL_TYPE)
10342 && validate_arg (arg1, REAL_TYPE))
10343 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10344 break;
10345
10346 CASE_FLT_FN (BUILT_IN_FDIM):
10347 if (validate_arg (arg0, REAL_TYPE)
10348 && validate_arg (arg1, REAL_TYPE))
10349 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10350 break;
10351
10352 CASE_FLT_FN (BUILT_IN_HYPOT):
10353 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10354
10355 CASE_FLT_FN (BUILT_IN_CPOW):
10356 if (validate_arg (arg0, COMPLEX_TYPE)
10357 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10358 && validate_arg (arg1, COMPLEX_TYPE)
10359 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10360 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10361 break;
10362
10363 CASE_FLT_FN (BUILT_IN_LDEXP):
10364 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10365 CASE_FLT_FN (BUILT_IN_SCALBN):
10366 CASE_FLT_FN (BUILT_IN_SCALBLN):
10367 return fold_builtin_load_exponent (loc, arg0, arg1,
10368 type, /*ldexp=*/false);
10369
10370 CASE_FLT_FN (BUILT_IN_FREXP):
10371 return fold_builtin_frexp (loc, arg0, arg1, type);
10372
10373 CASE_FLT_FN (BUILT_IN_MODF):
10374 return fold_builtin_modf (loc, arg0, arg1, type);
10375
10376 case BUILT_IN_STRSTR:
10377 return fold_builtin_strstr (loc, arg0, arg1, type);
10378
10379 case BUILT_IN_STRSPN:
10380 return fold_builtin_strspn (loc, arg0, arg1);
10381
10382 case BUILT_IN_STRCSPN:
10383 return fold_builtin_strcspn (loc, arg0, arg1);
10384
10385 case BUILT_IN_STRCHR:
10386 case BUILT_IN_INDEX:
10387 return fold_builtin_strchr (loc, arg0, arg1, type);
10388
10389 case BUILT_IN_STRRCHR:
10390 case BUILT_IN_RINDEX:
10391 return fold_builtin_strrchr (loc, arg0, arg1, type);
10392
10393 case BUILT_IN_STRCMP:
10394 return fold_builtin_strcmp (loc, arg0, arg1);
10395
10396 case BUILT_IN_STRPBRK:
10397 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10398
10399 case BUILT_IN_EXPECT:
10400 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10401
10402 CASE_FLT_FN (BUILT_IN_POW):
10403 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10404
10405 CASE_FLT_FN (BUILT_IN_POWI):
10406 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10407
10408 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10409 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10410
10411 CASE_FLT_FN (BUILT_IN_FMIN):
10412 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10413
10414 CASE_FLT_FN (BUILT_IN_FMAX):
10415 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10416
10417 case BUILT_IN_ISGREATER:
10418 return fold_builtin_unordered_cmp (loc, fndecl,
10419 arg0, arg1, UNLE_EXPR, LE_EXPR);
10420 case BUILT_IN_ISGREATEREQUAL:
10421 return fold_builtin_unordered_cmp (loc, fndecl,
10422 arg0, arg1, UNLT_EXPR, LT_EXPR);
10423 case BUILT_IN_ISLESS:
10424 return fold_builtin_unordered_cmp (loc, fndecl,
10425 arg0, arg1, UNGE_EXPR, GE_EXPR);
10426 case BUILT_IN_ISLESSEQUAL:
10427 return fold_builtin_unordered_cmp (loc, fndecl,
10428 arg0, arg1, UNGT_EXPR, GT_EXPR);
10429 case BUILT_IN_ISLESSGREATER:
10430 return fold_builtin_unordered_cmp (loc, fndecl,
10431 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10432 case BUILT_IN_ISUNORDERED:
10433 return fold_builtin_unordered_cmp (loc, fndecl,
10434 arg0, arg1, UNORDERED_EXPR,
10435 NOP_EXPR);
10436
10437 /* We do the folding for va_start in the expander. */
10438 case BUILT_IN_VA_START:
10439 break;
10440
10441 case BUILT_IN_OBJECT_SIZE:
10442 return fold_builtin_object_size (arg0, arg1);
10443
10444 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10445 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10446
10447 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10448 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10449
10450 default:
10451 break;
10452 }
10453 return NULL_TREE;
10454 }
10455
10456 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10457 and ARG2.
10458 This function returns NULL_TREE if no simplification was possible. */
10459
10460 static tree
10461 fold_builtin_3 (location_t loc, tree fndecl,
10462 tree arg0, tree arg1, tree arg2)
10463 {
10464 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10465 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10466 switch (fcode)
10467 {
10468
10469 CASE_FLT_FN (BUILT_IN_SINCOS):
10470 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10471
10472 CASE_FLT_FN (BUILT_IN_FMA):
10473 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10475
10476 CASE_FLT_FN (BUILT_IN_REMQUO):
10477 if (validate_arg (arg0, REAL_TYPE)
10478 && validate_arg (arg1, REAL_TYPE)
10479 && validate_arg (arg2, POINTER_TYPE))
10480 return do_mpfr_remquo (arg0, arg1, arg2);
10481 break;
10482
10483 case BUILT_IN_STRNCMP:
10484 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10485
10486 case BUILT_IN_MEMCHR:
10487 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10488
10489 case BUILT_IN_BCMP:
10490 case BUILT_IN_MEMCMP:
10491 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10492
10493 case BUILT_IN_EXPECT:
10494 return fold_builtin_expect (loc, arg0, arg1, arg2);
10495
10496 case BUILT_IN_ADD_OVERFLOW:
10497 case BUILT_IN_SUB_OVERFLOW:
10498 case BUILT_IN_MUL_OVERFLOW:
10499 case BUILT_IN_SADD_OVERFLOW:
10500 case BUILT_IN_SADDL_OVERFLOW:
10501 case BUILT_IN_SADDLL_OVERFLOW:
10502 case BUILT_IN_SSUB_OVERFLOW:
10503 case BUILT_IN_SSUBL_OVERFLOW:
10504 case BUILT_IN_SSUBLL_OVERFLOW:
10505 case BUILT_IN_SMUL_OVERFLOW:
10506 case BUILT_IN_SMULL_OVERFLOW:
10507 case BUILT_IN_SMULLL_OVERFLOW:
10508 case BUILT_IN_UADD_OVERFLOW:
10509 case BUILT_IN_UADDL_OVERFLOW:
10510 case BUILT_IN_UADDLL_OVERFLOW:
10511 case BUILT_IN_USUB_OVERFLOW:
10512 case BUILT_IN_USUBL_OVERFLOW:
10513 case BUILT_IN_USUBLL_OVERFLOW:
10514 case BUILT_IN_UMUL_OVERFLOW:
10515 case BUILT_IN_UMULL_OVERFLOW:
10516 case BUILT_IN_UMULLL_OVERFLOW:
10517 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10518
10519 default:
10520 break;
10521 }
10522 return NULL_TREE;
10523 }
10524
10525 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
10526 arguments.  The trailing bool parameter (historically IGNORE, true if
10527 the result of the call is ignored) is now unused.  This function
10528 returns NULL_TREE if no simplification was possible.  */
10529
10530 tree
10531 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10532 {
10533 tree ret = NULL_TREE;
10534
10535 switch (nargs)
10536 {
10537 case 0:
10538 ret = fold_builtin_0 (loc, fndecl);
10539 break;
10540 case 1:
10541 ret = fold_builtin_1 (loc, fndecl, args[0]);
10542 break;
10543 case 2:
10544 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10545 break;
10546 case 3:
10547 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10548 break;
10549 default:
10550 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10551 break;
10552 }
10553 if (ret)
10554 {
10555 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10556 SET_EXPR_LOCATION (ret, loc);
10557 TREE_NO_WARNING (ret) = 1;
10558 return ret;
10559 }
10560 return NULL_TREE;
10561 }
10562
10563 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10564 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10565 of arguments in ARGS to be omitted. OLDNARGS is the number of
10566 elements in ARGS. */
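/* For example, with OLDNARGS == 3, SKIP == 1 and N == 2 the rewritten
   call receives NEWARGS[0], NEWARGS[1], ARGS[1] and ARGS[2], so the
   new argument count is 3 - 1 + 2 == 4.  */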
10567
10568 static tree
10569 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10570 int skip, tree fndecl, int n, va_list newargs)
10571 {
10572 int nargs = oldnargs - skip + n;
10573 tree *buffer;
10574
10575 if (n > 0)
10576 {
10577 int i, j;
10578
10579 buffer = XALLOCAVEC (tree, nargs);
10580 for (i = 0; i < n; i++)
10581 buffer[i] = va_arg (newargs, tree);
10582 for (j = skip; j < oldnargs; j++, i++)
10583 buffer[i] = args[j];
10584 }
10585 else
10586 buffer = args + skip;
10587
10588 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10589 }
10590
10591 /* Return true if FNDECL shouldn't be folded right now.
10592 If a built-in function has an inline attribute always_inline
10593 wrapper, defer folding it after always_inline functions have
10594 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10595 might not be performed. */
10596
10597 bool
10598 avoid_folding_inline_builtin (tree fndecl)
10599 {
10600 return (DECL_DECLARED_INLINE_P (fndecl)
10601 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10602 && cfun
10603 && !cfun->always_inline_functions_inlined
10604 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10605 }
10606
10607 /* A wrapper function for builtin folding that prevents warnings for
10608 "statement without effect" and the like, caused by removing the
10609 call node before the warning is generated. */
10610
10611 tree
10612 fold_call_expr (location_t loc, tree exp, bool ignore)
10613 {
10614 tree ret = NULL_TREE;
10615 tree fndecl = get_callee_fndecl (exp);
10616 if (fndecl
10617 && TREE_CODE (fndecl) == FUNCTION_DECL
10618 && DECL_BUILT_IN (fndecl)
10619 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10620 yet. Defer folding until we see all the arguments
10621 (after inlining). */
10622 && !CALL_EXPR_VA_ARG_PACK (exp))
10623 {
10624 int nargs = call_expr_nargs (exp);
10625
10626 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10627 instead the last argument is __builtin_va_arg_pack ().  Defer folding
10628 even in that case, until arguments are finalized. */
10629 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10630 {
10631 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10632 if (fndecl2
10633 && TREE_CODE (fndecl2) == FUNCTION_DECL
10634 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10635 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10636 return NULL_TREE;
10637 }
10638
10639 if (avoid_folding_inline_builtin (fndecl))
10640 return NULL_TREE;
10641
10642 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10643 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10644 CALL_EXPR_ARGP (exp), ignore);
10645 else
10646 {
10647 tree *args = CALL_EXPR_ARGP (exp);
10648 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10649 if (ret)
10650 return ret;
10651 }
10652 }
10653 return NULL_TREE;
10654 }
10655
10656 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10657 N arguments are passed in the array ARGARRAY. Return a folded
10658 expression or NULL_TREE if no simplification was possible. */
10659
10660 tree
10661 fold_builtin_call_array (location_t loc, tree,
10662 tree fn,
10663 int n,
10664 tree *argarray)
10665 {
10666 if (TREE_CODE (fn) != ADDR_EXPR)
10667 return NULL_TREE;
10668
10669 tree fndecl = TREE_OPERAND (fn, 0);
10670 if (TREE_CODE (fndecl) == FUNCTION_DECL
10671 && DECL_BUILT_IN (fndecl))
10672 {
10673 /* If last argument is __builtin_va_arg_pack (), arguments to this
10674 function are not finalized yet. Defer folding until they are. */
10675 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10676 {
10677 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10678 if (fndecl2
10679 && TREE_CODE (fndecl2) == FUNCTION_DECL
10680 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10681 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10682 return NULL_TREE;
10683 }
10684 if (avoid_folding_inline_builtin (fndecl))
10685 return NULL_TREE;
10686 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10687 return targetm.fold_builtin (fndecl, n, argarray, false);
10688 else
10689 return fold_builtin_n (loc, fndecl, argarray, n, false);
10690 }
10691
10692 return NULL_TREE;
10693 }
10694
10695 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10696 along with N new arguments specified as the "..." parameters. SKIP
10697 is the number of arguments in EXP to be omitted. This function is used
10698 to do varargs-to-varargs transformations. */
10699
10700 static tree
10701 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10702 {
10703 va_list ap;
10704 tree t;
10705
10706 va_start (ap, n);
10707 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10708 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10709 va_end (ap);
10710
10711 return t;
10712 }
10713
10714 /* Validate a single argument ARG against a tree code CODE representing
10715 a type. */
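/* Note that the INTEGER_TYPE and POINTER_TYPE specifiers are checked
   loosely below: any integral type (including enums and booleans)
   satisfies INTEGER_TYPE, and any pointer or reference type satisfies
   POINTER_TYPE.  */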
10716
10717 static bool
10718 validate_arg (const_tree arg, enum tree_code code)
10719 {
10720 if (!arg)
10721 return false;
10722 else if (code == POINTER_TYPE)
10723 return POINTER_TYPE_P (TREE_TYPE (arg));
10724 else if (code == INTEGER_TYPE)
10725 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10726 return code == TREE_CODE (TREE_TYPE (arg));
10727 }
10728
10729 /* This function validates the types of a function call argument list
10730 against a specified list of tree_codes. If the last specifier is a 0,
10731 it represents an ellipsis; otherwise the last specifier must be a
10732 VOID_TYPE.
10733
10734 This is the GIMPLE version of validate_arglist. Eventually we want to
10735 completely convert builtins.c to work from GIMPLEs and the tree based
10736 validate_arglist will then be removed. */
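/* For example, validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE) accepts exactly two arguments, a pointer followed by an
   integer; ending the list with 0 instead of VOID_TYPE would also accept
   any number of additional trailing arguments.  */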
10737
10738 bool
10739 validate_gimple_arglist (const gcall *call, ...)
10740 {
10741 enum tree_code code;
10742 bool res = false;
10743 va_list ap;
10744 const_tree arg;
10745 size_t i;
10746
10747 va_start (ap, call);
10748 i = 0;
10749
10750 do
10751 {
10752 code = (enum tree_code) va_arg (ap, int);
10753 switch (code)
10754 {
10755 case 0:
10756 /* This signifies an ellipsis; any further arguments are all ok. */
10757 res = true;
10758 goto end;
10759 case VOID_TYPE:
10760 /* This signifies an endlink; if no arguments remain, return
10761 true, otherwise return false. */
10762 res = (i == gimple_call_num_args (call));
10763 goto end;
10764 default:
10765 /* If no parameters remain or the parameter's code does not
10766 match the specified code, return false. Otherwise continue
10767 checking any remaining arguments. */
10768 arg = gimple_call_arg (call, i++);
10769 if (!validate_arg (arg, code))
10770 goto end;
10771 break;
10772 }
10773 }
10774 while (1);
10775
10776 /* We need gotos here so that every exit path reaches the single
10777 va_end call below. */
10778 end: ;
10779 va_end (ap);
10780
10781 return res;
10782 }
10783
10784 /* Default target-specific builtin expander that does nothing. */
10785
10786 rtx
10787 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10788 rtx target ATTRIBUTE_UNUSED,
10789 rtx subtarget ATTRIBUTE_UNUSED,
10790 machine_mode mode ATTRIBUTE_UNUSED,
10791 int ignore ATTRIBUTE_UNUSED)
10792 {
10793 return NULL_RTX;
10794 }
10795
10796 /* Returns true if EXP represents data that would potentially reside
10797 in a readonly section. */
10798
10799 bool
10800 readonly_data_expr (tree exp)
10801 {
10802 STRIP_NOPS (exp);
10803
10804 if (TREE_CODE (exp) != ADDR_EXPR)
10805 return false;
10806
10807 exp = get_base_address (TREE_OPERAND (exp, 0));
10808 if (!exp)
10809 return false;
10810
10811 /* Make sure we call decl_readonly_section only for trees it
10812 can handle (since it returns true for everything it doesn't
10813 understand). */
10814 if (TREE_CODE (exp) == STRING_CST
10815 || TREE_CODE (exp) == CONSTRUCTOR
10816 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10817 return decl_readonly_section (exp, 0);
10818 else
10819 return false;
10820 }
10821
10822 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10823 to the call, and TYPE is its return type.
10824
10825 Return NULL_TREE if no simplification was possible, otherwise return the
10826 simplified form of the call as a tree.
10827
10828 The simplified form may be a constant or other expression which
10829 computes the same value, but in a more efficient manner (including
10830 calls to other builtin functions).
10831
10832 The call may contain arguments which need to be evaluated, but
10833 which are not useful to determine the result of the call. In
10834 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10835 COMPOUND_EXPR will be an argument which must be evaluated.
10836 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10837 COMPOUND_EXPR in the chain will contain the tree for the simplified
10838 form of the builtin function call. */
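/* For example, with both arguments constant, strstr ("hello", "ll")
   folds to &"hello"[2]; strstr (s1, "") folds to (char *) s1, and a
   single-character needle is rewritten as a call to strchr.  */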
10839
10840 static tree
10841 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10842 {
10843 if (!validate_arg (s1, POINTER_TYPE)
10844 || !validate_arg (s2, POINTER_TYPE))
10845 return NULL_TREE;
10846 else
10847 {
10848 tree fn;
10849 const char *p1, *p2;
10850
10851 p2 = c_getstr (s2);
10852 if (p2 == NULL)
10853 return NULL_TREE;
10854
10855 p1 = c_getstr (s1);
10856 if (p1 != NULL)
10857 {
10858 const char *r = strstr (p1, p2);
10859 tree tem;
10860
10861 if (r == NULL)
10862 return build_int_cst (TREE_TYPE (s1), 0);
10863
10864 /* Return an offset into the constant string argument. */
10865 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10866 return fold_convert_loc (loc, type, tem);
10867 }
10868
10869 /* The argument is const char *, and the result is char *, so we need
10870 a type conversion here to avoid a warning. */
10871 if (p2[0] == '\0')
10872 return fold_convert_loc (loc, type, s1);
10873
10874 if (p2[1] != '\0')
10875 return NULL_TREE;
10876
10877 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10878 if (!fn)
10879 return NULL_TREE;
10880
10881 /* New argument list transforming strstr(s1, s2) to
10882 strchr(s1, s2[0]). */
10883 return build_call_expr_loc (loc, fn, 2, s1,
10884 build_int_cst (integer_type_node, p2[0]));
10885 }
10886 }
10887
10888 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10889 the call, and TYPE is its return type.
10890
10891 Return NULL_TREE if no simplification was possible, otherwise return the
10892 simplified form of the call as a tree.
10893
10894 The simplified form may be a constant or other expression which
10895 computes the same value, but in a more efficient manner (including
10896 calls to other builtin functions).
10897
10898 The call may contain arguments which need to be evaluated, but
10899 which are not useful to determine the result of the call. In
10900 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10901 COMPOUND_EXPR will be an argument which must be evaluated.
10902 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10903 COMPOUND_EXPR in the chain will contain the tree for the simplified
10904 form of the builtin function call. */
10905
10906 static tree
10907 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10908 {
10909 if (!validate_arg (s1, POINTER_TYPE)
10910 || !validate_arg (s2, INTEGER_TYPE))
10911 return NULL_TREE;
10912 else
10913 {
10914 const char *p1;
10915
10916 if (TREE_CODE (s2) != INTEGER_CST)
10917 return NULL_TREE;
10918
10919 p1 = c_getstr (s1);
10920 if (p1 != NULL)
10921 {
10922 char c;
10923 const char *r;
10924 tree tem;
10925
10926 if (target_char_cast (s2, &c))
10927 return NULL_TREE;
10928
10929 r = strchr (p1, c);
10930
10931 if (r == NULL)
10932 return build_int_cst (TREE_TYPE (s1), 0);
10933
10934 /* Return an offset into the constant string argument. */
10935 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10936 return fold_convert_loc (loc, type, tem);
10937 }
10938 return NULL_TREE;
10939 }
10940 }
10941
10942 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10943 the call, and TYPE is its return type.
10944
10945 Return NULL_TREE if no simplification was possible, otherwise return the
10946 simplified form of the call as a tree.
10947
10948 The simplified form may be a constant or other expression which
10949 computes the same value, but in a more efficient manner (including
10950 calls to other builtin functions).
10951
10952 The call may contain arguments which need to be evaluated, but
10953 which are not useful to determine the result of the call. In
10954 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10955 COMPOUND_EXPR will be an argument which must be evaluated.
10956 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10957 COMPOUND_EXPR in the chain will contain the tree for the simplified
10958 form of the builtin function call. */
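/* For example, with both arguments constant, strrchr ("hello", 'l')
   folds to &"hello"[3]; searching a non-constant string for '\0' is
   rewritten as the equivalent strchr call.  */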
10959
10960 static tree
10961 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10962 {
10963 if (!validate_arg (s1, POINTER_TYPE)
10964 || !validate_arg (s2, INTEGER_TYPE))
10965 return NULL_TREE;
10966 else
10967 {
10968 tree fn;
10969 const char *p1;
10970
10971 if (TREE_CODE (s2) != INTEGER_CST)
10972 return NULL_TREE;
10973
10974 p1 = c_getstr (s1);
10975 if (p1 != NULL)
10976 {
10977 char c;
10978 const char *r;
10979 tree tem;
10980
10981 if (target_char_cast (s2, &c))
10982 return NULL_TREE;
10983
10984 r = strrchr (p1, c);
10985
10986 if (r == NULL)
10987 return build_int_cst (TREE_TYPE (s1), 0);
10988
10989 /* Return an offset into the constant string argument. */
10990 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10991 return fold_convert_loc (loc, type, tem);
10992 }
10993
10994 if (! integer_zerop (s2))
10995 return NULL_TREE;
10996
10997 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10998 if (!fn)
10999 return NULL_TREE;
11000
11001 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11002 return build_call_expr_loc (loc, fn, 2, s1, s2);
11003 }
11004 }
11005
11006 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11007 to the call, and TYPE is its return type.
11008
11009 Return NULL_TREE if no simplification was possible, otherwise return the
11010 simplified form of the call as a tree.
11011
11012 The simplified form may be a constant or other expression which
11013 computes the same value, but in a more efficient manner (including
11014 calls to other builtin functions).
11015
11016 The call may contain arguments which need to be evaluated, but
11017 which are not useful to determine the result of the call. In
11018 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11019 COMPOUND_EXPR will be an argument which must be evaluated.
11020 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11021 COMPOUND_EXPR in the chain will contain the tree for the simplified
11022 form of the builtin function call. */
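/* For example, with both arguments constant, strpbrk ("abcd", "dc")
   folds to &"abcd"[2]; strpbrk (s1, "") folds to NULL, and a
   single-character set is rewritten as a call to strchr.  */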
11023
11024 static tree
11025 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11026 {
11027 if (!validate_arg (s1, POINTER_TYPE)
11028 || !validate_arg (s2, POINTER_TYPE))
11029 return NULL_TREE;
11030 else
11031 {
11032 tree fn;
11033 const char *p1, *p2;
11034
11035 p2 = c_getstr (s2);
11036 if (p2 == NULL)
11037 return NULL_TREE;
11038
11039 p1 = c_getstr (s1);
11040 if (p1 != NULL)
11041 {
11042 const char *r = strpbrk (p1, p2);
11043 tree tem;
11044
11045 if (r == NULL)
11046 return build_int_cst (TREE_TYPE (s1), 0);
11047
11048 /* Return an offset into the constant string argument. */
11049 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11050 return fold_convert_loc (loc, type, tem);
11051 }
11052
11053 if (p2[0] == '\0')
11054 /* strpbrk(x, "") == NULL.
11055 Evaluate and ignore s1 in case it had side-effects. */
11056 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11057
11058 if (p2[1] != '\0')
11059 return NULL_TREE; /* Really call strpbrk. */
11060
11061 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11062 if (!fn)
11063 return NULL_TREE;
11064
11065 /* New argument list transforming strpbrk(s1, s2) to
11066 strchr(s1, s2[0]). */
11067 return build_call_expr_loc (loc, fn, 2, s1,
11068 build_int_cst (integer_type_node, p2[0]));
11069 }
11070 }
11071
11072 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11073 to the call.
11074
11075 Return NULL_TREE if no simplification was possible, otherwise return the
11076 simplified form of the call as a tree.
11077
11078 The simplified form may be a constant or other expression which
11079 computes the same value, but in a more efficient manner (including
11080 calls to other builtin functions).
11081
11082 The call may contain arguments which need to be evaluated, but
11083 which are not useful to determine the result of the call. In
11084 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11085 COMPOUND_EXPR will be an argument which must be evaluated.
11086 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11087 COMPOUND_EXPR in the chain will contain the tree for the simplified
11088 form of the builtin function call. */
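/* For example, strspn ("aab", "a") folds to 2 at compile time, and
   if either argument is known to be "" the result folds to zero.  */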
11089
11090 static tree
11091 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11092 {
11093 if (!validate_arg (s1, POINTER_TYPE)
11094 || !validate_arg (s2, POINTER_TYPE))
11095 return NULL_TREE;
11096 else
11097 {
11098 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11099
11100 /* If both arguments are constants, evaluate at compile-time. */
11101 if (p1 && p2)
11102 {
11103 const size_t r = strspn (p1, p2);
11104 return build_int_cst (size_type_node, r);
11105 }
11106
11107 /* If either argument is "", the result is zero.  */
11108 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11109 /* Evaluate and ignore both arguments in case either one has
11110 side-effects. */
11111 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11112 s1, s2);
11113 return NULL_TREE;
11114 }
11115 }
11116
11117 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11118 to the call.
11119
11120 Return NULL_TREE if no simplification was possible, otherwise return the
11121 simplified form of the call as a tree.
11122
11123 The simplified form may be a constant or other expression which
11124 computes the same value, but in a more efficient manner (including
11125 calls to other builtin functions).
11126
11127 The call may contain arguments which need to be evaluated, but
11128 which are not useful to determine the result of the call. In
11129 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11130 COMPOUND_EXPR will be an argument which must be evaluated.
11131 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11132 COMPOUND_EXPR in the chain will contain the tree for the simplified
11133 form of the builtin function call. */
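/* For example, strcspn ("abc", "c") folds to 2 at compile time;
   strcspn ("", s2) folds to zero, and strcspn (s1, "") is rewritten
   as strlen (s1).  */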
11134
11135 static tree
11136 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11137 {
11138 if (!validate_arg (s1, POINTER_TYPE)
11139 || !validate_arg (s2, POINTER_TYPE))
11140 return NULL_TREE;
11141 else
11142 {
11143 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11144
11145 /* If both arguments are constants, evaluate at compile-time. */
11146 if (p1 && p2)
11147 {
11148 const size_t r = strcspn (p1, p2);
11149 return build_int_cst (size_type_node, r);
11150 }
11151
11152 /* If the first argument is "", the result is zero.  */
11153 if (p1 && *p1 == '\0')
11154 {
11155 /* Evaluate and ignore argument s2 in case it has
11156 side-effects. */
11157 return omit_one_operand_loc (loc, size_type_node,
11158 size_zero_node, s2);
11159 }
11160
11161 /* If the second argument is "", return __builtin_strlen(s1). */
11162 if (p2 && *p2 == '\0')
11163 {
11164 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11165
11166 /* If the replacement _DECL isn't initialized, don't do the
11167 transformation. */
11168 if (!fn)
11169 return NULL_TREE;
11170
11171 return build_call_expr_loc (loc, fn, 1, s1);
11172 }
11173 return NULL_TREE;
11174 }
11175 }
11176
11177 /* Fold the next_arg or va_start call EXP.  Returns true if an error
11178 was produced, false otherwise.  This lets us avoid emitting the same
11179 error or warning two or three times.  */
11180
11181 bool
11182 fold_builtin_next_arg (tree exp, bool va_start_p)
11183 {
11184 tree fntype = TREE_TYPE (current_function_decl);
11185 int nargs = call_expr_nargs (exp);
11186 tree arg;
11187 /* There is a good chance the current input_location points inside the
11188 definition of the va_start macro (perhaps on the token for
11189 builtin) in a system header, so warnings will not be emitted.
11190 Use the location in real source code. */
11191 source_location current_location =
11192 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11193 NULL);
11194
11195 if (!stdarg_p (fntype))
11196 {
11197 error ("%<va_start%> used in function with fixed args");
11198 return true;
11199 }
11200
11201 if (va_start_p)
11202 {
11203 if (nargs != 2)
11204 {
11205 error ("wrong number of arguments to function %<va_start%>");
11206 return true;
11207 }
11208 arg = CALL_EXPR_ARG (exp, 1);
11209 }
11210 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11211 once we have checked the arguments and, if needed, issued a warning. */
11212 else
11213 {
11214 if (nargs == 0)
11215 {
11216 /* Evidently an out of date version of <stdarg.h>; can't validate
11217 va_start's second argument, but can still work as intended. */
11218 warning_at (current_location,
11219 OPT_Wvarargs,
11220 "%<__builtin_next_arg%> called without an argument");
11221 return true;
11222 }
11223 else if (nargs > 1)
11224 {
11225 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11226 return true;
11227 }
11228 arg = CALL_EXPR_ARG (exp, 0);
11229 }
11230
11231 if (TREE_CODE (arg) == SSA_NAME)
11232 arg = SSA_NAME_VAR (arg);
11233
11234 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11235 or __builtin_next_arg (0) the first time we see it, after checking
11236 the arguments and if needed issuing a warning. */
11237 if (!integer_zerop (arg))
11238 {
11239 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11240
11241 /* Strip off all nops for the sake of the comparison. This
11242 is not quite the same as STRIP_NOPS. It does more.
11243 We must also strip off INDIRECT_EXPR for C++ reference
11244 parameters. */
11245 while (CONVERT_EXPR_P (arg)
11246 || TREE_CODE (arg) == INDIRECT_REF)
11247 arg = TREE_OPERAND (arg, 0);
11248 if (arg != last_parm)
11249 {
11250 /* FIXME: Sometimes the tree optimizers hand us something other
11251 than the last argument even though the user did pass the last
11252 argument.  We just warn and carry on as if it were the last
11253 argument, which can result in wrong code.  */
11255 warning_at (current_location,
11256 OPT_Wvarargs,
11257 "second parameter of %<va_start%> not last named argument");
11258 }
11259
11260 /* Undefined by C99 7.15.1.4p4 (va_start):
11261 "If the parameter parmN is declared with the register storage
11262 class, with a function or array type, or with a type that is
11263 not compatible with the type that results after application of
11264 the default argument promotions, the behavior is undefined."
11265 */
11266 else if (DECL_REGISTER (arg))
11267 {
11268 warning_at (current_location,
11269 OPT_Wvarargs,
11270 "undefined behaviour when second parameter of "
11271 "%<va_start%> is declared with %<register%> storage");
11272 }
11273
11274 /* We want to verify the second parameter just once before the tree
11275 optimizers are run and then avoid keeping it in the tree,
11276 as otherwise we could warn even for correct code like:
11277 void foo (int i, ...)
11278 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11279 if (va_start_p)
11280 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11281 else
11282 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11283 }
11284 return false;
11285 }
11286
11287
11288 /* Expand a call EXP to __builtin_object_size. */
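/* If the call has survived to expansion, the size could not be
   determined earlier, so emit the documented failure value:
   (size_t) -1 for types 0 and 1, and zero for types 2 and 3.  */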
11289
11290 static rtx
11291 expand_builtin_object_size (tree exp)
11292 {
11293 tree ost;
11294 int object_size_type;
11295 tree fndecl = get_callee_fndecl (exp);
11296
11297 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11298 {
11299 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11300 exp, fndecl);
11301 expand_builtin_trap ();
11302 return const0_rtx;
11303 }
11304
11305 ost = CALL_EXPR_ARG (exp, 1);
11306 STRIP_NOPS (ost);
11307
11308 if (TREE_CODE (ost) != INTEGER_CST
11309 || tree_int_cst_sgn (ost) < 0
11310 || compare_tree_int (ost, 3) > 0)
11311 {
11312 error ("%Klast argument of %D is not integer constant between 0 and 3",
11313 exp, fndecl);
11314 expand_builtin_trap ();
11315 return const0_rtx;
11316 }
11317
11318 object_size_type = tree_to_shwi (ost);
11319
11320 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11321 }
11322
11323 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11324 FCODE is the BUILT_IN_* to use.
11325 Return NULL_RTX if we failed; the caller should emit a normal call,
11326 otherwise try to get the result in TARGET, if convenient (and in
11327 mode MODE if that's convenient). */
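/* For example, __memcpy_chk (dest, src, len, size) with a constant LEN
   known not to exceed SIZE expands as a plain memcpy; if LEN provably
   exceeds SIZE we only warn here and let the checking call through.  */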
11328
11329 static rtx
11330 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11331 enum built_in_function fcode)
11332 {
11333 tree dest, src, len, size;
11334
11335 if (!validate_arglist (exp,
11336 POINTER_TYPE,
11337 fcode == BUILT_IN_MEMSET_CHK
11338 ? INTEGER_TYPE : POINTER_TYPE,
11339 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11340 return NULL_RTX;
11341
11342 dest = CALL_EXPR_ARG (exp, 0);
11343 src = CALL_EXPR_ARG (exp, 1);
11344 len = CALL_EXPR_ARG (exp, 2);
11345 size = CALL_EXPR_ARG (exp, 3);
11346
11347 if (! tree_fits_uhwi_p (size))
11348 return NULL_RTX;
11349
11350 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11351 {
11352 tree fn;
11353
11354 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11355 {
11356 warning_at (tree_nonartificial_location (exp),
11357 0, "%Kcall to %D will always overflow destination buffer",
11358 exp, get_callee_fndecl (exp));
11359 return NULL_RTX;
11360 }
11361
11362 fn = NULL_TREE;
11363 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11364 mem{cpy,pcpy,move,set} is available. */
11365 switch (fcode)
11366 {
11367 case BUILT_IN_MEMCPY_CHK:
11368 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11369 break;
11370 case BUILT_IN_MEMPCPY_CHK:
11371 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11372 break;
11373 case BUILT_IN_MEMMOVE_CHK:
11374 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11375 break;
11376 case BUILT_IN_MEMSET_CHK:
11377 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11378 break;
11379 default:
11380 break;
11381 }
11382
11383 if (! fn)
11384 return NULL_RTX;
11385
11386 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11387 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11388 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11389 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11390 }
11391 else if (fcode == BUILT_IN_MEMSET_CHK)
11392 return NULL_RTX;
11393 else
11394 {
11395 unsigned int dest_align = get_pointer_alignment (dest);
11396
11397 /* If DEST is not a pointer type, call the normal function. */
11398 if (dest_align == 0)
11399 return NULL_RTX;
11400
11401 /* If SRC and DEST are the same (and not volatile), do nothing. */
11402 if (operand_equal_p (src, dest, 0))
11403 {
11404 tree expr;
11405
11406 if (fcode != BUILT_IN_MEMPCPY_CHK)
11407 {
11408 /* Evaluate and ignore LEN in case it has side-effects. */
11409 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11410 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11411 }
11412
11413 expr = fold_build_pointer_plus (dest, len);
11414 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11415 }
11416
11417 /* __memmove_chk special case. */
11418 if (fcode == BUILT_IN_MEMMOVE_CHK)
11419 {
11420 unsigned int src_align = get_pointer_alignment (src);
11421
11422 if (src_align == 0)
11423 return NULL_RTX;
11424
11425 /* If src is categorized for a readonly section we can use
11426 normal __memcpy_chk. */
11427 if (readonly_data_expr (src))
11428 {
11429 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11430 if (!fn)
11431 return NULL_RTX;
11432 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11433 dest, src, len, size);
11434 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11435 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11436 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11437 }
11438 }
11439 return NULL_RTX;
11440 }
11441 }
11442
11443 /* Emit warning if a buffer overflow is detected at compile time. */
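/* For example, __strcpy_chk (dest, "abcdef", 4) is diagnosed, since the
   copy needs strlen ("abcdef") + 1 == 7 bytes but the destination is
   known to be only 4 bytes.  */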
11444
11445 static void
11446 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11447 {
11448 int is_strlen = 0;
11449 tree len, size;
11450 location_t loc = tree_nonartificial_location (exp);
11451
11452 switch (fcode)
11453 {
11454 case BUILT_IN_STRCPY_CHK:
11455 case BUILT_IN_STPCPY_CHK:
11456 /* For __strcat_chk the warning will be emitted only if overflowing
11457 by at least strlen (dest) + 1 bytes. */
11458 case BUILT_IN_STRCAT_CHK:
11459 len = CALL_EXPR_ARG (exp, 1);
11460 size = CALL_EXPR_ARG (exp, 2);
11461 is_strlen = 1;
11462 break;
11463 case BUILT_IN_STRNCAT_CHK:
11464 case BUILT_IN_STRNCPY_CHK:
11465 case BUILT_IN_STPNCPY_CHK:
11466 len = CALL_EXPR_ARG (exp, 2);
11467 size = CALL_EXPR_ARG (exp, 3);
11468 break;
11469 case BUILT_IN_SNPRINTF_CHK:
11470 case BUILT_IN_VSNPRINTF_CHK:
11471 len = CALL_EXPR_ARG (exp, 1);
11472 size = CALL_EXPR_ARG (exp, 3);
11473 break;
11474 default:
11475 gcc_unreachable ();
11476 }
11477
11478 if (!len || !size)
11479 return;
11480
11481 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11482 return;
11483
11484 if (is_strlen)
11485 {
11486 len = c_strlen (len, 1);
11487 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11488 return;
11489 }
11490 else if (fcode == BUILT_IN_STRNCAT_CHK)
11491 {
11492 tree src = CALL_EXPR_ARG (exp, 1);
11493 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11494 return;
11495 src = c_strlen (src, 1);
11496 if (! src || ! tree_fits_uhwi_p (src))
11497 {
11498 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11499 exp, get_callee_fndecl (exp));
11500 return;
11501 }
11502 else if (tree_int_cst_lt (src, size))
11503 return;
11504 }
11505 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11506 return;
11507
11508 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11509 exp, get_callee_fndecl (exp));
11510 }
11511
11512 /* Emit warning if a buffer overflow is detected at compile time
11513 in __sprintf_chk/__vsprintf_chk calls. */
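/* For example, __sprintf_chk (buf, flag, 4, "abcdef") is diagnosed:
   the format contains no % directives, so the output needs exactly
   strlen ("abcdef") + 1 == 7 bytes while only 4 are available.  */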
11514
11515 static void
11516 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11517 {
11518 tree size, len, fmt;
11519 const char *fmt_str;
11520 int nargs = call_expr_nargs (exp);
11521
11522 /* Verify the required arguments in the original call. */
11523
11524 if (nargs < 4)
11525 return;
11526 size = CALL_EXPR_ARG (exp, 2);
11527 fmt = CALL_EXPR_ARG (exp, 3);
11528
11529 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11530 return;
11531
11532 /* Check whether the format is a literal string constant. */
11533 fmt_str = c_getstr (fmt);
11534 if (fmt_str == NULL)
11535 return;
11536
11537 if (!init_target_chars ())
11538 return;
11539
11540 /* If the format doesn't contain % args or %%, we know its size. */
11541 if (strchr (fmt_str, target_percent) == 0)
11542 len = build_int_cstu (size_type_node, strlen (fmt_str));
11543 /* If the format is "%s" and first ... argument is a string literal,
11544 we know it too. */
11545 else if (fcode == BUILT_IN_SPRINTF_CHK
11546 && strcmp (fmt_str, target_percent_s) == 0)
11547 {
11548 tree arg;
11549
11550 if (nargs < 5)
11551 return;
11552 arg = CALL_EXPR_ARG (exp, 4);
11553 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11554 return;
11555
11556 len = c_strlen (arg, 1);
11557 if (!len || ! tree_fits_uhwi_p (len))
11558 return;
11559 }
11560 else
11561 return;
11562
11563 if (! tree_int_cst_lt (len, size))
11564 warning_at (tree_nonartificial_location (exp),
11565 0, "%Kcall to %D will always overflow destination buffer",
11566 exp, get_callee_fndecl (exp));
11567 }
11568
11569 /* Emit warning if a free is called with address of a variable. */
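/* For example, free (&some_local) is diagnosed with
   -Wfree-nonheap-object, while pointers reached through indirection
   are left alone.  */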
11570
11571 static void
11572 maybe_emit_free_warning (tree exp)
11573 {
11574 tree arg = CALL_EXPR_ARG (exp, 0);
11575
11576 STRIP_NOPS (arg);
11577 if (TREE_CODE (arg) != ADDR_EXPR)
11578 return;
11579
11580 arg = get_base_address (TREE_OPERAND (arg, 0));
11581 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11582 return;
11583
11584 if (SSA_VAR_P (arg))
11585 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11586 "%Kattempt to free a non-heap object %qD", exp, arg);
11587 else
11588 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11589 "%Kattempt to free a non-heap object", exp);
11590 }
11591
11592 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11593 if possible. */
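/* For example, given char buf[16], __builtin_object_size (&buf[4], 0)
   folds to 12, while a pointer the object-size machinery cannot analyze
   folds to (size_t) -1 for types 0 and 1 and to zero for types 2 and 3.  */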
11594
11595 static tree
11596 fold_builtin_object_size (tree ptr, tree ost)
11597 {
11598 unsigned HOST_WIDE_INT bytes;
11599 int object_size_type;
11600
11601 if (!validate_arg (ptr, POINTER_TYPE)
11602 || !validate_arg (ost, INTEGER_TYPE))
11603 return NULL_TREE;
11604
11605 STRIP_NOPS (ost);
11606
11607 if (TREE_CODE (ost) != INTEGER_CST
11608 || tree_int_cst_sgn (ost) < 0
11609 || compare_tree_int (ost, 3) > 0)
11610 return NULL_TREE;
11611
11612 object_size_type = tree_to_shwi (ost);
11613
11614 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11615 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11616 and (size_t) 0 for types 2 and 3. */
11617 if (TREE_SIDE_EFFECTS (ptr))
11618 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11619
11620 if (TREE_CODE (ptr) == ADDR_EXPR)
11621 {
11622 bytes = compute_builtin_object_size (ptr, object_size_type);
11623 if (wi::fits_to_tree_p (bytes, size_type_node))
11624 return build_int_cstu (size_type_node, bytes);
11625 }
11626 else if (TREE_CODE (ptr) == SSA_NAME)
11627 {
11628 /* If object size is not known yet, delay folding until
11629 later. Maybe subsequent passes will help determining
11630 it. */
11631 bytes = compute_builtin_object_size (ptr, object_size_type);
11632 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11633 && wi::fits_to_tree_p (bytes, size_type_node))
11634 return build_int_cstu (size_type_node, bytes);
11635 }
11636
11637 return NULL_TREE;
11638 }
11639
11640 /* Builtins with folding operations that operate on "..." arguments
11641 need special handling; we need to store the arguments in a convenient
11642 data structure before attempting any folding. Fortunately there are
11643 only a few builtins that fall into this category. FNDECL is the
11644 function, EXP is the CALL_EXPR for the call. */
11645
11646 static tree
11647 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11648 {
11649 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11650 tree ret = NULL_TREE;
11651
11652 switch (fcode)
11653 {
11654 case BUILT_IN_FPCLASSIFY:
11655 ret = fold_builtin_fpclassify (loc, args, nargs);
11656 break;
11657
11658 default:
11659 break;
11660 }
11661 if (ret)
11662 {
11663 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11664 SET_EXPR_LOCATION (ret, loc);
11665 TREE_NO_WARNING (ret) = 1;
11666 return ret;
11667 }
11668 return NULL_TREE;
11669 }
11670
11671 /* Initialize format string characters in the target charset. */
11672
11673 bool
11674 init_target_chars (void)
11675 {
11676 static bool init;
11677 if (!init)
11678 {
11679 target_newline = lang_hooks.to_target_charset ('\n');
11680 target_percent = lang_hooks.to_target_charset ('%');
11681 target_c = lang_hooks.to_target_charset ('c');
11682 target_s = lang_hooks.to_target_charset ('s');
11683 if (target_newline == 0 || target_percent == 0 || target_c == 0
11684 || target_s == 0)
11685 return false;
11686
11687 target_percent_c[0] = target_percent;
11688 target_percent_c[1] = target_c;
11689 target_percent_c[2] = '\0';
11690
11691 target_percent_s[0] = target_percent;
11692 target_percent_s[1] = target_s;
11693 target_percent_s[2] = '\0';
11694
11695 target_percent_s_newline[0] = target_percent;
11696 target_percent_s_newline[1] = target_s;
11697 target_percent_s_newline[2] = target_newline;
11698 target_percent_s_newline[3] = '\0';
11699
11700 init = true;
11701 }
11702 return true;
11703 }
11704
11705 /* Helper function for do_mpfr_arg*().  Ensure M is a normal number
11706 and no overflow/underflow occurred.  INEXACT is true if M was not
11707 exactly calculated.  TYPE is the tree type for the result.  This
11708 function assumes that the caller cleared the MPFR flags immediately
11709 before calculating M, so any flag now set was raised by that
11710 calculation.  Return NULL_TREE if any checks fail.  */
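/* The canonical calling sequence is therefore

     mpfr_clear_flags ();
     inexact = func (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);

   as used by do_mpfr_arg1 and friends below.  */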
11711
11712 static tree
11713 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11714 {
11715 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11716 overflow/underflow occurred. If -frounding-math, proceed iff the
11717 result of calling FUNC was exact. */
11718 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11719 && (!flag_rounding_math || !inexact))
11720 {
11721 REAL_VALUE_TYPE rr;
11722
11723 real_from_mpfr (&rr, m, type, GMP_RNDN);
11724 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11725 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11726 but the mpfr_t is not, then we underflowed in the
11727 conversion. */
11728 if (real_isfinite (&rr)
11729 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11730 {
11731 REAL_VALUE_TYPE rmode;
11732
11733 real_convert (&rmode, TYPE_MODE (type), &rr);
11734 /* Proceed iff the specified mode can hold the value. */
11735 if (real_identical (&rmode, &rr))
11736 return build_real (type, rmode);
11737 }
11738 }
11739 return NULL_TREE;
11740 }
11741
11742 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11743 number and no overflow/underflow occurred. INEXACT is true if M
11744 was not exactly calculated. TYPE is the tree type for the result.
11745 This function assumes that the caller cleared the MPFR flags
11746 immediately before calculating M, so any flag now set was raised by
11747 that calculation.  Return NULL_TREE if any checks fail; if
11748 FORCE_CONVERT is true, the checks are bypassed.  */
11749
11750 static tree
11751 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11752 {
11753 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11754 overflow/underflow occurred. If -frounding-math, proceed iff the
11755 result of calling FUNC was exact. */
11756 if (force_convert
11757 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11758 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11759 && (!flag_rounding_math || !inexact)))
11760 {
11761 REAL_VALUE_TYPE re, im;
11762
11763 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11764 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11765 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11766 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11767 but the mpfr_t is not, then we underflowed in the
11768 conversion. */
11769 if (force_convert
11770 || (real_isfinite (&re) && real_isfinite (&im)
11771 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11772 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11773 {
11774 REAL_VALUE_TYPE re_mode, im_mode;
11775
11776 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11777 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11778 /* Proceed iff the specified mode can hold the value. */
11779 if (force_convert
11780 || (real_identical (&re_mode, &re)
11781 && real_identical (&im_mode, &im)))
11782 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11783 build_real (TREE_TYPE (type), im_mode));
11784 }
11785 }
11786 return NULL_TREE;
11787 }
11788
11789 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11790 FUNC on it and return the resulting value as a tree with type TYPE.
11791 If MIN and/or MAX are not NULL, then the supplied ARG must be
11792 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11793 acceptable values, otherwise they are not. The mpfr precision is
11794 set to the precision of TYPE. We assume that function FUNC returns
11795 zero if the result could be calculated exactly within the requested
11796 precision. */
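/* E.g. folding sin of a REAL_CST typically passes mpfr_sin with no
   bounds, while a function such as asin whose domain is [-1, 1] passes
   &dconstm1 and &dconst1 with INCLUSIVE true.  */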
11797
11798 static tree
11799 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11800 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11801 bool inclusive)
11802 {
11803 tree result = NULL_TREE;
11804
11805 STRIP_NOPS (arg);
11806
11807 /* To proceed, MPFR must exactly represent the target floating point
11808 format, which only happens when the target base equals two. */
11809 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11810 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11811 {
11812 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11813
11814 if (real_isfinite (ra)
11815 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
11816 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
11817 {
11818 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11819 const int prec = fmt->p;
11820 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11821 int inexact;
11822 mpfr_t m;
11823
11824 mpfr_init2 (m, prec);
11825 mpfr_from_real (m, ra, GMP_RNDN);
11826 mpfr_clear_flags ();
11827 inexact = func (m, m, rnd);
11828 result = do_mpfr_ckconv (m, type, inexact);
11829 mpfr_clear (m);
11830 }
11831 }
11832
11833 return result;
11834 }
11835
11836 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11837 FUNC on it and return the resulting value as a tree with type TYPE.
11838 The mpfr precision is set to the precision of TYPE. We assume that
11839 function FUNC returns zero if the result could be calculated
11840 exactly within the requested precision. */
11841
11842 static tree
11843 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11844 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11845 {
11846 tree result = NULL_TREE;
11847
11848 STRIP_NOPS (arg1);
11849 STRIP_NOPS (arg2);
11850
11851 /* To proceed, MPFR must exactly represent the target floating point
11852 format, which only happens when the target base equals two. */
11853 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11854 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11855 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11856 {
11857 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11858 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11859
11860 if (real_isfinite (ra1) && real_isfinite (ra2))
11861 {
11862 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11863 const int prec = fmt->p;
11864 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11865 int inexact;
11866 mpfr_t m1, m2;
11867
11868 mpfr_inits2 (prec, m1, m2, NULL);
11869 mpfr_from_real (m1, ra1, GMP_RNDN);
11870 mpfr_from_real (m2, ra2, GMP_RNDN);
11871 mpfr_clear_flags ();
11872 inexact = func (m1, m1, m2, rnd);
11873 result = do_mpfr_ckconv (m1, type, inexact);
11874 mpfr_clears (m1, m2, NULL);
11875 }
11876 }
11877
11878 return result;
11879 }
11880
11881 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11882 FUNC on it and return the resulting value as a tree with type TYPE.
11883 The mpfr precision is set to the precision of TYPE. We assume that
11884 function FUNC returns zero if the result could be calculated
11885 exactly within the requested precision. */
11886
11887 static tree
11888 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11889 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11890 {
11891 tree result = NULL_TREE;
11892
11893 STRIP_NOPS (arg1);
11894 STRIP_NOPS (arg2);
11895 STRIP_NOPS (arg3);
11896
11897 /* To proceed, MPFR must exactly represent the target floating point
11898 format, which only happens when the target base equals two. */
11899 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11900 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11901 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11902 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11903 {
11904 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11905 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11906 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11907
11908 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11909 {
11910 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11911 const int prec = fmt->p;
11912 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11913 int inexact;
11914 mpfr_t m1, m2, m3;
11915
11916 mpfr_inits2 (prec, m1, m2, m3, NULL);
11917 mpfr_from_real (m1, ra1, GMP_RNDN);
11918 mpfr_from_real (m2, ra2, GMP_RNDN);
11919 mpfr_from_real (m3, ra3, GMP_RNDN);
11920 mpfr_clear_flags ();
11921 inexact = func (m1, m1, m2, m3, rnd);
11922 result = do_mpfr_ckconv (m1, type, inexact);
11923 mpfr_clears (m1, m2, m3, NULL);
11924 }
11925 }
11926
11927 return result;
11928 }
11929
11930 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11931 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11932 If ARG_SINP and ARG_COSP are NULL then the result is returned
11933 as a complex value.
11934 The type is taken from the type of ARG and is used for setting the
11935 precision of the calculation and results. */
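/* In the complex case the cosine becomes the real part and the sine
   the imaginary part, matching cexp (I*x) == cos (x) + I*sin (x).  */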
11936
11937 static tree
11938 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11939 {
11940 tree const type = TREE_TYPE (arg);
11941 tree result = NULL_TREE;
11942
11943 STRIP_NOPS (arg);
11944
11945 /* To proceed, MPFR must exactly represent the target floating point
11946 format, which only happens when the target base equals two. */
11947 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11948 && TREE_CODE (arg) == REAL_CST
11949 && !TREE_OVERFLOW (arg))
11950 {
11951 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11952
11953 if (real_isfinite (ra))
11954 {
11955 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11956 const int prec = fmt->p;
11957 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11958 tree result_s, result_c;
11959 int inexact;
11960 mpfr_t m, ms, mc;
11961
11962 mpfr_inits2 (prec, m, ms, mc, NULL);
11963 mpfr_from_real (m, ra, GMP_RNDN);
11964 mpfr_clear_flags ();
11965 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11966 result_s = do_mpfr_ckconv (ms, type, inexact);
11967 result_c = do_mpfr_ckconv (mc, type, inexact);
11968 mpfr_clears (m, ms, mc, NULL);
11969 if (result_s && result_c)
11970 {
11971 /* If we are to return the result as a complex value, do so. */
11972 if (!arg_sinp && !arg_cosp)
11973 return build_complex (build_complex_type (type),
11974 result_c, result_s);
11975
11976 /* Dereference the sin/cos pointer arguments. */
11977 arg_sinp = build_fold_indirect_ref (arg_sinp);
11978 arg_cosp = build_fold_indirect_ref (arg_cosp);
11979 /* Proceed iff valid pointer types were passed in.  */
11980 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11981 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11982 {
11983 /* Set the values. */
11984 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11985 result_s);
11986 TREE_SIDE_EFFECTS (result_s) = 1;
11987 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11988 result_c);
11989 TREE_SIDE_EFFECTS (result_c) = 1;
11990 /* Combine the assignments into a compound expr. */
11991 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11992 result_s, result_c));
11993 }
11994 }
11995 }
11996 }
11997 return result;
11998 }
11999
12000 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12001 two-argument mpfr order N Bessel function FUNC on them and return
12002 the resulting value as a tree with type TYPE. The mpfr precision
12003 is set to the precision of TYPE. We assume that function FUNC
12004 returns zero if the result could be calculated exactly within the
12005 requested precision. */
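/* See the BUILT_IN_JN and BUILT_IN_YN cases in fold_builtin_2 above:
   jn passes no lower bound, while yn requires a strictly positive
   second argument by passing &dconst0 with INCLUSIVE false.  */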
12006 static tree
12007 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12008 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12009 const REAL_VALUE_TYPE *min, bool inclusive)
12010 {
12011 tree result = NULL_TREE;
12012
12013 STRIP_NOPS (arg1);
12014 STRIP_NOPS (arg2);
12015
12016 /* To proceed, MPFR must exactly represent the target floating point
12017 format, which only happens when the target base equals two. */
12018 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12019 && tree_fits_shwi_p (arg1)
12020 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12021 {
12022 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12023 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12024
12025 if (n == (long) n
12026 && real_isfinite (ra)
12027 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
12028 {
12029 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12030 const int prec = fmt->p;
12031 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12032 int inexact;
12033 mpfr_t m;
12034
12035 mpfr_init2 (m, prec);
12036 mpfr_from_real (m, ra, GMP_RNDN);
12037 mpfr_clear_flags ();
12038 inexact = func (m, n, m, rnd);
12039 result = do_mpfr_ckconv (m, type, inexact);
12040 mpfr_clear (m);
12041 }
12042 }
12043
12044 return result;
12045 }
12046
12047 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12048 the pointer *(ARG_QUO) and return the result. The type is taken
12049 from the type of ARG0 and is used for setting the precision of the
12050 calculation and results. */
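/* For example, folding remquo (5.0, 2.0, &q) yields a COMPOUND_EXPR
   that stores 2 in *ARG_QUO and evaluates to the remainder 1.0, since
   5.0 / 2.0 == 2.5 rounds to the even quotient 2.  */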
12051
12052 static tree
12053 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12054 {
12055 tree const type = TREE_TYPE (arg0);
12056 tree result = NULL_TREE;
12057
12058 STRIP_NOPS (arg0);
12059 STRIP_NOPS (arg1);
12060
12061 /* To proceed, MPFR must exactly represent the target floating point
12062 format, which only happens when the target base equals two. */
12063 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12064 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12065 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12066 {
12067 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12068 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12069
12070 if (real_isfinite (ra0) && real_isfinite (ra1))
12071 {
12072 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12073 const int prec = fmt->p;
12074 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12075 tree result_rem;
12076 long integer_quo;
12077 mpfr_t m0, m1;
12078
12079 mpfr_inits2 (prec, m0, m1, NULL);
12080 mpfr_from_real (m0, ra0, GMP_RNDN);
12081 mpfr_from_real (m1, ra1, GMP_RNDN);
12082 mpfr_clear_flags ();
12083 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12084 /* Remquo is independent of the rounding mode, so pass
12085 inexact=0 to do_mpfr_ckconv(). */
12086 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12087 mpfr_clears (m0, m1, NULL);
12088 if (result_rem)
12089 {
12090 /* MPFR calculates quo in the host's long, so it may
12091 return more bits in quo than the target int can hold
12092 if sizeof(host long) > sizeof(target int). This can
12093 happen even for native compilers in LP64 mode. In
12094 these cases, reduce quo modulo 2^(INT_TYPE_SIZE - 1),
12095 keeping only as many low-order bits as the target
12096 int can hold, with one bit left for the sign. */
12097 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12098 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
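/* For example, with a 64-bit host long and a 32-bit target int,
   a quo of 0x123456789 is reduced modulo 2^31 to 0x23456789,
   which fits in the target int.  */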
12099
12100 /* Dereference the quo pointer argument. */
12101 arg_quo = build_fold_indirect_ref (arg_quo);
12102 /* Proceed iff a valid pointer type was passed in. */
12103 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12104 {
12105 /* Set the value. */
12106 tree result_quo
12107 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12108 build_int_cst (TREE_TYPE (arg_quo),
12109 integer_quo));
12110 TREE_SIDE_EFFECTS (result_quo) = 1;
12111 /* Combine the quo assignment with the rem. */
12112 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12113 result_quo, result_rem));
12114 }
12115 }
12116 }
12117 }
12118 return result;
12119 }
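/* A minimal standalone sketch (not part of GCC) of the mpfr_remquo
   pattern used above; the inputs 7.0 and 3.0 and the 53-bit precision
   are hypothetical.  Guarded by #if 0 so it is never compiled here.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m0, m1;
  long quo;

  mpfr_inits2 (53, m0, m1, NULL);
  mpfr_set_d (m0, 7.0, GMP_RNDN);
  mpfr_set_d (m1, 3.0, GMP_RNDN);
  mpfr_clear_flags ();
  /* remquo (7, 3): remainder 1, low bits of the quotient 2.  */
  mpfr_remquo (m0, &quo, m0, m1, GMP_RNDN);
  mpfr_printf ("rem=%Rg quo=%ld\n", m0, quo);
  mpfr_clears (m0, m1, NULL);
  return 0;
}
#endif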
12120
12121 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12122 resulting value as a tree with type TYPE. The mpfr precision is
12123 set to the precision of TYPE. We assume that this mpfr function
12124 returns zero if the result could be calculated exactly within the
12125 requested precision. In addition, the integer pointer represented
12126 by ARG_SG will be dereferenced and set to the appropriate signgam
12127 (-1 or 1) value. */
12128
12129 static tree
12130 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12131 {
12132 tree result = NULL_TREE;
12133
12134 STRIP_NOPS (arg);
12135
12136 /* To proceed, MPFR must exactly represent the target floating point
12137 format, which only happens when the target base equals two. Also
12138 verify ARG is a constant and that ARG_SG is an int pointer. */
12139 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12140 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12141 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12142 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12143 {
12144 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12145
12146 /* In addition to NaN and Inf, the argument cannot be zero or a
12147 negative integer, where lgamma has its poles. */
12148 if (real_isfinite (ra)
12149 && ra->cl != rvc_zero
12150 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12151 {
12152 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12153 const int prec = fmt->p;
12154 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12155 int inexact, sg;
12156 mpfr_t m;
12157 tree result_lg;
12158
12159 mpfr_init2 (m, prec);
12160 mpfr_from_real (m, ra, GMP_RNDN);
12161 mpfr_clear_flags ();
12162 inexact = mpfr_lgamma (m, &sg, m, rnd);
12163 result_lg = do_mpfr_ckconv (m, type, inexact);
12164 mpfr_clear (m);
12165 if (result_lg)
12166 {
12167 tree result_sg;
12168
12169 /* Dereference the arg_sg pointer argument. */
12170 arg_sg = build_fold_indirect_ref (arg_sg);
12171 /* Assign the signgam value into *arg_sg. */
12172 result_sg = fold_build2 (MODIFY_EXPR,
12173 TREE_TYPE (arg_sg), arg_sg,
12174 build_int_cst (TREE_TYPE (arg_sg), sg));
12175 TREE_SIDE_EFFECTS (result_sg) = 1;
12176 /* Combine the signgam assignment with the lgamma result. */
12177 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12178 result_sg, result_lg));
12179 }
12180 }
12181 }
12182
12183 return result;
12184 }
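/* A minimal standalone sketch (not part of GCC) of the mpfr_lgamma
   pattern used above; the input -2.5 and the 53-bit precision are
   hypothetical.  Guarded by #if 0 so it is never compiled here.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int inexact, sg;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, -2.5, GMP_RNDN);
  mpfr_clear_flags ();
  /* Gamma(-2.5) is negative, so SG comes back as -1 and M holds
     log |Gamma(-2.5)|, matching the signgam convention above.  */
  inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
  mpfr_printf ("lgamma=%Rg signgam=%d (inexact=%d)\n", m, sg, inexact);
  mpfr_clear (m);
  return 0;
}
#endif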
12185
12186 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12187 function FUNC on it and return the resulting value as a tree with
12188 type TYPE. The mpfr precision is set to the precision of TYPE. We
12189 assume that function FUNC returns zero if the result could be
12190 calculated exactly within the requested precision. */
12191
12192 static tree
12193 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12194 {
12195 tree result = NULL_TREE;
12196
12197 STRIP_NOPS (arg);
12198
12199 /* To proceed, MPFR must exactly represent the target floating point
12200 format, which only happens when the target base equals two. */
12201 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12202 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12203 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12204 {
12205 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12206 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12207
12208 if (real_isfinite (re) && real_isfinite (im))
12209 {
12210 const struct real_format *const fmt =
12211 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12212 const int prec = fmt->p;
12213 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12214 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12215 int inexact;
12216 mpc_t m;
12217
12218 mpc_init2 (m, prec);
12219 mpfr_from_real (mpc_realref (m), re, rnd);
12220 mpfr_from_real (mpc_imagref (m), im, rnd);
12221 mpfr_clear_flags ();
12222 inexact = func (m, m, crnd);
12223 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12224 mpc_clear (m);
12225 }
12226 }
12227
12228 return result;
12229 }
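/* A minimal standalone sketch (not part of GCC) of the one-argument
   MPC pattern used above, with mpc_sqrt standing in for FUNC and a
   hypothetical 53-bit precision.  Guarded by #if 0.  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m;
  int inexact;

  mpc_init2 (m, 53);
  mpc_set_d_d (m, -1.0, 0.0, MPC_RNDNN);
  mpfr_clear_flags ();
  /* csqrt (-1 + 0i) == 0 + 1i on the principal branch.  */
  inexact = mpc_sqrt (m, m, MPC_RNDNN);
  printf ("re=%g im=%g (inexact=%d)\n",
          mpfr_get_d (mpc_realref (m), GMP_RNDN),
          mpfr_get_d (mpc_imagref (m), GMP_RNDN), inexact);
  mpc_clear (m);
  return 0;
}
#endif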
12230
12231 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12232 mpc function FUNC on them and return the resulting value as a tree
12233 with type TYPE. The mpfr precision is set to the precision of
12234 TYPE. We assume that function FUNC returns zero if the result
12235 could be calculated exactly within the requested precision. If
12236 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12237 in the arguments and/or results. */
12238
12239 tree
12240 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12241 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12242 {
12243 tree result = NULL_TREE;
12244
12245 STRIP_NOPS (arg0);
12246 STRIP_NOPS (arg1);
12247
12248 /* To proceed, MPFR must exactly represent the target floating point
12249 format, which only happens when the target base equals two. */
12250 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12251 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12252 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12253 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12254 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12255 {
12256 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12257 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12258 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12259 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12260
12261 if (do_nonfinite
12262 || (real_isfinite (re0) && real_isfinite (im0)
12263 && real_isfinite (re1) && real_isfinite (im1)))
12264 {
12265 const struct real_format *const fmt =
12266 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12267 const int prec = fmt->p;
12268 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12269 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12270 int inexact;
12271 mpc_t m0, m1;
12272
12273 mpc_init2 (m0, prec);
12274 mpc_init2 (m1, prec);
12275 mpfr_from_real (mpc_realref (m0), re0, rnd);
12276 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12277 mpfr_from_real (mpc_realref (m1), re1, rnd);
12278 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12279 mpfr_clear_flags ();
12280 inexact = func (m0, m0, m1, crnd);
12281 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12282 mpc_clear (m0);
12283 mpc_clear (m1);
12284 }
12285 }
12286
12287 return result;
12288 }
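/* A minimal standalone sketch (not part of GCC) of the two-argument
   MPC pattern used above, with mpc_pow standing in for FUNC.  The
   inputs i and 2 and the 53-bit precision are hypothetical.  Guarded
   by #if 0.  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m0, m1;
  int inexact;

  mpc_init2 (m0, 53);
  mpc_init2 (m1, 53);
  mpc_set_d_d (m0, 0.0, 1.0, MPC_RNDNN);
  mpc_set_d_d (m1, 2.0, 0.0, MPC_RNDNN);
  mpfr_clear_flags ();
  /* cpow (i, 2) is -1, up to rounding in the underlying exp/log
     evaluation, hence a possibly nonzero INEXACT.  */
  inexact = mpc_pow (m0, m0, m1, MPC_RNDNN);
  printf ("re=%g im=%g (inexact=%d)\n",
          mpfr_get_d (mpc_realref (m0), GMP_RNDN),
          mpfr_get_d (mpc_imagref (m0), GMP_RNDN), inexact);
  mpc_clear (m0);
  mpc_clear (m1);
  return 0;
}
#endif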
12289
12290 /* A wrapper function for builtin folding that prevents warnings for
12291 "statement without effect" and the like, caused by removing the
12292 call node before the warning is generated. */
12293
12294 tree
12295 fold_call_stmt (gcall *stmt, bool ignore)
12296 {
12297 tree ret = NULL_TREE;
12298 tree fndecl = gimple_call_fndecl (stmt);
12299 location_t loc = gimple_location (stmt);
12300 if (fndecl
12301 && TREE_CODE (fndecl) == FUNCTION_DECL
12302 && DECL_BUILT_IN (fndecl)
12303 && !gimple_call_va_arg_pack_p (stmt))
12304 {
12305 int nargs = gimple_call_num_args (stmt);
12306 tree *args = (nargs > 0
12307 ? gimple_call_arg_ptr (stmt, 0)
12308 : &error_mark_node);
12309
12310 if (avoid_folding_inline_builtin (fndecl))
12311 return NULL_TREE;
12312 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12313 {
12314 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12315 }
12316 else
12317 {
12318 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12319 if (ret)
12320 {
12321 /* Propagate location information from original call to
12322 expansion of builtin. Otherwise things like
12323 maybe_emit_chk_warning, that operate on the expansion
12324 of a builtin, will use the wrong location information. */
12325 if (gimple_has_location (stmt))
12326 {
12327 tree realret = ret;
12328 if (TREE_CODE (ret) == NOP_EXPR)
12329 realret = TREE_OPERAND (ret, 0);
12330 if (CAN_HAVE_LOCATION_P (realret)
12331 && !EXPR_HAS_LOCATION (realret))
12332 SET_EXPR_LOCATION (realret, loc);
12333 return realret;
12334 }
12335 return ret;
12336 }
12337 }
12338 }
12339 return NULL_TREE;
12340 }
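/* For example, a GIMPLE call such as

     n = __builtin_popcount (255);

   is folded here through fold_builtin_n to the constant 8, and the
   location of the original call statement is propagated onto the
   folded expression so later warnings point at the right line.  */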
12341
12342 /* Look up the function in builtin_decl that corresponds to DECL
12343 and set ASMSPEC as its user assembler name. DECL must be a
12344 function decl that declares a builtin. */
12345
12346 void
12347 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12348 {
12349 tree builtin;
12350 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12351 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12352 && asmspec != 0);
12353
12354 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12355 set_user_assembler_name (builtin, asmspec);
12356 switch (DECL_FUNCTION_CODE (decl))
12357 {
12358 case BUILT_IN_MEMCPY:
12359 init_block_move_fn (asmspec);
12360 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12361 break;
12362 case BUILT_IN_MEMSET:
12363 init_block_clear_fn (asmspec);
12364 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12365 break;
12366 case BUILT_IN_MEMMOVE:
12367 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12368 break;
12369 case BUILT_IN_MEMCMP:
12370 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12371 break;
12372 case BUILT_IN_ABORT:
12373 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12374 break;
12375 case BUILT_IN_FFS:
12376 if (INT_TYPE_SIZE < BITS_PER_WORD)
12377 {
12378 set_user_assembler_libfunc ("ffs", asmspec);
12379 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12380 MODE_INT, 0), "ffs");
12381 }
12382 break;
12383 default:
12384 break;
12385 }
12386 }
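/* For example, a translation unit containing

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   reaches this function with ASMSPEC "__my_memcpy", after which both
   the block-move expansion machinery and the memcpy libcall refer to
   __my_memcpy instead of memcpy.  */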
12387
12388 /* Return true if DECL is a builtin that expands to a constant or similarly
12389 simple code. */
12390 bool
12391 is_simple_builtin (tree decl)
12392 {
12393 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12394 switch (DECL_FUNCTION_CODE (decl))
12395 {
12396 /* Builtins that expand to constants. */
12397 case BUILT_IN_CONSTANT_P:
12398 case BUILT_IN_EXPECT:
12399 case BUILT_IN_OBJECT_SIZE:
12400 case BUILT_IN_UNREACHABLE:
12401 /* Simple register moves or loads from stack. */
12402 case BUILT_IN_ASSUME_ALIGNED:
12403 case BUILT_IN_RETURN_ADDRESS:
12404 case BUILT_IN_EXTRACT_RETURN_ADDR:
12405 case BUILT_IN_FROB_RETURN_ADDR:
12406 case BUILT_IN_RETURN:
12407 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12408 case BUILT_IN_FRAME_ADDRESS:
12409 case BUILT_IN_VA_END:
12410 case BUILT_IN_STACK_SAVE:
12411 case BUILT_IN_STACK_RESTORE:
12412 /* Exception state returns or moves registers around. */
12413 case BUILT_IN_EH_FILTER:
12414 case BUILT_IN_EH_POINTER:
12415 case BUILT_IN_EH_COPY_VALUES:
12416 return true;
12417
12418 default:
12419 return false;
12420 }
12421
12422 return false;
12423 }
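/* For example, __builtin_return_address (0) expands to little more
   than a register or stack-slot read, so it is accepted here, while
   __builtin_memcpy may expand to a full library call and is not.  */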
12424
12425 /* Return true if DECL is a builtin that is not expensive, i.e., it is
12426 most probably expanded inline into reasonably simple code. This is a
12427 superset of is_simple_builtin. */
12428 bool
12429 is_inexpensive_builtin (tree decl)
12430 {
12431 if (!decl)
12432 return false;
12433 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12434 return true;
12435 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12436 switch (DECL_FUNCTION_CODE (decl))
12437 {
12438 case BUILT_IN_ABS:
12439 case BUILT_IN_ALLOCA:
12440 case BUILT_IN_ALLOCA_WITH_ALIGN:
12441 case BUILT_IN_BSWAP16:
12442 case BUILT_IN_BSWAP32:
12443 case BUILT_IN_BSWAP64:
12444 case BUILT_IN_CLZ:
12445 case BUILT_IN_CLZIMAX:
12446 case BUILT_IN_CLZL:
12447 case BUILT_IN_CLZLL:
12448 case BUILT_IN_CTZ:
12449 case BUILT_IN_CTZIMAX:
12450 case BUILT_IN_CTZL:
12451 case BUILT_IN_CTZLL:
12452 case BUILT_IN_FFS:
12453 case BUILT_IN_FFSIMAX:
12454 case BUILT_IN_FFSL:
12455 case BUILT_IN_FFSLL:
12456 case BUILT_IN_IMAXABS:
12457 case BUILT_IN_FINITE:
12458 case BUILT_IN_FINITEF:
12459 case BUILT_IN_FINITEL:
12460 case BUILT_IN_FINITED32:
12461 case BUILT_IN_FINITED64:
12462 case BUILT_IN_FINITED128:
12463 case BUILT_IN_FPCLASSIFY:
12464 case BUILT_IN_ISFINITE:
12465 case BUILT_IN_ISINF_SIGN:
12466 case BUILT_IN_ISINF:
12467 case BUILT_IN_ISINFF:
12468 case BUILT_IN_ISINFL:
12469 case BUILT_IN_ISINFD32:
12470 case BUILT_IN_ISINFD64:
12471 case BUILT_IN_ISINFD128:
12472 case BUILT_IN_ISNAN:
12473 case BUILT_IN_ISNANF:
12474 case BUILT_IN_ISNANL:
12475 case BUILT_IN_ISNAND32:
12476 case BUILT_IN_ISNAND64:
12477 case BUILT_IN_ISNAND128:
12478 case BUILT_IN_ISNORMAL:
12479 case BUILT_IN_ISGREATER:
12480 case BUILT_IN_ISGREATEREQUAL:
12481 case BUILT_IN_ISLESS:
12482 case BUILT_IN_ISLESSEQUAL:
12483 case BUILT_IN_ISLESSGREATER:
12484 case BUILT_IN_ISUNORDERED:
12485 case BUILT_IN_VA_ARG_PACK:
12486 case BUILT_IN_VA_ARG_PACK_LEN:
12487 case BUILT_IN_VA_COPY:
12488 case BUILT_IN_TRAP:
12489 case BUILT_IN_SAVEREGS:
12490 case BUILT_IN_POPCOUNTL:
12491 case BUILT_IN_POPCOUNTLL:
12492 case BUILT_IN_POPCOUNTIMAX:
12493 case BUILT_IN_POPCOUNT:
12494 case BUILT_IN_PARITYL:
12495 case BUILT_IN_PARITYLL:
12496 case BUILT_IN_PARITYIMAX:
12497 case BUILT_IN_PARITY:
12498 case BUILT_IN_LABS:
12499 case BUILT_IN_LLABS:
12500 case BUILT_IN_PREFETCH:
12501 case BUILT_IN_ACC_ON_DEVICE:
12502 return true;
12503
12504 default:
12505 return is_simple_builtin (decl);
12506 }
12507
12508 return false;
12509 }
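/* For example, __builtin_bswap32 typically expands to a single
   byte-swap instruction, so it is considered inexpensive here even
   though it does not qualify as "simple" for is_simple_builtin.  */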