/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "internal-fn.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or (with -fcilkplus) is one of the Cilk runtime entry points below.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
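
/* For instance, "__builtin_memcpy", "__sync_fetch_and_add" and
   "__atomic_load_n" all satisfy is_builtin_name, while a plain
   "memcpy" does not.  (Illustrative note, not from the original
   sources.)  */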


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
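
/* Illustrative example (not from the original sources): for a pointer
   known to be 16-byte aligned plus a constant offset of 4 bytes,
   get_pointer_alignment_1 yields *ALIGNP == 128 and *BITPOSP == 32 (bits),
   and get_pointer_alignment therefore returns 32, i.e. the 4-byte
   alignment actually guaranteed, per the invariant
   ptr & (align - 1) == bitpos together with align = bitpos & -bitpos.  */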

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
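
/* Illustrative examples (not from the original sources): for the
   constant "foo\0bar", a known offset of 4 folds to strlen ("bar") == 3,
   while a non-constant offset yields NULL_TREE, because the embedded
   zero byte makes the result depend on where the search starts.  */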

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
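
/* Example (illustrative, not from the original sources): on a
   little-endian target, c_readstr ("ab", HImode) produces the constant
   0x6261, i.e. 'a' (0x61) in the low byte and 'b' (0x62) in the high
   byte, matching what a 16-bit load from the string would read.  */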

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
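
/* Illustrative use at the source level (not from the original sources):
   "void *ra = __builtin_return_address (0);" yields the address the
   current function will return to, and "__builtin_frame_address (0)"
   the current frame's address; a nonzero COUNT walks up the dynamic
   chain as implemented above.  */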

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
}
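
/* A sketch of the resulting buffer layout, derived from the code above
   (word indices; illustrative, not from the original sources):

     buf[0]   frame pointer value (targetm.builtin_setjmp_frame_value)
     buf[1]   address of RECEIVER_LABEL
     buf[2..] machine-dependent stack save area (sa_mode)  */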

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
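/* Illustrative user-level pairing (not from the original sources); the
   second argument must be 1, as asserted above, and the longjmp must
   target a setjmp buffer set up in a *different* function, per the
   comment above:

     if (__builtin_setjmp (buf) == 0)
       do_work ();         // callees may __builtin_longjmp (buf, 1)

   These builtins are intended for internal exception handling only.  */

/* Return true if more call expr arguments remain in ITER.  */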
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink: if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
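
/* For example (illustrative, not from the original sources), the check
   used when expanding __builtin_memcpy is
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE): exactly three arguments with those codes, no more.  */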

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
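
/* Illustrative source-level use (not from the original sources):

     __builtin_prefetch (&a[i + 16], 0, 3);

   prefetches for read (0) with maximal temporal locality (3, the
   default); on targets without a prefetch pattern this expands to
   nothing unless the address computation has side effects.  */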

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
\f
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
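
/* Illustrative pairing (not from the original sources): a forwarding
   function can be written at the source level as

     return __builtin_apply (fn, __builtin_apply_args (), 64);

   where 64 is a caller-supplied upper bound on the size of the stacked
   argument data.  */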
1545
1546 /* Perform an untyped call and save the state required to perform an
1547 untyped return of whatever value was returned by the given function. */
1548
1549 static rtx
1550 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1551 {
1552 int size, align, regno;
1553 machine_mode mode;
1554 rtx incoming_args, result, reg, dest, src;
1555 rtx_call_insn *call_insn;
1556 rtx old_stack_level = 0;
1557 rtx call_fusage = 0;
1558 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1559
1560 arguments = convert_memory_address (Pmode, arguments);
1561
1562 /* Create a block where the return registers can be saved. */
1563 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1564
1565 /* Fetch the arg pointer from the ARGUMENTS block. */
1566 incoming_args = gen_reg_rtx (Pmode);
1567 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1568 if (!STACK_GROWS_DOWNWARD)
1569 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1570 incoming_args, 0, OPTAB_LIB_WIDEN);
1571
1572 /* Push a new argument block and copy the arguments. Do not allow
1573 the (potential) memcpy call below to interfere with our stack
1574 manipulations. */
1575 do_pending_stack_adjust ();
1576 NO_DEFER_POP;
1577
1578 /* Save the stack with nonlocal if available. */
1579 if (targetm.have_save_stack_nonlocal ())
1580 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1581 else
1582 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1583
1584 /* Allocate a block of memory onto the stack and copy the memory
1585 arguments to the outgoing arguments address. We can pass TRUE
1586 as the 4th argument because we just saved the stack pointer
1587 and will restore it right after the call. */
1588 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1589
1590 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1591 may have already set current_function_calls_alloca to true.
1592 current_function_calls_alloca won't be set if argsize is zero,
1593 so we have to guarantee need_drap is true here. */
1594 if (SUPPORTS_STACK_ALIGNMENT)
1595 crtl->need_drap = true;
1596
1597 dest = virtual_outgoing_args_rtx;
1598 if (!STACK_GROWS_DOWNWARD)
1599 {
1600 if (CONST_INT_P (argsize))
1601 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1602 else
1603 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1604 }
1605 dest = gen_rtx_MEM (BLKmode, dest);
1606 set_mem_align (dest, PARM_BOUNDARY);
1607 src = gen_rtx_MEM (BLKmode, incoming_args);
1608 set_mem_align (src, PARM_BOUNDARY);
1609 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1610
1611 /* Refer to the argument block. */
1612 apply_args_size ();
1613 arguments = gen_rtx_MEM (BLKmode, arguments);
1614 set_mem_align (arguments, PARM_BOUNDARY);
1615
1616 /* Walk past the arg-pointer and structure value address. */
1617 size = GET_MODE_SIZE (Pmode);
1618 if (struct_value)
1619 size += GET_MODE_SIZE (Pmode);
1620
1621 /* Restore each of the registers previously saved. Make USE insns
1622 for each of these registers for use in making the call. */
1623 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1624 if ((mode = apply_args_mode[regno]) != VOIDmode)
1625 {
1626 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1627 if (size % align != 0)
1628 size = CEIL (size, align) * align;
1629 reg = gen_rtx_REG (mode, regno);
1630 emit_move_insn (reg, adjust_address (arguments, mode, size));
1631 use_reg (&call_fusage, reg);
1632 size += GET_MODE_SIZE (mode);
1633 }
1634
1635 /* Restore the structure value address unless this is passed as an
1636 "invisible" first argument. */
1637 size = GET_MODE_SIZE (Pmode);
1638 if (struct_value)
1639 {
1640 rtx value = gen_reg_rtx (Pmode);
1641 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1642 emit_move_insn (struct_value, value);
1643 if (REG_P (struct_value))
1644 use_reg (&call_fusage, struct_value);
1645 size += GET_MODE_SIZE (Pmode);
1646 }
1647
1648 /* All arguments and registers used for the call are set up by now! */
1649 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1650
1651 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1652 needs to be done; nor do we want to load it into a register as an
1653 optimization, because prepare_call_address already did that where needed. */
1654 if (GET_CODE (function) != SYMBOL_REF)
1655 function = memory_address (FUNCTION_MODE, function);
1656
1657 /* Generate the actual call instruction and save the return value. */
1658 if (targetm.have_untyped_call ())
1659 {
1660 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1661 emit_call_insn (targetm.gen_untyped_call (mem, result,
1662 result_vector (1, result)));
1663 }
1664 else if (targetm.have_call_value ())
1665 {
1666 rtx valreg = 0;
1667
1668 /* Locate the unique return register. It is not possible to
1669 express a call that sets more than one return register using
1670 call_value; use untyped_call for that. In fact, untyped_call
1671 only needs to save the return registers in the given block. */
1672 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1673 if ((mode = apply_result_mode[regno]) != VOIDmode)
1674 {
1675 gcc_assert (!valreg); /* have_untyped_call required. */
1676
1677 valreg = gen_rtx_REG (mode, regno);
1678 }
1679
1680 emit_insn (targetm.gen_call_value (valreg,
1681 gen_rtx_MEM (FUNCTION_MODE, function),
1682 const0_rtx, NULL_RTX, const0_rtx));
1683
1684 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1685 }
1686 else
1687 gcc_unreachable ();
1688
1689 /* Find the CALL insn we just emitted, and attach the register usage
1690 information. */
1691 call_insn = last_call_insn ();
1692 add_function_usage_to (call_insn, call_fusage);
1693
1694 /* Restore the stack. */
1695 if (targetm.have_save_stack_nonlocal ())
1696 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1697 else
1698 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1699 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1700
1701 OK_DEFER_POP;
1702
1703 /* Return the address of the result block. */
1704 result = copy_addr_to_reg (XEXP (result, 0));
1705 return convert_memory_address (ptr_mode, result);
1706 }
1707
1708 /* Perform an untyped return. */
1709
1710 static void
1711 expand_builtin_return (rtx result)
1712 {
1713 int size, align, regno;
1714 machine_mode mode;
1715 rtx reg;
1716 rtx_insn *call_fusage = 0;
1717
1718 result = convert_memory_address (Pmode, result);
1719
1720 apply_result_size ();
1721 result = gen_rtx_MEM (BLKmode, result);
1722
1723 if (targetm.have_untyped_return ())
1724 {
1725 rtx vector = result_vector (0, result);
1726 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1727 emit_barrier ();
1728 return;
1729 }
1730
1731 /* Restore the return value and note that each value is used. */
1732 size = 0;
1733 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1734 if ((mode = apply_result_mode[regno]) != VOIDmode)
1735 {
1736 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1737 if (size % align != 0)
1738 size = CEIL (size, align) * align;
1739 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1740 emit_move_insn (reg, adjust_address (result, mode, size));
1741
1742 push_to_sequence (call_fusage);
1743 emit_use (reg);
1744 call_fusage = get_insns ();
1745 end_sequence ();
1746 size += GET_MODE_SIZE (mode);
1747 }
1748
1749 /* Put the USE insns before the return. */
1750 emit_insn (call_fusage);
1751
1752 /* Return whatever values were restored by jumping directly to the end
1753 of the function. */
1754 expand_naked_return ();
1755 }
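
/* Taken together, expand_builtin_apply_args, expand_builtin_apply and
   expand_builtin_return implement the untyped forwarding idiom. As a
   user-level sketch (hypothetical code; 64 is an assumed upper bound on
   the pushed-argument block for the target ABI):

     double target_fn (double x, int n);

     double wrapper (double x, int n)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }  */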
1756
1757 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1758
1759 static enum type_class
1760 type_to_class (tree type)
1761 {
1762 switch (TREE_CODE (type))
1763 {
1764 case VOID_TYPE: return void_type_class;
1765 case INTEGER_TYPE: return integer_type_class;
1766 case ENUMERAL_TYPE: return enumeral_type_class;
1767 case BOOLEAN_TYPE: return boolean_type_class;
1768 case POINTER_TYPE: return pointer_type_class;
1769 case REFERENCE_TYPE: return reference_type_class;
1770 case OFFSET_TYPE: return offset_type_class;
1771 case REAL_TYPE: return real_type_class;
1772 case COMPLEX_TYPE: return complex_type_class;
1773 case FUNCTION_TYPE: return function_type_class;
1774 case METHOD_TYPE: return method_type_class;
1775 case RECORD_TYPE: return record_type_class;
1776 case UNION_TYPE:
1777 case QUAL_UNION_TYPE: return union_type_class;
1778 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1779 ? string_type_class : array_type_class);
1780 case LANG_TYPE: return lang_type_class;
1781 default: return no_type_class;
1782 }
1783 }
1784
1785 /* Expand a call EXP to __builtin_classify_type. */
1786
1787 static rtx
1788 expand_builtin_classify_type (tree exp)
1789 {
1790 if (call_expr_nargs (exp))
1791 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1792 return GEN_INT (no_type_class);
1793 }
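
/* For illustration (a sketch): __builtin_classify_type evaluates to one of
   the type_class values above, so given

     int i;  double d;  char *p;

   the calls __builtin_classify_type (i), __builtin_classify_type (d) and
   __builtin_classify_type (p) yield integer_type_class, real_type_class
   and pointer_type_class respectively. Arguments undergo the default
   promotions, so an array argument is classified as a pointer. */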
1794
1795 /* This helper macro, meant to be used in mathfn_built_in below,
1796 determines which among a set of three builtin math functions is
1797 appropriate for a given type mode. The `F' and `L' cases are
1798 automatically generated from the `double' case. */
1799 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1800 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1801 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1802 fcodel = BUILT_IN_MATHFN##L ; break;
1803 /* Similar to above, but appends _R after any F/L suffix. */
1804 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1805 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1806 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1807 fcodel = BUILT_IN_MATHFN##L_R ; break;
1808
1809 /* Return the mathematical function equivalent to FN but operating directly
1810 on TYPE, if available. If IMPLICIT_P is true use the implicit builtin
1811 declaration, otherwise use the explicit declaration. If we can't do the
1812 conversion, return zero. */
1813
1814 static tree
1815 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1816 {
1817 enum built_in_function fcode, fcodef, fcodel, fcode2;
1818
1819 switch (fn)
1820 {
1821 CASE_MATHFN (BUILT_IN_ACOS)
1822 CASE_MATHFN (BUILT_IN_ACOSH)
1823 CASE_MATHFN (BUILT_IN_ASIN)
1824 CASE_MATHFN (BUILT_IN_ASINH)
1825 CASE_MATHFN (BUILT_IN_ATAN)
1826 CASE_MATHFN (BUILT_IN_ATAN2)
1827 CASE_MATHFN (BUILT_IN_ATANH)
1828 CASE_MATHFN (BUILT_IN_CBRT)
1829 CASE_MATHFN (BUILT_IN_CEIL)
1830 CASE_MATHFN (BUILT_IN_CEXPI)
1831 CASE_MATHFN (BUILT_IN_COPYSIGN)
1832 CASE_MATHFN (BUILT_IN_COS)
1833 CASE_MATHFN (BUILT_IN_COSH)
1834 CASE_MATHFN (BUILT_IN_DREM)
1835 CASE_MATHFN (BUILT_IN_ERF)
1836 CASE_MATHFN (BUILT_IN_ERFC)
1837 CASE_MATHFN (BUILT_IN_EXP)
1838 CASE_MATHFN (BUILT_IN_EXP10)
1839 CASE_MATHFN (BUILT_IN_EXP2)
1840 CASE_MATHFN (BUILT_IN_EXPM1)
1841 CASE_MATHFN (BUILT_IN_FABS)
1842 CASE_MATHFN (BUILT_IN_FDIM)
1843 CASE_MATHFN (BUILT_IN_FLOOR)
1844 CASE_MATHFN (BUILT_IN_FMA)
1845 CASE_MATHFN (BUILT_IN_FMAX)
1846 CASE_MATHFN (BUILT_IN_FMIN)
1847 CASE_MATHFN (BUILT_IN_FMOD)
1848 CASE_MATHFN (BUILT_IN_FREXP)
1849 CASE_MATHFN (BUILT_IN_GAMMA)
1850 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1851 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1852 CASE_MATHFN (BUILT_IN_HYPOT)
1853 CASE_MATHFN (BUILT_IN_ILOGB)
1854 CASE_MATHFN (BUILT_IN_ICEIL)
1855 CASE_MATHFN (BUILT_IN_IFLOOR)
1856 CASE_MATHFN (BUILT_IN_INF)
1857 CASE_MATHFN (BUILT_IN_IRINT)
1858 CASE_MATHFN (BUILT_IN_IROUND)
1859 CASE_MATHFN (BUILT_IN_ISINF)
1860 CASE_MATHFN (BUILT_IN_J0)
1861 CASE_MATHFN (BUILT_IN_J1)
1862 CASE_MATHFN (BUILT_IN_JN)
1863 CASE_MATHFN (BUILT_IN_LCEIL)
1864 CASE_MATHFN (BUILT_IN_LDEXP)
1865 CASE_MATHFN (BUILT_IN_LFLOOR)
1866 CASE_MATHFN (BUILT_IN_LGAMMA)
1867 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1868 CASE_MATHFN (BUILT_IN_LLCEIL)
1869 CASE_MATHFN (BUILT_IN_LLFLOOR)
1870 CASE_MATHFN (BUILT_IN_LLRINT)
1871 CASE_MATHFN (BUILT_IN_LLROUND)
1872 CASE_MATHFN (BUILT_IN_LOG)
1873 CASE_MATHFN (BUILT_IN_LOG10)
1874 CASE_MATHFN (BUILT_IN_LOG1P)
1875 CASE_MATHFN (BUILT_IN_LOG2)
1876 CASE_MATHFN (BUILT_IN_LOGB)
1877 CASE_MATHFN (BUILT_IN_LRINT)
1878 CASE_MATHFN (BUILT_IN_LROUND)
1879 CASE_MATHFN (BUILT_IN_MODF)
1880 CASE_MATHFN (BUILT_IN_NAN)
1881 CASE_MATHFN (BUILT_IN_NANS)
1882 CASE_MATHFN (BUILT_IN_NEARBYINT)
1883 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1884 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1885 CASE_MATHFN (BUILT_IN_POW)
1886 CASE_MATHFN (BUILT_IN_POWI)
1887 CASE_MATHFN (BUILT_IN_POW10)
1888 CASE_MATHFN (BUILT_IN_REMAINDER)
1889 CASE_MATHFN (BUILT_IN_REMQUO)
1890 CASE_MATHFN (BUILT_IN_RINT)
1891 CASE_MATHFN (BUILT_IN_ROUND)
1892 CASE_MATHFN (BUILT_IN_SCALB)
1893 CASE_MATHFN (BUILT_IN_SCALBLN)
1894 CASE_MATHFN (BUILT_IN_SCALBN)
1895 CASE_MATHFN (BUILT_IN_SIGNBIT)
1896 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1897 CASE_MATHFN (BUILT_IN_SIN)
1898 CASE_MATHFN (BUILT_IN_SINCOS)
1899 CASE_MATHFN (BUILT_IN_SINH)
1900 CASE_MATHFN (BUILT_IN_SQRT)
1901 CASE_MATHFN (BUILT_IN_TAN)
1902 CASE_MATHFN (BUILT_IN_TANH)
1903 CASE_MATHFN (BUILT_IN_TGAMMA)
1904 CASE_MATHFN (BUILT_IN_TRUNC)
1905 CASE_MATHFN (BUILT_IN_Y0)
1906 CASE_MATHFN (BUILT_IN_Y1)
1907 CASE_MATHFN (BUILT_IN_YN)
1908
1909 default:
1910 return NULL_TREE;
1911 }
1912
1913 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1914 fcode2 = fcode;
1915 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1916 fcode2 = fcodef;
1917 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1918 fcode2 = fcodel;
1919 else
1920 return NULL_TREE;
1921
1922 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1923 return NULL_TREE;
1924
1925 return builtin_decl_explicit (fcode2);
1926 }
1927
1928 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1929
1930 tree
1931 mathfn_built_in (tree type, enum built_in_function fn)
1932 {
1933 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1934 }
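
/* Usage sketch: a caller lowering a double computation to its float
   counterpart can ask for the matching decl, e.g.

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the decl for sqrtf, or NULL_TREE when the builtin is not
   implicitly available for the target and language. */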
1935
1936 /* If errno must be maintained, expand the RTL to check if the result,
1937 TARGET, of a built-in function call, EXP, is NaN, and if so set
1938 errno to EDOM. */
1939
1940 static void
1941 expand_errno_check (tree exp, rtx target)
1942 {
1943 rtx_code_label *lab = gen_label_rtx ();
1944
1945 /* Test the result; if it is NaN, set errno=EDOM because
1946 the argument was not in the domain. */
1947 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1948 NULL_RTX, NULL, lab,
1949 /* The jump is very likely. */
1950 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1951
1952 #ifdef TARGET_EDOM
1953 /* If this built-in doesn't throw an exception, set errno directly. */
1954 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1955 {
1956 #ifdef GEN_ERRNO_RTX
1957 rtx errno_rtx = GEN_ERRNO_RTX;
1958 #else
1959 rtx errno_rtx
1960 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1961 #endif
1962 emit_move_insn (errno_rtx,
1963 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1964 emit_label (lab);
1965 return;
1966 }
1967 #endif
1968
1969 /* Make sure the library call isn't expanded as a tail call. */
1970 CALL_EXPR_TAILCALL (exp) = 0;
1971
1972 /* We can't set errno=EDOM directly; let the library call do it.
1973 Pop the arguments right away in case the call gets deleted. */
1974 NO_DEFER_POP;
1975 expand_call (exp, target, 0);
1976 OK_DEFER_POP;
1977 emit_label (lab);
1978 }
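
/* The emitted check exploits the IEEE rule that only NaN compares unequal
   to itself; at the source level the expansion behaves roughly like this
   sketch:

     double r = the_math_fn (x);
     if (r != r)
       errno = EDOM;

   except that when TARGET_EDOM is unknown or the call may throw, the
   direct errno store is replaced by re-issuing the library call so that
   the library itself sets errno. */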
1979
1980 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1981 Return NULL_RTX if a normal call should be emitted rather than expanding
1982 the function in-line. EXP is the expression that is a call to the builtin
1983 function; if convenient, the result should be placed in TARGET.
1984 SUBTARGET may be used as the target for computing one of EXP's operands. */
1985
1986 static rtx
1987 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1988 {
1989 optab builtin_optab;
1990 rtx op0;
1991 rtx_insn *insns;
1992 tree fndecl = get_callee_fndecl (exp);
1993 machine_mode mode;
1994 bool errno_set = false;
1995 bool try_widening = false;
1996 tree arg;
1997
1998 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1999 return NULL_RTX;
2000
2001 arg = CALL_EXPR_ARG (exp, 0);
2002
2003 switch (DECL_FUNCTION_CODE (fndecl))
2004 {
2005 CASE_FLT_FN (BUILT_IN_SQRT):
2006 errno_set = ! tree_expr_nonnegative_p (arg);
2007 try_widening = true;
2008 builtin_optab = sqrt_optab;
2009 break;
2010 CASE_FLT_FN (BUILT_IN_EXP):
2011 errno_set = true; builtin_optab = exp_optab; break;
2012 CASE_FLT_FN (BUILT_IN_EXP10):
2013 CASE_FLT_FN (BUILT_IN_POW10):
2014 errno_set = true; builtin_optab = exp10_optab; break;
2015 CASE_FLT_FN (BUILT_IN_EXP2):
2016 errno_set = true; builtin_optab = exp2_optab; break;
2017 CASE_FLT_FN (BUILT_IN_EXPM1):
2018 errno_set = true; builtin_optab = expm1_optab; break;
2019 CASE_FLT_FN (BUILT_IN_LOGB):
2020 errno_set = true; builtin_optab = logb_optab; break;
2021 CASE_FLT_FN (BUILT_IN_LOG):
2022 errno_set = true; builtin_optab = log_optab; break;
2023 CASE_FLT_FN (BUILT_IN_LOG10):
2024 errno_set = true; builtin_optab = log10_optab; break;
2025 CASE_FLT_FN (BUILT_IN_LOG2):
2026 errno_set = true; builtin_optab = log2_optab; break;
2027 CASE_FLT_FN (BUILT_IN_LOG1P):
2028 errno_set = true; builtin_optab = log1p_optab; break;
2029 CASE_FLT_FN (BUILT_IN_ASIN):
2030 builtin_optab = asin_optab; break;
2031 CASE_FLT_FN (BUILT_IN_ACOS):
2032 builtin_optab = acos_optab; break;
2033 CASE_FLT_FN (BUILT_IN_TAN):
2034 builtin_optab = tan_optab; break;
2035 CASE_FLT_FN (BUILT_IN_ATAN):
2036 builtin_optab = atan_optab; break;
2037 CASE_FLT_FN (BUILT_IN_FLOOR):
2038 builtin_optab = floor_optab; break;
2039 CASE_FLT_FN (BUILT_IN_CEIL):
2040 builtin_optab = ceil_optab; break;
2041 CASE_FLT_FN (BUILT_IN_TRUNC):
2042 builtin_optab = btrunc_optab; break;
2043 CASE_FLT_FN (BUILT_IN_ROUND):
2044 builtin_optab = round_optab; break;
2045 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2046 builtin_optab = nearbyint_optab;
2047 if (flag_trapping_math)
2048 break;
2049 /* Otherwise fall through and expand as rint. */
2050 CASE_FLT_FN (BUILT_IN_RINT):
2051 builtin_optab = rint_optab; break;
2052 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2053 builtin_optab = significand_optab; break;
2054 default:
2055 gcc_unreachable ();
2056 }
2057
2058 /* Make a suitable register to place result in. */
2059 mode = TYPE_MODE (TREE_TYPE (exp));
2060
2061 if (! flag_errno_math || ! HONOR_NANS (mode))
2062 errno_set = false;
2063
2064 /* Before working hard, check whether the instruction is available, but try
2065 to widen the mode for specific operations. */
2066 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2067 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2068 && (!errno_set || !optimize_insn_for_size_p ()))
2069 {
2070 rtx result = gen_reg_rtx (mode);
2071
2072 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2073 need to expand the argument again. This way, we will not perform
2074 side-effects more than once. */
2075 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2076
2077 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2078
2079 start_sequence ();
2080
2081 /* Compute into RESULT.
2082 Set RESULT to wherever the result comes back. */
2083 result = expand_unop (mode, builtin_optab, op0, result, 0);
2084
2085 if (result != 0)
2086 {
2087 if (errno_set)
2088 expand_errno_check (exp, result);
2089
2090 /* Output the entire sequence. */
2091 insns = get_insns ();
2092 end_sequence ();
2093 emit_insn (insns);
2094 return result;
2095 }
2096
2097 /* If we were unable to expand via the builtin, stop the sequence
2098 (without outputting the insns) and call the library function
2099 with the stabilized argument list. */
2100 end_sequence ();
2101 }
2102
2103 return expand_call (exp, target, target == const0_rtx);
2104 }
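
/* Illustrative (a sketch; the output is target dependent): with
   -fno-math-errno on a target providing a sqrt<mode>2 pattern,

     double f (double x) { return __builtin_sqrt (x); }

   expands through sqrt_optab to a single square-root instruction. With
   errno maintenance enabled, the expansion above additionally wraps the
   result in the NaN/EDOM check, or punts to a library call when
   optimizing for size. */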
2105
2106 /* Expand a call to the builtin binary math functions (pow and atan2).
2107 Return NULL_RTX if a normal call should be emitted rather than expanding the
2108 function in-line. EXP is the expression that is a call to the builtin
2109 function; if convenient, the result should be placed in TARGET.
2110 SUBTARGET may be used as the target for computing one of EXP's
2111 operands. */
2112
2113 static rtx
2114 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2115 {
2116 optab builtin_optab;
2117 rtx op0, op1, result;
2118 rtx_insn *insns;
2119 int op1_type = REAL_TYPE;
2120 tree fndecl = get_callee_fndecl (exp);
2121 tree arg0, arg1;
2122 machine_mode mode;
2123 bool errno_set = true;
2124
2125 switch (DECL_FUNCTION_CODE (fndecl))
2126 {
2127 CASE_FLT_FN (BUILT_IN_SCALBN):
2128 CASE_FLT_FN (BUILT_IN_SCALBLN):
2129 CASE_FLT_FN (BUILT_IN_LDEXP):
2130 op1_type = INTEGER_TYPE;
2131 default:
2132 break;
2133 }
2134
2135 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2136 return NULL_RTX;
2137
2138 arg0 = CALL_EXPR_ARG (exp, 0);
2139 arg1 = CALL_EXPR_ARG (exp, 1);
2140
2141 switch (DECL_FUNCTION_CODE (fndecl))
2142 {
2143 CASE_FLT_FN (BUILT_IN_POW):
2144 builtin_optab = pow_optab; break;
2145 CASE_FLT_FN (BUILT_IN_ATAN2):
2146 builtin_optab = atan2_optab; break;
2147 CASE_FLT_FN (BUILT_IN_SCALB):
2148 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2149 return 0;
2150 builtin_optab = scalb_optab; break;
2151 CASE_FLT_FN (BUILT_IN_SCALBN):
2152 CASE_FLT_FN (BUILT_IN_SCALBLN):
2153 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2154 return 0;
2155 /* Fall through... */
2156 CASE_FLT_FN (BUILT_IN_LDEXP):
2157 builtin_optab = ldexp_optab; break;
2158 CASE_FLT_FN (BUILT_IN_FMOD):
2159 builtin_optab = fmod_optab; break;
2160 CASE_FLT_FN (BUILT_IN_REMAINDER):
2161 CASE_FLT_FN (BUILT_IN_DREM):
2162 builtin_optab = remainder_optab; break;
2163 default:
2164 gcc_unreachable ();
2165 }
2166
2167 /* Make a suitable register to place result in. */
2168 mode = TYPE_MODE (TREE_TYPE (exp));
2169
2170 /* Before working hard, check whether the instruction is available. */
2171 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2172 return NULL_RTX;
2173
2174 result = gen_reg_rtx (mode);
2175
2176 if (! flag_errno_math || ! HONOR_NANS (mode))
2177 errno_set = false;
2178
2179 if (errno_set && optimize_insn_for_size_p ())
2180 return 0;
2181
2182 /* Always stabilize the argument list. */
2183 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2184 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2185
2186 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2187 op1 = expand_normal (arg1);
2188
2189 start_sequence ();
2190
2191 /* Compute into RESULT.
2192 Set RESULT to wherever the result comes back. */
2193 result = expand_binop (mode, builtin_optab, op0, op1,
2194 result, 0, OPTAB_DIRECT);
2195
2196 /* If we were unable to expand via the builtin, stop the sequence
2197 (without outputting the insns) and call the library function
2198 with the stabilized argument list. */
2199 if (result == 0)
2200 {
2201 end_sequence ();
2202 return expand_call (exp, target, target == const0_rtx);
2203 }
2204
2205 if (errno_set)
2206 expand_errno_check (exp, result);
2207
2208 /* Output the entire sequence. */
2209 insns = get_insns ();
2210 end_sequence ();
2211 emit_insn (insns);
2212
2213 return result;
2214 }
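
/* Illustrative (a sketch): when the target implements ldexp_optab,

     double f (double x, int e) { return __builtin_ldexp (x, e); }

   expands inline. The radix checks above matter because scalb, scalbn
   and scalbln scale by FLT_RADIX, so they are only equivalent to ldexp
   when the mode's format has b == 2; otherwise we return 0 and a normal
   libcall is emitted. */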
2215
2216 /* Expand a call to the builtin trinary math functions (fma).
2217 Return NULL_RTX if a normal call should be emitted rather than expanding the
2218 function in-line. EXP is the expression that is a call to the builtin
2219 function; if convenient, the result should be placed in TARGET.
2220 SUBTARGET may be used as the target for computing one of EXP's
2221 operands. */
2222
2223 static rtx
2224 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2225 {
2226 optab builtin_optab;
2227 rtx op0, op1, op2, result;
2228 rtx_insn *insns;
2229 tree fndecl = get_callee_fndecl (exp);
2230 tree arg0, arg1, arg2;
2231 machine_mode mode;
2232
2233 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2234 return NULL_RTX;
2235
2236 arg0 = CALL_EXPR_ARG (exp, 0);
2237 arg1 = CALL_EXPR_ARG (exp, 1);
2238 arg2 = CALL_EXPR_ARG (exp, 2);
2239
2240 switch (DECL_FUNCTION_CODE (fndecl))
2241 {
2242 CASE_FLT_FN (BUILT_IN_FMA):
2243 builtin_optab = fma_optab; break;
2244 default:
2245 gcc_unreachable ();
2246 }
2247
2248 /* Make a suitable register to place result in. */
2249 mode = TYPE_MODE (TREE_TYPE (exp));
2250
2251 /* Before working hard, check whether the instruction is available. */
2252 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2253 return NULL_RTX;
2254
2255 result = gen_reg_rtx (mode);
2256
2257 /* Always stabilize the argument list. */
2258 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2259 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2260 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2261
2262 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2263 op1 = expand_normal (arg1);
2264 op2 = expand_normal (arg2);
2265
2266 start_sequence ();
2267
2268 /* Compute into RESULT.
2269 Set RESULT to wherever the result comes back. */
2270 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2271 result, 0);
2272
2273 /* If we were unable to expand via the builtin, stop the sequence
2274 (without outputting the insns) and call the library function
2275 with the stabilized argument list. */
2276 if (result == 0)
2277 {
2278 end_sequence ();
2279 return expand_call (exp, target, target == const0_rtx);
2280 }
2281
2282 /* Output the entire sequence. */
2283 insns = get_insns ();
2284 end_sequence ();
2285 emit_insn (insns);
2286
2287 return result;
2288 }
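
/* Illustrative (a sketch): on a target with a fused multiply-add pattern,

     double f (double x, double y, double z)
     { return __builtin_fma (x, y, z); }

   expands through fma_optab to one fused instruction with a single
   rounding step, rather than a separate multiply and add. */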
2289
2290 /* Expand a call to the builtin sin and cos math functions.
2291 Return NULL_RTX if a normal call should be emitted rather than expanding the
2292 function in-line. EXP is the expression that is a call to the builtin
2293 function; if convenient, the result should be placed in TARGET.
2294 SUBTARGET may be used as the target for computing one of EXP's
2295 operands. */
2296
2297 static rtx
2298 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2299 {
2300 optab builtin_optab;
2301 rtx op0;
2302 rtx_insn *insns;
2303 tree fndecl = get_callee_fndecl (exp);
2304 machine_mode mode;
2305 tree arg;
2306
2307 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2308 return NULL_RTX;
2309
2310 arg = CALL_EXPR_ARG (exp, 0);
2311
2312 switch (DECL_FUNCTION_CODE (fndecl))
2313 {
2314 CASE_FLT_FN (BUILT_IN_SIN):
2315 CASE_FLT_FN (BUILT_IN_COS):
2316 builtin_optab = sincos_optab; break;
2317 default:
2318 gcc_unreachable ();
2319 }
2320
2321 /* Make a suitable register to place result in. */
2322 mode = TYPE_MODE (TREE_TYPE (exp));
2323
2324 /* Check if the sincos insn is available; otherwise fall back
2325 to the sin or cos insn. */
2326 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2327 switch (DECL_FUNCTION_CODE (fndecl))
2328 {
2329 CASE_FLT_FN (BUILT_IN_SIN):
2330 builtin_optab = sin_optab; break;
2331 CASE_FLT_FN (BUILT_IN_COS):
2332 builtin_optab = cos_optab; break;
2333 default:
2334 gcc_unreachable ();
2335 }
2336
2337 /* Before working hard, check whether the instruction is available. */
2338 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2339 {
2340 rtx result = gen_reg_rtx (mode);
2341
2342 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2343 need to expand the argument again. This way, we will not perform
2344 side-effects more than once. */
2345 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2346
2347 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2348
2349 start_sequence ();
2350
2351 /* Compute into RESULT.
2352 Set RESULT to wherever the result comes back. */
2353 if (builtin_optab == sincos_optab)
2354 {
2355 int ok;
2356
2357 switch (DECL_FUNCTION_CODE (fndecl))
2358 {
2359 CASE_FLT_FN (BUILT_IN_SIN):
2360 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2361 break;
2362 CASE_FLT_FN (BUILT_IN_COS):
2363 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2364 break;
2365 default:
2366 gcc_unreachable ();
2367 }
2368 gcc_assert (ok);
2369 }
2370 else
2371 result = expand_unop (mode, builtin_optab, op0, result, 0);
2372
2373 if (result != 0)
2374 {
2375 /* Output the entire sequence. */
2376 insns = get_insns ();
2377 end_sequence ();
2378 emit_insn (insns);
2379 return result;
2380 }
2381
2382 /* If we were unable to expand via the builtin, stop the sequence
2383 (without outputting the insns) and call the library function
2384 with the stabilized argument list. */
2385 end_sequence ();
2386 }
2387
2388 return expand_call (exp, target, target == const0_rtx);
2389 }
2390
2391 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2392 return an RTL instruction code that implements the functionality.
2393 If that isn't possible or available return CODE_FOR_nothing. */
2394
2395 static enum insn_code
2396 interclass_mathfn_icode (tree arg, tree fndecl)
2397 {
2398 bool errno_set = false;
2399 optab builtin_optab = unknown_optab;
2400 machine_mode mode;
2401
2402 switch (DECL_FUNCTION_CODE (fndecl))
2403 {
2404 CASE_FLT_FN (BUILT_IN_ILOGB):
2405 errno_set = true; builtin_optab = ilogb_optab; break;
2406 CASE_FLT_FN (BUILT_IN_ISINF):
2407 builtin_optab = isinf_optab; break;
2408 case BUILT_IN_ISNORMAL:
2409 case BUILT_IN_ISFINITE:
2410 CASE_FLT_FN (BUILT_IN_FINITE):
2411 case BUILT_IN_FINITED32:
2412 case BUILT_IN_FINITED64:
2413 case BUILT_IN_FINITED128:
2414 case BUILT_IN_ISINFD32:
2415 case BUILT_IN_ISINFD64:
2416 case BUILT_IN_ISINFD128:
2417 /* These builtins have no optabs (yet). */
2418 break;
2419 default:
2420 gcc_unreachable ();
2421 }
2422
2423 /* There's no easy way to detect the case we need to set EDOM. */
2424 if (flag_errno_math && errno_set)
2425 return CODE_FOR_nothing;
2426
2427 /* Optab mode depends on the mode of the input argument. */
2428 mode = TYPE_MODE (TREE_TYPE (arg));
2429
2430 if (builtin_optab)
2431 return optab_handler (builtin_optab, mode);
2432 return CODE_FOR_nothing;
2433 }
2434
2435 /* Expand a call to one of the builtin math functions that operate on
2436 a floating point argument and output an integer result (ilogb, isinf,
2437 isnan, etc.).
2438 Return 0 if a normal call should be emitted rather than expanding the
2439 function in-line. EXP is the expression that is a call to the builtin
2440 function; if convenient, the result should be placed in TARGET. */
2441
2442 static rtx
2443 expand_builtin_interclass_mathfn (tree exp, rtx target)
2444 {
2445 enum insn_code icode = CODE_FOR_nothing;
2446 rtx op0;
2447 tree fndecl = get_callee_fndecl (exp);
2448 machine_mode mode;
2449 tree arg;
2450
2451 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2452 return NULL_RTX;
2453
2454 arg = CALL_EXPR_ARG (exp, 0);
2455 icode = interclass_mathfn_icode (arg, fndecl);
2456 mode = TYPE_MODE (TREE_TYPE (arg));
2457
2458 if (icode != CODE_FOR_nothing)
2459 {
2460 struct expand_operand ops[1];
2461 rtx_insn *last = get_last_insn ();
2462 tree orig_arg = arg;
2463
2464 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2465 need to expand the argument again. This way, we will not perform
2466 side-effects more than once. */
2467 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2468
2469 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2470
2471 if (mode != GET_MODE (op0))
2472 op0 = convert_to_mode (mode, op0, 0);
2473
2474 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2475 if (maybe_legitimize_operands (icode, 0, 1, ops)
2476 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2477 return ops[0].value;
2478
2479 delete_insns_since (last);
2480 CALL_EXPR_ARG (exp, 0) = orig_arg;
2481 }
2482
2483 return NULL_RTX;
2484 }
2485
2486 /* Expand a call to the builtin sincos math function.
2487 Return NULL_RTX if a normal call should be emitted rather than expanding the
2488 function in-line. EXP is the expression that is a call to the builtin
2489 function. */
2490
2491 static rtx
2492 expand_builtin_sincos (tree exp)
2493 {
2494 rtx op0, op1, op2, target1, target2;
2495 machine_mode mode;
2496 tree arg, sinp, cosp;
2497 int result;
2498 location_t loc = EXPR_LOCATION (exp);
2499 tree alias_type, alias_off;
2500
2501 if (!validate_arglist (exp, REAL_TYPE,
2502 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2503 return NULL_RTX;
2504
2505 arg = CALL_EXPR_ARG (exp, 0);
2506 sinp = CALL_EXPR_ARG (exp, 1);
2507 cosp = CALL_EXPR_ARG (exp, 2);
2508
2509 /* Make a suitable register to place result in. */
2510 mode = TYPE_MODE (TREE_TYPE (arg));
2511
2512 /* Check if the sincos insn is available; if not, emit a normal call. */
2513 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2514 return NULL_RTX;
2515
2516 target1 = gen_reg_rtx (mode);
2517 target2 = gen_reg_rtx (mode);
2518
2519 op0 = expand_normal (arg);
2520 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2521 alias_off = build_int_cst (alias_type, 0);
2522 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2523 sinp, alias_off));
2524 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2525 cosp, alias_off));
2526
2527 /* Compute into target1 and target2.
2528 Set TARGET to wherever the result comes back. */
2529 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2530 gcc_assert (result);
2531
2532 /* Move target1 and target2 to the memory locations indicated
2533 by op1 and op2. */
2534 emit_move_insn (op1, target1);
2535 emit_move_insn (op2, target2);
2536
2537 return const0_rtx;
2538 }
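
/* Illustrative (a sketch): when sincos_optab is implemented,

     void f (double x, double *s, double *c) { sincos (x, s, c); }

   computes both results with a single insn (sequence), and the two
   emit_move_insn calls above store them through the user pointers. */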
2539
2540 /* Expand a call to the internal cexpi builtin via the sincos math function.
2541 EXP is the expression that is a call to the builtin function; if convenient,
2542 the result should be placed in TARGET. */
2543
2544 static rtx
2545 expand_builtin_cexpi (tree exp, rtx target)
2546 {
2547 tree fndecl = get_callee_fndecl (exp);
2548 tree arg, type;
2549 machine_mode mode;
2550 rtx op0, op1, op2;
2551 location_t loc = EXPR_LOCATION (exp);
2552
2553 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2554 return NULL_RTX;
2555
2556 arg = CALL_EXPR_ARG (exp, 0);
2557 type = TREE_TYPE (arg);
2558 mode = TYPE_MODE (TREE_TYPE (arg));
2559
2560 /* Try expanding via a sincos optab; fall back to emitting a libcall
2561 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2562 is only generated from sincos or cexp, or when we know we have them. */
2563 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2564 {
2565 op1 = gen_reg_rtx (mode);
2566 op2 = gen_reg_rtx (mode);
2567
2568 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2569
2570 /* Compute into op1 and op2. */
2571 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2572 }
2573 else if (targetm.libc_has_function (function_sincos))
2574 {
2575 tree call, fn = NULL_TREE;
2576 tree top1, top2;
2577 rtx op1a, op2a;
2578
2579 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2580 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2581 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2582 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2583 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2584 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2585 else
2586 gcc_unreachable ();
2587
2588 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2589 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2590 op1a = copy_addr_to_reg (XEXP (op1, 0));
2591 op2a = copy_addr_to_reg (XEXP (op2, 0));
2592 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2593 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2594
2595 /* Make sure not to fold the sincos call again. */
2596 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2597 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2598 call, 3, arg, top1, top2));
2599 }
2600 else
2601 {
2602 tree call, fn = NULL_TREE, narg;
2603 tree ctype = build_complex_type (type);
2604
2605 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2606 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2607 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2608 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2610 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2611 else
2612 gcc_unreachable ();
2613
2614 /* If we don't have a decl for cexp, create one. This is the
2615 friendliest fallback if the user calls __builtin_cexpi
2616 on a target without full C99 function support. */
2617 if (fn == NULL_TREE)
2618 {
2619 tree fntype;
2620 const char *name = NULL;
2621
2622 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2623 name = "cexpf";
2624 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2625 name = "cexp";
2626 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2627 name = "cexpl";
2628
2629 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2630 fn = build_fn_decl (name, fntype);
2631 }
2632
2633 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2634 build_real (type, dconst0), arg);
2635
2636 /* Make sure not to fold the cexp call again. */
2637 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2638 return expand_expr (build_call_nary (ctype, call, 1, narg),
2639 target, VOIDmode, EXPAND_NORMAL);
2640 }
2641
2642 /* Now build the proper return type. */
2643 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2644 make_tree (TREE_TYPE (arg), op2),
2645 make_tree (TREE_TYPE (arg), op1)),
2646 target, VOIDmode, EXPAND_NORMAL);
2647 }
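
/* In effect (a sketch), __builtin_cexpi (x) is lowered by one of the three
   strategies above:

     sincos_optab: a single insn computing both sin (x) and cos (x);
     sincos:       a call sincos (x, &s, &c) through stack temporaries;
     cexp:         a call cexp (0.0 + x*i) yielding the complex result;

   with the first two finishing by assembling cos (x) + sin (x)*i as a
   COMPLEX_EXPR. */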
2648
2649 /* Conveniently construct a function call expression. FNDECL names the
2650 function to be called, N is the number of arguments, and the "..."
2651 parameters are the argument expressions. Unlike build_call_expr,
2652 this doesn't fold the call; hence it will always return a CALL_EXPR. */
2653
2654 static tree
2655 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2656 {
2657 va_list ap;
2658 tree fntype = TREE_TYPE (fndecl);
2659 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2660
2661 va_start (ap, n);
2662 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2663 va_end (ap);
2664 SET_EXPR_LOCATION (fn, loc);
2665 return fn;
2666 }
2667
2668 /* Expand a call to one of the builtin rounding functions gcc defines
2669 as an extension (lfloor and lceil). As these are gcc extensions we
2670 do not need to worry about setting errno to EDOM.
2671 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2672 EXP is the expression that is a call to the builtin function;
2673 if convenient, the result should be placed in TARGET. */
2674
2675 static rtx
2676 expand_builtin_int_roundingfn (tree exp, rtx target)
2677 {
2678 convert_optab builtin_optab;
2679 rtx op0, tmp;
2680 rtx_insn *insns;
2681 tree fndecl = get_callee_fndecl (exp);
2682 enum built_in_function fallback_fn;
2683 tree fallback_fndecl;
2684 machine_mode mode;
2685 tree arg;
2686
2687 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2688 gcc_unreachable ();
2689
2690 arg = CALL_EXPR_ARG (exp, 0);
2691
2692 switch (DECL_FUNCTION_CODE (fndecl))
2693 {
2694 CASE_FLT_FN (BUILT_IN_ICEIL):
2695 CASE_FLT_FN (BUILT_IN_LCEIL):
2696 CASE_FLT_FN (BUILT_IN_LLCEIL):
2697 builtin_optab = lceil_optab;
2698 fallback_fn = BUILT_IN_CEIL;
2699 break;
2700
2701 CASE_FLT_FN (BUILT_IN_IFLOOR):
2702 CASE_FLT_FN (BUILT_IN_LFLOOR):
2703 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2704 builtin_optab = lfloor_optab;
2705 fallback_fn = BUILT_IN_FLOOR;
2706 break;
2707
2708 default:
2709 gcc_unreachable ();
2710 }
2711
2712 /* Make a suitable register to place result in. */
2713 mode = TYPE_MODE (TREE_TYPE (exp));
2714
2715 target = gen_reg_rtx (mode);
2716
2717 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2718 need to expand the argument again. This way, we will not perform
2719 side-effects more than once. */
2720 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2721
2722 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2723
2724 start_sequence ();
2725
2726 /* Compute into TARGET. */
2727 if (expand_sfix_optab (target, op0, builtin_optab))
2728 {
2729 /* Output the entire sequence. */
2730 insns = get_insns ();
2731 end_sequence ();
2732 emit_insn (insns);
2733 return target;
2734 }
2735
2736 /* If we were unable to expand via the builtin, stop the sequence
2737 (without outputting the insns). */
2738 end_sequence ();
2739
2740 /* Fall back to floating point rounding optab. */
2741 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2742
2743 /* For non-C99 targets we may end up without a fallback fndecl here
2744 if the user called __builtin_lfloor directly. In this case emit
2745 a call to the floor/ceil variants nevertheless. This should result
2746 in the best user experience for targets without full C99 support. */
2747 if (fallback_fndecl == NULL_TREE)
2748 {
2749 tree fntype;
2750 const char *name = NULL;
2751
2752 switch (DECL_FUNCTION_CODE (fndecl))
2753 {
2754 case BUILT_IN_ICEIL:
2755 case BUILT_IN_LCEIL:
2756 case BUILT_IN_LLCEIL:
2757 name = "ceil";
2758 break;
2759 case BUILT_IN_ICEILF:
2760 case BUILT_IN_LCEILF:
2761 case BUILT_IN_LLCEILF:
2762 name = "ceilf";
2763 break;
2764 case BUILT_IN_ICEILL:
2765 case BUILT_IN_LCEILL:
2766 case BUILT_IN_LLCEILL:
2767 name = "ceill";
2768 break;
2769 case BUILT_IN_IFLOOR:
2770 case BUILT_IN_LFLOOR:
2771 case BUILT_IN_LLFLOOR:
2772 name = "floor";
2773 break;
2774 case BUILT_IN_IFLOORF:
2775 case BUILT_IN_LFLOORF:
2776 case BUILT_IN_LLFLOORF:
2777 name = "floorf";
2778 break;
2779 case BUILT_IN_IFLOORL:
2780 case BUILT_IN_LFLOORL:
2781 case BUILT_IN_LLFLOORL:
2782 name = "floorl";
2783 break;
2784 default:
2785 gcc_unreachable ();
2786 }
2787
2788 fntype = build_function_type_list (TREE_TYPE (arg),
2789 TREE_TYPE (arg), NULL_TREE);
2790 fallback_fndecl = build_fn_decl (name, fntype);
2791 }
2792
2793 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2794
2795 tmp = expand_normal (exp);
2796 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2797
2798 /* Truncate the result of the floating point optab to an integer
2799 via expand_fix (). */
2800 target = gen_reg_rtx (mode);
2801 expand_fix (target, tmp, 0);
2802
2803 return target;
2804 }
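
/* Illustrative (a sketch): without an lfloor pattern for the mode pair,

     long f (double x) { return __builtin_lfloor (x); }

   is lowered to roughly (long) floor (x): a call to floor - whose decl
   is built on the fly above for non-C99 targets - followed by
   expand_fix for the final float-to-integer conversion. */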
2805
2806 /* Expand a call to one of the builtin math functions doing integer
2807 conversion (lrint).
2808 Return 0 if a normal call should be emitted rather than expanding the
2809 function in-line. EXP is the expression that is a call to the builtin
2810 function; if convenient, the result should be placed in TARGET. */
2811
2812 static rtx
2813 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2814 {
2815 convert_optab builtin_optab;
2816 rtx op0;
2817 rtx_insn *insns;
2818 tree fndecl = get_callee_fndecl (exp);
2819 tree arg;
2820 machine_mode mode;
2821 enum built_in_function fallback_fn = BUILT_IN_NONE;
2822
2823 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2824 gcc_unreachable ();
2825
2826 arg = CALL_EXPR_ARG (exp, 0);
2827
2828 switch (DECL_FUNCTION_CODE (fndecl))
2829 {
2830 CASE_FLT_FN (BUILT_IN_IRINT):
2831 fallback_fn = BUILT_IN_LRINT;
2832 /* FALLTHRU */
2833 CASE_FLT_FN (BUILT_IN_LRINT):
2834 CASE_FLT_FN (BUILT_IN_LLRINT):
2835 builtin_optab = lrint_optab;
2836 break;
2837
2838 CASE_FLT_FN (BUILT_IN_IROUND):
2839 fallback_fn = BUILT_IN_LROUND;
2840 /* FALLTHRU */
2841 CASE_FLT_FN (BUILT_IN_LROUND):
2842 CASE_FLT_FN (BUILT_IN_LLROUND):
2843 builtin_optab = lround_optab;
2844 break;
2845
2846 default:
2847 gcc_unreachable ();
2848 }
2849
2850 /* There's no easy way to detect the case we need to set EDOM. */
2851 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2852 return NULL_RTX;
2853
2854 /* Make a suitable register to place result in. */
2855 mode = TYPE_MODE (TREE_TYPE (exp));
2856
2857 /* If errno does not need to be maintained, try expanding directly. */
2858 if (!flag_errno_math)
2859 {
2860 rtx result = gen_reg_rtx (mode);
2861
2862 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2863 need to expand the argument again. This way, we will not perform
2864 side-effects more than once. */
2865 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2866
2867 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2868
2869 start_sequence ();
2870
2871 if (expand_sfix_optab (result, op0, builtin_optab))
2872 {
2873 /* Output the entire sequence. */
2874 insns = get_insns ();
2875 end_sequence ();
2876 emit_insn (insns);
2877 return result;
2878 }
2879
2880 /* If we were unable to expand via the builtin, stop the sequence
2881 (without outputting the insns) and call the library function
2882 with the stabilized argument list. */
2883 end_sequence ();
2884 }
2885
2886 if (fallback_fn != BUILT_IN_NONE)
2887 {
2888 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2889 targets, (int) round (x) should never be transformed into
2890 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2891 a call to lround in the hope that the target provides at least some
2892 C99 functions. This should result in the best user experience for
2893 targets without full C99 support. */
2894 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2895 fallback_fn, 0);
2896
2897 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2898 fallback_fndecl, 1, arg);
2899
2900 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2901 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2902 return convert_to_mode (mode, target, 0);
2903 }
2904
2905 return expand_call (exp, target, target == const0_rtx);
2906 }
2907
2908 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2909 a normal call should be emitted rather than expanding the function
2910 in-line. EXP is the expression that is a call to the builtin
2911 function; if convenient, the result should be placed in TARGET. */
2912
2913 static rtx
2914 expand_builtin_powi (tree exp, rtx target)
2915 {
2916 tree arg0, arg1;
2917 rtx op0, op1;
2918 machine_mode mode;
2919 machine_mode mode2;
2920
2921 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2922 return NULL_RTX;
2923
2924 arg0 = CALL_EXPR_ARG (exp, 0);
2925 arg1 = CALL_EXPR_ARG (exp, 1);
2926 mode = TYPE_MODE (TREE_TYPE (exp));
2927
2928 /* Emit a libcall to libgcc. */
2929
2930 /* Mode of the 2nd argument must match that of an int. */
2931 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2932
2933 if (target == NULL_RTX)
2934 target = gen_reg_rtx (mode);
2935
2936 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2937 if (GET_MODE (op0) != mode)
2938 op0 = convert_to_mode (mode, op0, 0);
2939 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2940 if (GET_MODE (op1) != mode2)
2941 op1 = convert_to_mode (mode2, op1, 0);
2942
2943 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2944 target, LCT_CONST, mode, 2,
2945 op0, mode, op1, mode2);
2946
2947 return target;
2948 }
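
/* Illustrative (a sketch): the expansion above always goes through the
   libgcc helper, so e.g.

     double f (double x, int n) { return __builtin_powi (x, n); }

   becomes a call to __powidf2 (x, n). Calls with small constant
   exponents are normally strength-reduced into multiplies by earlier
   tree passes and never reach this expander. */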
2949
2950 /* Expand expression EXP which is a call to the strlen builtin. Return
2951 NULL_RTX if we failed; the caller should then emit a normal call.
2952 Otherwise try to get the result in TARGET, if convenient. */
2953
2954 static rtx
2955 expand_builtin_strlen (tree exp, rtx target,
2956 machine_mode target_mode)
2957 {
2958 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
2960 else
2961 {
2962 struct expand_operand ops[4];
2963 rtx pat;
2964 tree len;
2965 tree src = CALL_EXPR_ARG (exp, 0);
2966 rtx src_reg;
2967 rtx_insn *before_strlen;
2968 machine_mode insn_mode = target_mode;
2969 enum insn_code icode = CODE_FOR_nothing;
2970 unsigned int align;
2971
2972 /* If the length can be computed at compile-time, return it. */
2973 len = c_strlen (src, 0);
2974 if (len)
2975 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2976
2977 /* If the length can be computed at compile-time and is a constant
2978 integer, but there are side-effects in src, evaluate
2979 src for side-effects, then return len.
2980 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2981 can be optimized into: i++; x = 3; */
2982 len = c_strlen (src, 1);
2983 if (len && TREE_CODE (len) == INTEGER_CST)
2984 {
2985 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2986 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2987 }
2988
2989 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2990
2991 /* If SRC is not a pointer type, don't do this operation inline. */
2992 if (align == 0)
2993 return NULL_RTX;
2994
2995 /* Bail out if we can't compute strlen in the right mode. */
2996 while (insn_mode != VOIDmode)
2997 {
2998 icode = optab_handler (strlen_optab, insn_mode);
2999 if (icode != CODE_FOR_nothing)
3000 break;
3001
3002 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3003 }
3004 if (insn_mode == VOIDmode)
3005 return NULL_RTX;
3006
3007 /* Make a place to hold the source address. We will not expand
3008 the actual source until we are sure that the expansion will
3009 not fail -- there are trees that cannot be expanded twice. */
3010 src_reg = gen_reg_rtx (Pmode);
3011
3012 /* Mark the beginning of the strlen sequence so we can emit the
3013 source operand later. */
3014 before_strlen = get_last_insn ();
3015
3016 create_output_operand (&ops[0], target, insn_mode);
3017 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3018 create_integer_operand (&ops[2], 0);
3019 create_integer_operand (&ops[3], align);
3020 if (!maybe_expand_insn (icode, 4, ops))
3021 return NULL_RTX;
3022
3023 /* Now that we are assured of success, expand the source. */
3024 start_sequence ();
3025 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3026 if (pat != src_reg)
3027 {
3028 #ifdef POINTERS_EXTEND_UNSIGNED
3029 if (GET_MODE (pat) != Pmode)
3030 pat = convert_to_mode (Pmode, pat,
3031 POINTERS_EXTEND_UNSIGNED);
3032 #endif
3033 emit_move_insn (src_reg, pat);
3034 }
3035 pat = get_insns ();
3036 end_sequence ();
3037
3038 if (before_strlen)
3039 emit_insn_after (pat, before_strlen);
3040 else
3041 emit_insn_before (pat, get_insns ());
3042
3043 /* Return the value in the proper mode for this function. */
3044 if (GET_MODE (ops[0].value) == target_mode)
3045 target = ops[0].value;
3046 else if (target != 0)
3047 convert_move (target, ops[0].value, 0);
3048 else
3049 target = convert_to_mode (target_mode, ops[0].value, 0);
3050
3051 return target;
3052 }
3053 }
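
/* Illustrative (a sketch): most targets provide no strlen pattern, so the
   expansion above usually returns NULL_RTX and a normal libcall results;
   the interesting wins are the constant cases handled first, e.g.

     size_t n = strlen ("abcdef");

   which c_strlen folds to 6 with no code emitted for the call at all. */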
3054
3055 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3056 bytes from constant string DATA + OFFSET and return it as a target
3057 constant. */
3058
3059 static rtx
3060 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3061 machine_mode mode)
3062 {
3063 const char *str = (const char *) data;
3064
3065 gcc_assert (offset >= 0
3066 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3067 <= strlen (str) + 1));
3068
3069 return c_readstr (str + offset, mode);
3070 }
3071
3072 /* LEN specifies the length of the block for a memcpy/memset operation.
3073 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3074 In some cases we can make a very likely guess about the max size; we
3075 then store it into PROBABLE_MAX_SIZE. */
3076
3077 static void
3078 determine_block_size (tree len, rtx len_rtx,
3079 unsigned HOST_WIDE_INT *min_size,
3080 unsigned HOST_WIDE_INT *max_size,
3081 unsigned HOST_WIDE_INT *probable_max_size)
3082 {
3083 if (CONST_INT_P (len_rtx))
3084 {
3085 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3086 return;
3087 }
3088 else
3089 {
3090 wide_int min, max;
3091 enum value_range_type range_type = VR_UNDEFINED;
3092
3093 /* Determine bounds from the type. */
3094 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3095 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3096 else
3097 *min_size = 0;
3098 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3099 *probable_max_size = *max_size
3100 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3101 else
3102 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3103
3104 if (TREE_CODE (len) == SSA_NAME)
3105 range_type = get_range_info (len, &min, &max);
3106 if (range_type == VR_RANGE)
3107 {
3108 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3109 *min_size = min.to_uhwi ();
3110 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3111 *probable_max_size = *max_size = max.to_uhwi ();
3112 }
3113 else if (range_type == VR_ANTI_RANGE)
3114 {
3115 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3116 if (min == 0)
3117 {
3118 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3119 *min_size = max.to_uhwi () + 1;
3120 }
3121 /* Code like
3122
3123 int n;
3124 if (n < 100)
3125 memcpy (a, b, n);
3126
3127 produces an anti range allowing negative values of N. We can
3128 still use this information to guess that N is not negative.
3129 */
3130 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3131 *probable_max_size = min.to_uhwi () - 1;
3132 }
3133 }
3134 gcc_checking_assert (*max_size <=
3135 (unsigned HOST_WIDE_INT)
3136 GET_MODE_MASK (GET_MODE (len_rtx)));
3137 }
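
/* Illustrative (a sketch) of the VR_RANGE case above: in

     void f (char *a, char *b, unsigned n)
     {
       if (n < 100)
         memcpy (a, b, n);
     }

   the SSA range of N at the memcpy is [0, 99], so *max_size (and
   *probable_max_size) become 99 and the block-move expander can pick an
   inline strategy suited to small copies. */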
3138
3139 /* Helper function to do the actual work for expand_builtin_memcpy. */
3140
3141 static rtx
3142 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3143 {
3144 const char *src_str;
3145 unsigned int src_align = get_pointer_alignment (src);
3146 unsigned int dest_align = get_pointer_alignment (dest);
3147 rtx dest_mem, src_mem, dest_addr, len_rtx;
3148 HOST_WIDE_INT expected_size = -1;
3149 unsigned int expected_align = 0;
3150 unsigned HOST_WIDE_INT min_size;
3151 unsigned HOST_WIDE_INT max_size;
3152 unsigned HOST_WIDE_INT probable_max_size;
3153
3154 /* If DEST is not a pointer type, call the normal function. */
3155 if (dest_align == 0)
3156 return NULL_RTX;
3157
3158 /* If SRC is not a pointer type, don't do this
3159 operation in-line. */
3160 if (src_align == 0)
3161 return NULL_RTX;
3162
3163 if (currently_expanding_gimple_stmt)
3164 stringop_block_profile (currently_expanding_gimple_stmt,
3165 &expected_align, &expected_size);
3166
3167 if (expected_align < dest_align)
3168 expected_align = dest_align;
3169 dest_mem = get_memory_rtx (dest, len);
3170 set_mem_align (dest_mem, dest_align);
3171 len_rtx = expand_normal (len);
3172 determine_block_size (len, len_rtx, &min_size, &max_size,
3173 &probable_max_size);
3174 src_str = c_getstr (src);
3175
3176 /* If SRC is a string constant and block move would be done
3177 by pieces, we can avoid loading the string from memory
3178 and only store the computed constants. */
3179 if (src_str
3180 && CONST_INT_P (len_rtx)
3181 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3182 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3183 CONST_CAST (char *, src_str),
3184 dest_align, false))
3185 {
3186 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3187 builtin_memcpy_read_str,
3188 CONST_CAST (char *, src_str),
3189 dest_align, false, 0);
3190 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3191 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3192 return dest_mem;
3193 }
3194
3195 src_mem = get_memory_rtx (src, len);
3196 set_mem_align (src_mem, src_align);
3197
3198 /* Copy word part most expediently. */
3199 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3200 CALL_EXPR_TAILCALL (exp)
3201 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3202 expected_align, expected_size,
3203 min_size, max_size, probable_max_size);
3204
3205 if (dest_addr == 0)
3206 {
3207 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3208 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3209 }
3210
3211 return dest_addr;
3212 }
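
/* Illustrative (a sketch) of the store_by_pieces shortcut above:

     char buf[8];
     memcpy (buf, "hi!", 4);

   needs no runtime copy at all: the four bytes (including the NUL) are
   read from the string constant by builtin_memcpy_read_str and stored
   as immediate data. */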
3213
3214 /* Expand a call EXP to the memcpy builtin.
3215 Return NULL_RTX if we failed; the caller should then emit a normal call.
3216 Otherwise try to get the result in TARGET, if convenient (and in
3217 mode MODE if that's convenient). */
3218
3219 static rtx
3220 expand_builtin_memcpy (tree exp, rtx target)
3221 {
3222 if (!validate_arglist (exp,
3223 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3224 return NULL_RTX;
3225 else
3226 {
3227 tree dest = CALL_EXPR_ARG (exp, 0);
3228 tree src = CALL_EXPR_ARG (exp, 1);
3229 tree len = CALL_EXPR_ARG (exp, 2);
3230 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3231 }
3232 }
3233
3234 /* Expand an instrumented call EXP to the memcpy builtin.
3235 Return NULL_RTX if we failed; the caller should then emit a normal call.
3236 Otherwise try to get the result in TARGET, if convenient (and in
3237 mode MODE if that's convenient). */
3238
3239 static rtx
3240 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3241 {
3242 if (!validate_arglist (exp,
3243 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3244 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3245 INTEGER_TYPE, VOID_TYPE))
3246 return NULL_RTX;
3247 else
3248 {
3249 tree dest = CALL_EXPR_ARG (exp, 0);
3250 tree src = CALL_EXPR_ARG (exp, 2);
3251 tree len = CALL_EXPR_ARG (exp, 4);
3252 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3253
3254 /* Return src bounds with the result. */
3255 if (res)
3256 {
3257 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3258 expand_normal (CALL_EXPR_ARG (exp, 1)));
3259 res = chkp_join_splitted_slot (res, bnd);
3260 }
3261 return res;
3262 }
3263 }
3264
3265 /* Expand a call EXP to the mempcpy builtin.
3266 Return NULL_RTX if we failed; the caller should then emit a normal call.
3267 Otherwise try to get the result in TARGET, if convenient (and in
3268 mode MODE if that's convenient). If ENDP is 0 return the
3269 destination pointer, if ENDP is 1 return the end pointer ala
3270 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3271 stpcpy. */
3272
3273 static rtx
3274 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3275 {
3276 if (!validate_arglist (exp,
3277 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3278 return NULL_RTX;
3279 else
3280 {
3281 tree dest = CALL_EXPR_ARG (exp, 0);
3282 tree src = CALL_EXPR_ARG (exp, 1);
3283 tree len = CALL_EXPR_ARG (exp, 2);
3284 return expand_builtin_mempcpy_args (dest, src, len,
3285 target, mode, /*endp=*/ 1,
3286 exp);
3287 }
3288 }
3289
3290 /* Expand an instrumented call EXP to the mempcpy builtin.
3291 Return NULL_RTX if we failed; the caller should then emit a normal call.
3292 Otherwise try to get the result in TARGET, if convenient (and in
3293 mode MODE if that's convenient). */
3294
3295 static rtx
3296 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3297 {
3298 if (!validate_arglist (exp,
3299 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3300 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3301 INTEGER_TYPE, VOID_TYPE))
3302 return NULL_RTX;
3303 else
3304 {
3305 tree dest = CALL_EXPR_ARG (exp, 0);
3306 tree src = CALL_EXPR_ARG (exp, 2);
3307 tree len = CALL_EXPR_ARG (exp, 4);
3308 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3309 mode, 1, exp);
3310
3311 /* Return src bounds with the result. */
3312 if (res)
3313 {
3314 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3315 expand_normal (CALL_EXPR_ARG (exp, 1)));
3316 res = chkp_join_splitted_slot (res, bnd);
3317 }
3318 return res;
3319 }
3320 }
3321
3322 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3323 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3324 so that this can also be called without constructing an actual CALL_EXPR.
3325 The other arguments and return value are the same as for
3326 expand_builtin_mempcpy. */
3327
3328 static rtx
3329 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3330 rtx target, machine_mode mode, int endp,
3331 tree orig_exp)
3332 {
3333 tree fndecl = get_callee_fndecl (orig_exp);
3334
3335 /* If return value is ignored, transform mempcpy into memcpy. */
3336 if (target == const0_rtx
3337 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3338 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3339 {
3340 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3341 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3342 dest, src, len);
3343 return expand_expr (result, target, mode, EXPAND_NORMAL);
3344 }
3345 else if (target == const0_rtx
3346 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3347 {
3348 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3349 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3350 dest, src, len);
3351 return expand_expr (result, target, mode, EXPAND_NORMAL);
3352 }
3353 else
3354 {
3355 const char *src_str;
3356 unsigned int src_align = get_pointer_alignment (src);
3357 unsigned int dest_align = get_pointer_alignment (dest);
3358 rtx dest_mem, src_mem, len_rtx;
3359
3360 /* If either SRC or DEST is not a pointer type, don't do this
3361 operation in-line. */
3362 if (dest_align == 0 || src_align == 0)
3363 return NULL_RTX;
3364
3365 /* If LEN is not constant, call the normal function. */
3366 if (! tree_fits_uhwi_p (len))
3367 return NULL_RTX;
3368
3369 len_rtx = expand_normal (len);
3370 src_str = c_getstr (src);
3371
3372 /* If SRC is a string constant and block move would be done
3373 by pieces, we can avoid loading the string from memory
3374 and only store the computed constants. */
3375 if (src_str
3376 && CONST_INT_P (len_rtx)
3377 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3378 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3379 CONST_CAST (char *, src_str),
3380 dest_align, false))
3381 {
3382 dest_mem = get_memory_rtx (dest, len);
3383 set_mem_align (dest_mem, dest_align);
3384 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3385 builtin_memcpy_read_str,
3386 CONST_CAST (char *, src_str),
3387 dest_align, false, endp);
3388 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3389 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3390 return dest_mem;
3391 }
3392
3393 if (CONST_INT_P (len_rtx)
3394 && can_move_by_pieces (INTVAL (len_rtx),
3395 MIN (dest_align, src_align)))
3396 {
3397 dest_mem = get_memory_rtx (dest, len);
3398 set_mem_align (dest_mem, dest_align);
3399 src_mem = get_memory_rtx (src, len);
3400 set_mem_align (src_mem, src_align);
3401 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3402 MIN (dest_align, src_align), endp);
3403 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3404 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3405 return dest_mem;
3406 }
3407
3408 return NULL_RTX;
3409 }
3410 }
3411
3412 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3413 we failed, the caller should emit a normal call, otherwise try to
3414 get the result in TARGET, if convenient. If ENDP is 0 return the
3415 destination pointer, if ENDP is 1 return the end pointer ala
3416 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3417 stpcpy. */
3418
3419 static rtx
3420 expand_movstr (tree dest, tree src, rtx target, int endp)
3421 {
3422 struct expand_operand ops[3];
3423 rtx dest_mem;
3424 rtx src_mem;
3425
3426 if (!targetm.have_movstr ())
3427 return NULL_RTX;
3428
3429 dest_mem = get_memory_rtx (dest, NULL);
3430 src_mem = get_memory_rtx (src, NULL);
3431 if (!endp)
3432 {
3433 target = force_reg (Pmode, XEXP (dest_mem, 0));
3434 dest_mem = replace_equiv_address (dest_mem, target);
3435 }
3436
3437 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3438 create_fixed_operand (&ops[1], dest_mem);
3439 create_fixed_operand (&ops[2], src_mem);
3440 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3441 return NULL_RTX;
3442
3443 if (endp && target != const0_rtx)
3444 {
3445 target = ops[0].value;
3446 /* movstr is supposed to set end to the address of the NUL
3447 terminator. If the caller requested a mempcpy-like return value,
3448 adjust it. */
3449 if (endp == 1)
3450 {
3451 rtx tem = plus_constant (GET_MODE (target),
3452 gen_lowpart (GET_MODE (target), target), 1);
3453 emit_move_insn (target, force_operand (tem, NULL_RTX));
3454 }
3455 }
3456 return target;
3457 }
3458
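/* Example of the ENDP == 1 adjustment above: copying "hi" moves three
   bytes and movstr leaves the result pointing at the NUL (DEST + 2),
   whereas mempcpy must return DEST + 3, hence the plus_constant of 1.  */
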
3459 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3460 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3461 try to get the result in TARGET, if convenient (and in mode MODE if that's
3462 convenient). */
3463
3464 static rtx
3465 expand_builtin_strcpy (tree exp, rtx target)
3466 {
3467 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3468 {
3469 tree dest = CALL_EXPR_ARG (exp, 0);
3470 tree src = CALL_EXPR_ARG (exp, 1);
3471 return expand_builtin_strcpy_args (dest, src, target);
3472 }
3473 return NULL_RTX;
3474 }
3475
3476 /* Helper function to do the actual work for expand_builtin_strcpy. The
3477 arguments to the builtin_strcpy call DEST and SRC are broken out
3478 so that this can also be called without constructing an actual CALL_EXPR.
3479 The other arguments and return value are the same as for
3480 expand_builtin_strcpy. */
3481
3482 static rtx
3483 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3484 {
3485 return expand_movstr (dest, src, target, /*endp=*/0);
3486 }
3487
3488 /* Expand a call EXP to the stpcpy builtin.
3489 Return NULL_RTX if we failed; the caller should emit a normal call,
3490 otherwise try to get the result in TARGET, if convenient (and in
3491 mode MODE if that's convenient). */
3492
3493 static rtx
3494 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3495 {
3496 tree dst, src;
3497 location_t loc = EXPR_LOCATION (exp);
3498
3499 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3500 return NULL_RTX;
3501
3502 dst = CALL_EXPR_ARG (exp, 0);
3503 src = CALL_EXPR_ARG (exp, 1);
3504
3505 /* If return value is ignored, transform stpcpy into strcpy. */
3506 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3507 {
3508 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3509 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3510 return expand_expr (result, target, mode, EXPAND_NORMAL);
3511 }
3512 else
3513 {
3514 tree len, lenp1;
3515 rtx ret;
3516
3517 /* Ensure we get an actual string whose length can be evaluated at
3518 compile-time, not an expression containing a string. This is
3519 because the latter will potentially produce pessimized code
3520 when used to produce the return value. */
3521 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3522 return expand_movstr (dst, src, target, /*endp=*/2);
3523
3524 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3525 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3526 target, mode, /*endp=*/2,
3527 exp);
3528
3529 if (ret)
3530 return ret;
3531
3532 if (TREE_CODE (len) == INTEGER_CST)
3533 {
3534 rtx len_rtx = expand_normal (len);
3535
3536 if (CONST_INT_P (len_rtx))
3537 {
3538 ret = expand_builtin_strcpy_args (dst, src, target);
3539
3540 if (ret)
3541 {
3542 if (! target)
3543 {
3544 if (mode != VOIDmode)
3545 target = gen_reg_rtx (mode);
3546 else
3547 target = gen_reg_rtx (GET_MODE (ret));
3548 }
3549 if (GET_MODE (target) != GET_MODE (ret))
3550 ret = gen_lowpart (GET_MODE (target), ret);
3551
3552 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3553 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3554 gcc_assert (ret);
3555
3556 return target;
3557 }
3558 }
3559 }
3560
3561 return expand_movstr (dst, src, target, /*endp=*/2);
3562 }
3563 }
3564
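/* For instance, stpcpy (d, "ab") must return d + 2, the address of the
   terminating NUL; when the source length is known at compile time the
   code above rewrites the call as mempcpy (d, "ab", 3) with ENDP == 2.  */
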
3565 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3566 bytes from constant string DATA + OFFSET and return it as target
3567 constant. */
3568
3569 rtx
3570 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3571 machine_mode mode)
3572 {
3573 const char *str = (const char *) data;
3574
3575 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3576 return const0_rtx;
3577
3578 return c_readstr (str + offset, mode);
3579 }
3580
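/* For example, with DATA == "ab" and a four-byte MODE, OFFSET 0 reads
   'a', 'b' followed by zero bytes, and any OFFSET past the string
   yields const0_rtx -- exactly the zero padding strncpy requires.  */
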
3581 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3582 NULL_RTX if we failed; the caller should emit a normal call. */
3583
3584 static rtx
3585 expand_builtin_strncpy (tree exp, rtx target)
3586 {
3587 location_t loc = EXPR_LOCATION (exp);
3588
3589 if (validate_arglist (exp,
3590 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3591 {
3592 tree dest = CALL_EXPR_ARG (exp, 0);
3593 tree src = CALL_EXPR_ARG (exp, 1);
3594 tree len = CALL_EXPR_ARG (exp, 2);
3595 tree slen = c_strlen (src, 1);
3596
3597 /* We must be passed constant len and src parameters. */
3598 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3599 return NULL_RTX;
3600
3601 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3602
3603 /* We're required to pad with trailing zeros if the requested
3604 len is greater than strlen(s2)+1. In that case try to
3605 use store_by_pieces; if that fails, punt. */
3606 if (tree_int_cst_lt (slen, len))
3607 {
3608 unsigned int dest_align = get_pointer_alignment (dest);
3609 const char *p = c_getstr (src);
3610 rtx dest_mem;
3611
3612 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3613 || !can_store_by_pieces (tree_to_uhwi (len),
3614 builtin_strncpy_read_str,
3615 CONST_CAST (char *, p),
3616 dest_align, false))
3617 return NULL_RTX;
3618
3619 dest_mem = get_memory_rtx (dest, len);
3620 store_by_pieces (dest_mem, tree_to_uhwi (len),
3621 builtin_strncpy_read_str,
3622 CONST_CAST (char *, p), dest_align, false, 0);
3623 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3624 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3625 return dest_mem;
3626 }
3627 }
3628 return NULL_RTX;
3629 }
3630
3631 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3632 bytes from constant string DATA + OFFSET and return it as target
3633 constant. */
3634
3635 rtx
3636 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3637 machine_mode mode)
3638 {
3639 const char *c = (const char *) data;
3640 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3641
3642 memset (p, *c, GET_MODE_SIZE (mode));
3643
3644 return c_readstr (p, mode);
3645 }
3646
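/* For example, if *DATA is 0x41 and MODE is four bytes wide, this
   returns the constant 0x41414141, allowing store_by_pieces to emit
   word-sized stores for memset.  */
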
3647 /* Callback routine for store_by_pieces. Return the RTL of a register
3648 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3649 char value given in the RTL register data. For example, if mode is
3650 4 bytes wide, return the RTL for 0x01010101*data. */
3651
3652 static rtx
3653 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3654 machine_mode mode)
3655 {
3656 rtx target, coeff;
3657 size_t size;
3658 char *p;
3659
3660 size = GET_MODE_SIZE (mode);
3661 if (size == 1)
3662 return (rtx) data;
3663
3664 p = XALLOCAVEC (char, size);
3665 memset (p, 1, size);
3666 coeff = c_readstr (p, mode);
3667
3668 target = convert_to_mode (mode, (rtx) data, 1);
3669 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3670 return force_reg (mode, target);
3671 }
3672
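/* E.g. for a four-byte MODE the coefficient built here is 0x01010101,
   so a run-time fill value V expands to V * 0x01010101, replicating V
   into every byte of the word.  */
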
3673 /* Expand expression EXP, which is a call to the memset builtin. Return
3674 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3675 try to get the result in TARGET, if convenient (and in mode MODE if that's
3676 convenient). */
3677
3678 static rtx
3679 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3680 {
3681 if (!validate_arglist (exp,
3682 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3683 return NULL_RTX;
3684 else
3685 {
3686 tree dest = CALL_EXPR_ARG (exp, 0);
3687 tree val = CALL_EXPR_ARG (exp, 1);
3688 tree len = CALL_EXPR_ARG (exp, 2);
3689 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3690 }
3691 }
3692
3693 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3694 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3695 try to get the result in TARGET, if convenient (and in mode MODE if that's
3696 convenient). */
3697
3698 static rtx
3699 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3700 {
3701 if (!validate_arglist (exp,
3702 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3703 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3704 return NULL_RTX;
3705 else
3706 {
3707 tree dest = CALL_EXPR_ARG (exp, 0);
3708 tree val = CALL_EXPR_ARG (exp, 2);
3709 tree len = CALL_EXPR_ARG (exp, 3);
3710 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3711
3712 /* Return dest bounds with the result. */
3713 if (res)
3714 {
3715 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3716 expand_normal (CALL_EXPR_ARG (exp, 1)));
3717 res = chkp_join_splitted_slot (res, bnd);
3718 }
3719 return res;
3720 }
3721 }
3722
3723 /* Helper function to do the actual work for expand_builtin_memset. The
3724 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3725 so that this can also be called without constructing an actual CALL_EXPR.
3726 The other arguments and return value are the same as for
3727 expand_builtin_memset. */
3728
3729 static rtx
3730 expand_builtin_memset_args (tree dest, tree val, tree len,
3731 rtx target, machine_mode mode, tree orig_exp)
3732 {
3733 tree fndecl, fn;
3734 enum built_in_function fcode;
3735 machine_mode val_mode;
3736 char c;
3737 unsigned int dest_align;
3738 rtx dest_mem, dest_addr, len_rtx;
3739 HOST_WIDE_INT expected_size = -1;
3740 unsigned int expected_align = 0;
3741 unsigned HOST_WIDE_INT min_size;
3742 unsigned HOST_WIDE_INT max_size;
3743 unsigned HOST_WIDE_INT probable_max_size;
3744
3745 dest_align = get_pointer_alignment (dest);
3746
3747 /* If DEST is not a pointer type, don't do this operation in-line. */
3748 if (dest_align == 0)
3749 return NULL_RTX;
3750
3751 if (currently_expanding_gimple_stmt)
3752 stringop_block_profile (currently_expanding_gimple_stmt,
3753 &expected_align, &expected_size);
3754
3755 if (expected_align < dest_align)
3756 expected_align = dest_align;
3757
3758 /* If the LEN parameter is zero, return DEST. */
3759 if (integer_zerop (len))
3760 {
3761 /* Evaluate and ignore VAL in case it has side-effects. */
3762 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3763 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3764 }
3765
3766 /* Stabilize the arguments in case we fail. */
3767 dest = builtin_save_expr (dest);
3768 val = builtin_save_expr (val);
3769 len = builtin_save_expr (len);
3770
3771 len_rtx = expand_normal (len);
3772 determine_block_size (len, len_rtx, &min_size, &max_size,
3773 &probable_max_size);
3774 dest_mem = get_memory_rtx (dest, len);
3775 val_mode = TYPE_MODE (unsigned_char_type_node);
3776
3777 if (TREE_CODE (val) != INTEGER_CST)
3778 {
3779 rtx val_rtx;
3780
3781 val_rtx = expand_normal (val);
3782 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3783
3784 /* Assume that we can memset by pieces if we can store
3785 the coefficients by pieces (in the required modes).
3786 We can't pass builtin_memset_gen_str as that emits RTL. */
3787 c = 1;
3788 if (tree_fits_uhwi_p (len)
3789 && can_store_by_pieces (tree_to_uhwi (len),
3790 builtin_memset_read_str, &c, dest_align,
3791 true))
3792 {
3793 val_rtx = force_reg (val_mode, val_rtx);
3794 store_by_pieces (dest_mem, tree_to_uhwi (len),
3795 builtin_memset_gen_str, val_rtx, dest_align,
3796 true, 0);
3797 }
3798 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3799 dest_align, expected_align,
3800 expected_size, min_size, max_size,
3801 probable_max_size))
3802 goto do_libcall;
3803
3804 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3805 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3806 return dest_mem;
3807 }
3808
3809 if (target_char_cast (val, &c))
3810 goto do_libcall;
3811
3812 if (c)
3813 {
3814 if (tree_fits_uhwi_p (len)
3815 && can_store_by_pieces (tree_to_uhwi (len),
3816 builtin_memset_read_str, &c, dest_align,
3817 true))
3818 store_by_pieces (dest_mem, tree_to_uhwi (len),
3819 builtin_memset_read_str, &c, dest_align, true, 0);
3820 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3821 gen_int_mode (c, val_mode),
3822 dest_align, expected_align,
3823 expected_size, min_size, max_size,
3824 probable_max_size))
3825 goto do_libcall;
3826
3827 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3828 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3829 return dest_mem;
3830 }
3831
3832 set_mem_align (dest_mem, dest_align);
3833 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3834 CALL_EXPR_TAILCALL (orig_exp)
3835 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3836 expected_align, expected_size,
3837 min_size, max_size,
3838 probable_max_size);
3839
3840 if (dest_addr == 0)
3841 {
3842 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3843 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3844 }
3845
3846 return dest_addr;
3847
3848 do_libcall:
3849 fndecl = get_callee_fndecl (orig_exp);
3850 fcode = DECL_FUNCTION_CODE (fndecl);
3851 if (fcode == BUILT_IN_MEMSET
3852 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3853 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3854 dest, val, len);
3855 else if (fcode == BUILT_IN_BZERO)
3856 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3857 dest, len);
3858 else
3859 gcc_unreachable ();
3860 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3861 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3862 return expand_call (fn, target, target == const0_rtx);
3863 }
3864
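/* A sketch of the strategies above: memset (p, 'x', 8) with a constant
   fill byte can use store_by_pieces with builtin_memset_read_str
   (e.g. two stores of 0x78787878 on a 32-bit target); memset (p, 0, n)
   goes through clear_storage_hints; anything else falls back to the
   setmem pattern or, failing that, the do_libcall path.  */
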
3865 /* Expand expression EXP, which is a call to the bzero builtin. Return
3866 NULL_RTX if we failed; the caller should emit a normal call. */
3867
3868 static rtx
3869 expand_builtin_bzero (tree exp)
3870 {
3871 tree dest, size;
3872 location_t loc = EXPR_LOCATION (exp);
3873
3874 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3875 return NULL_RTX;
3876
3877 dest = CALL_EXPR_ARG (exp, 0);
3878 size = CALL_EXPR_ARG (exp, 1);
3879
3880 /* New argument list transforming bzero(ptr x, int y) to
3881 memset(ptr x, int 0, size_t y). This is done this way
3882 so that if it isn't expanded inline, we fall back to
3883 calling bzero instead of memset. */
3884
3885 return expand_builtin_memset_args (dest, integer_zero_node,
3886 fold_convert_loc (loc,
3887 size_type_node, size),
3888 const0_rtx, VOIDmode, exp);
3889 }
3890
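/* Thus bzero (p, n) is expanded exactly as memset (p, 0, n) would be,
   with const0_rtx as the target since bzero returns no value.  */
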
3891 /* Try to expand cmpstr operation ICODE with the given operands.
3892 Return the result rtx on success, otherwise return null. */
3893
3894 static rtx
3895 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3896 HOST_WIDE_INT align)
3897 {
3898 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3899
3900 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3901 target = NULL_RTX;
3902
3903 struct expand_operand ops[4];
3904 create_output_operand (&ops[0], target, insn_mode);
3905 create_fixed_operand (&ops[1], arg1_rtx);
3906 create_fixed_operand (&ops[2], arg2_rtx);
3907 create_integer_operand (&ops[3], align);
3908 if (maybe_expand_insn (icode, 4, ops))
3909 return ops[0].value;
3910 return NULL_RTX;
3911 }
3912
3913 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3914 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3915 otherwise return null. */
3916
3917 static rtx
3918 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3919 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3920 HOST_WIDE_INT align)
3921 {
3922 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3923
3924 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3925 target = NULL_RTX;
3926
3927 struct expand_operand ops[5];
3928 create_output_operand (&ops[0], target, insn_mode);
3929 create_fixed_operand (&ops[1], arg1_rtx);
3930 create_fixed_operand (&ops[2], arg2_rtx);
3931 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3932 TYPE_UNSIGNED (arg3_type));
3933 create_integer_operand (&ops[4], align);
3934 if (maybe_expand_insn (icode, 5, ops))
3935 return ops[0].value;
3936 return NULL_RTX;
3937 }
3938
3939 /* Expand expression EXP, which is a call to the memcmp built-in function.
3940 Return NULL_RTX if we failed and the caller should emit a normal call,
3941 otherwise try to get the result in TARGET, if convenient. */
3942
3943 static rtx
3944 expand_builtin_memcmp (tree exp, rtx target)
3945 {
3946 if (!validate_arglist (exp,
3947 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3948 return NULL_RTX;
3949
3950 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3951 implementing memcmp because it will stop if it encounters two
3952 zero bytes. */
3953 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3954 if (icode == CODE_FOR_nothing)
3955 return NULL_RTX;
3956
3957 tree arg1 = CALL_EXPR_ARG (exp, 0);
3958 tree arg2 = CALL_EXPR_ARG (exp, 1);
3959 tree len = CALL_EXPR_ARG (exp, 2);
3960
3961 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3962 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3963
3964 /* If we don't know the alignment of either pointer, call the function. */
3965 if (arg1_align == 0 || arg2_align == 0)
3966 return NULL_RTX;
3967
3968 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3969 location_t loc = EXPR_LOCATION (exp);
3970 rtx arg1_rtx = get_memory_rtx (arg1, len);
3971 rtx arg2_rtx = get_memory_rtx (arg2, len);
3972 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3973
3974 /* Set MEM_SIZE as appropriate. */
3975 if (CONST_INT_P (arg3_rtx))
3976 {
3977 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3978 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3979 }
3980
3981 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3982 TREE_TYPE (len), arg3_rtx,
3983 MIN (arg1_align, arg2_align));
3984 if (result)
3985 {
3986 /* Return the value in the proper mode for this function. */
3987 if (GET_MODE (result) == mode)
3988 return result;
3989
3990 if (target != 0)
3991 {
3992 convert_move (target, result, 0);
3993 return target;
3994 }
3995
3996 return convert_to_mode (mode, result, 0);
3997 }
3998
3999 result = target;
4000 if (! (result != 0
4001 && REG_P (result) && GET_MODE (result) == mode
4002 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4003 result = gen_reg_rtx (mode);
4004
4005 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4006 TYPE_MODE (integer_type_node), 3,
4007 XEXP (arg1_rtx, 0), Pmode,
4008 XEXP (arg2_rtx, 0), Pmode,
4009 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4010 TYPE_UNSIGNED (sizetype)),
4011 TYPE_MODE (sizetype));
4012 return result;
4013 }
4014
4015 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4016 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4017 try to get the result in TARGET, if convenient. */
4018
4019 static rtx
4020 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4021 {
4022 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4023 return NULL_RTX;
4024
4025 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4026 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4027 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4028 {
4029 rtx arg1_rtx, arg2_rtx;
4030 tree fndecl, fn;
4031 tree arg1 = CALL_EXPR_ARG (exp, 0);
4032 tree arg2 = CALL_EXPR_ARG (exp, 1);
4033 rtx result = NULL_RTX;
4034
4035 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4036 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4037
4038 /* If we don't know the alignment of either pointer, call the function. */
4039 if (arg1_align == 0 || arg2_align == 0)
4040 return NULL_RTX;
4041
4042 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4043 arg1 = builtin_save_expr (arg1);
4044 arg2 = builtin_save_expr (arg2);
4045
4046 arg1_rtx = get_memory_rtx (arg1, NULL);
4047 arg2_rtx = get_memory_rtx (arg2, NULL);
4048
4049 /* Try to call cmpstrsi. */
4050 if (cmpstr_icode != CODE_FOR_nothing)
4051 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4052 MIN (arg1_align, arg2_align));
4053
4054 /* Try to determine at least one length and call cmpstrnsi. */
4055 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4056 {
4057 tree len;
4058 rtx arg3_rtx;
4059
4060 tree len1 = c_strlen (arg1, 1);
4061 tree len2 = c_strlen (arg2, 1);
4062
4063 if (len1)
4064 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4065 if (len2)
4066 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4067
4068 /* If we don't have a constant length for the first, use the length
4069 of the second, if we know it. We don't require a constant for
4070 this case; some cost analysis could be done if both are available
4071 but neither is constant. For now, assume they're equally cheap,
4072 unless one has side effects. If both strings have constant lengths,
4073 use the smaller. */
4074
4075 if (!len1)
4076 len = len2;
4077 else if (!len2)
4078 len = len1;
4079 else if (TREE_SIDE_EFFECTS (len1))
4080 len = len2;
4081 else if (TREE_SIDE_EFFECTS (len2))
4082 len = len1;
4083 else if (TREE_CODE (len1) != INTEGER_CST)
4084 len = len2;
4085 else if (TREE_CODE (len2) != INTEGER_CST)
4086 len = len1;
4087 else if (tree_int_cst_lt (len1, len2))
4088 len = len1;
4089 else
4090 len = len2;
4091
4092 /* If both arguments have side effects, we cannot optimize. */
4093 if (len && !TREE_SIDE_EFFECTS (len))
4094 {
4095 arg3_rtx = expand_normal (len);
4096 result = expand_cmpstrn_or_cmpmem
4097 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4098 arg3_rtx, MIN (arg1_align, arg2_align));
4099 }
4100 }
4101
4102 if (result)
4103 {
4104 /* Return the value in the proper mode for this function. */
4105 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4106 if (GET_MODE (result) == mode)
4107 return result;
4108 if (target == 0)
4109 return convert_to_mode (mode, result, 0);
4110 convert_move (target, result, 0);
4111 return target;
4112 }
4113
4114 /* Expand the library call ourselves using a stabilized argument
4115 list to avoid re-evaluating the function's arguments twice. */
4116 fndecl = get_callee_fndecl (exp);
4117 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4118 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4119 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4120 return expand_call (fn, target, target == const0_rtx);
4121 }
4122 return NULL_RTX;
4123 }
4124
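/* For example, in strcmp (a, "hello") the second length is known to be
   6 (strlen + 1), so cmpstrnsi may be used with that bound: the
   comparison cannot meaningfully extend past the constant string's
   terminating NUL.  */
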
4125 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4126 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4127 try to get the result in TARGET, if convenient. */
4128
4129 static rtx
4130 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4131 ATTRIBUTE_UNUSED machine_mode mode)
4132 {
4133 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4134
4135 if (!validate_arglist (exp,
4136 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4137 return NULL_RTX;
4138
4139 /* If c_strlen can determine an expression for one of the string
4140 lengths, and it doesn't have side effects, then emit cmpstrnsi
4141 using length MIN(strlen(string)+1, arg3). */
4142 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4143 if (cmpstrn_icode != CODE_FOR_nothing)
4144 {
4145 tree len, len1, len2;
4146 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4147 rtx result;
4148 tree fndecl, fn;
4149 tree arg1 = CALL_EXPR_ARG (exp, 0);
4150 tree arg2 = CALL_EXPR_ARG (exp, 1);
4151 tree arg3 = CALL_EXPR_ARG (exp, 2);
4152
4153 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4154 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4155
4156 len1 = c_strlen (arg1, 1);
4157 len2 = c_strlen (arg2, 1);
4158
4159 if (len1)
4160 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4161 if (len2)
4162 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4163
4164 /* If we don't have a constant length for the first, use the length
4165 of the second, if we know it. We don't require a constant for
4166 this case; some cost analysis could be done if both are available
4167 but neither is constant. For now, assume they're equally cheap,
4168 unless one has side effects. If both strings have constant lengths,
4169 use the smaller. */
4170
4171 if (!len1)
4172 len = len2;
4173 else if (!len2)
4174 len = len1;
4175 else if (TREE_SIDE_EFFECTS (len1))
4176 len = len2;
4177 else if (TREE_SIDE_EFFECTS (len2))
4178 len = len1;
4179 else if (TREE_CODE (len1) != INTEGER_CST)
4180 len = len2;
4181 else if (TREE_CODE (len2) != INTEGER_CST)
4182 len = len1;
4183 else if (tree_int_cst_lt (len1, len2))
4184 len = len1;
4185 else
4186 len = len2;
4187
4188 /* If both arguments have side effects, we cannot optimize. */
4189 if (!len || TREE_SIDE_EFFECTS (len))
4190 return NULL_RTX;
4191
4192 /* The actual new length parameter is MIN(len,arg3). */
4193 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4194 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4195
4196 /* If we don't know the alignment of either pointer, call the function. */
4197 if (arg1_align == 0 || arg2_align == 0)
4198 return NULL_RTX;
4199
4200 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4201 arg1 = builtin_save_expr (arg1);
4202 arg2 = builtin_save_expr (arg2);
4203 len = builtin_save_expr (len);
4204
4205 arg1_rtx = get_memory_rtx (arg1, len);
4206 arg2_rtx = get_memory_rtx (arg2, len);
4207 arg3_rtx = expand_normal (len);
4208 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4209 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4210 MIN (arg1_align, arg2_align));
4211 if (result)
4212 {
4213 /* Return the value in the proper mode for this function. */
4214 mode = TYPE_MODE (TREE_TYPE (exp));
4215 if (GET_MODE (result) == mode)
4216 return result;
4217 if (target == 0)
4218 return convert_to_mode (mode, result, 0);
4219 convert_move (target, result, 0);
4220 return target;
4221 }
4222
4223 /* Expand the library call ourselves using a stabilized argument
4224 list to avoid re-evaluating the function's arguments twice. */
4225 fndecl = get_callee_fndecl (exp);
4226 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4227 arg1, arg2, len);
4228 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4229 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4230 return expand_call (fn, target, target == const0_rtx);
4231 }
4232 return NULL_RTX;
4233 }
4234
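/* Example of the MIN (strlen (string) + 1, arg3) bound above: for
   strncmp (s, "hi", 100) the constant argument gives length 3, so
   cmpstrnsi is emitted with length MIN (3, 100) == 3 rather than 100.  */
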
4235 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4236 if that's convenient. */
4237
4238 rtx
4239 expand_builtin_saveregs (void)
4240 {
4241 rtx val;
4242 rtx_insn *seq;
4243
4244 /* Don't do __builtin_saveregs more than once in a function.
4245 Save the result of the first call and reuse it. */
4246 if (saveregs_value != 0)
4247 return saveregs_value;
4248
4249 /* When this function is called, it means that registers must be
4250 saved on entry to this function. So we migrate the call to the
4251 first insn of this function. */
4252
4253 start_sequence ();
4254
4255 /* Do whatever the machine needs done in this case. */
4256 val = targetm.calls.expand_builtin_saveregs ();
4257
4258 seq = get_insns ();
4259 end_sequence ();
4260
4261 saveregs_value = val;
4262
4263 /* Put the insns after the NOTE that starts the function. If this
4264 is inside a start_sequence, make the outer-level insn chain current, so
4265 the code is placed at the start of the function. */
4266 push_topmost_sequence ();
4267 emit_insn_after (seq, entry_of_function ());
4268 pop_topmost_sequence ();
4269
4270 return val;
4271 }
4272
4273 /* Expand a call to __builtin_next_arg. */
4274
4275 static rtx
4276 expand_builtin_next_arg (void)
4277 {
4278 /* Checking arguments is already done in fold_builtin_next_arg
4279 that must be called before this function. */
4280 return expand_binop (ptr_mode, add_optab,
4281 crtl->args.internal_arg_pointer,
4282 crtl->args.arg_offset_rtx,
4283 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4284 }
4285
4286 /* Make it easier for the backends by protecting the valist argument
4287 from multiple evaluations. */
4288
4289 static tree
4290 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4291 {
4292 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4293
4294 /* The current way of determining the type of valist is completely
4295 bogus. We should have the information on the va builtin instead. */
4296 if (!vatype)
4297 vatype = targetm.fn_abi_va_list (cfun->decl);
4298
4299 if (TREE_CODE (vatype) == ARRAY_TYPE)
4300 {
4301 if (TREE_SIDE_EFFECTS (valist))
4302 valist = save_expr (valist);
4303
4304 /* For this case, the backends will be expecting a pointer to
4305 vatype, but it's possible we've actually been given an array
4306 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4307 So fix it. */
4308 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4309 {
4310 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4311 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4312 }
4313 }
4314 else
4315 {
4316 tree pt = build_pointer_type (vatype);
4317
4318 if (! needs_lvalue)
4319 {
4320 if (! TREE_SIDE_EFFECTS (valist))
4321 return valist;
4322
4323 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4324 TREE_SIDE_EFFECTS (valist) = 1;
4325 }
4326
4327 if (TREE_SIDE_EFFECTS (valist))
4328 valist = save_expr (valist);
4329 valist = fold_build2_loc (loc, MEM_REF,
4330 vatype, valist, build_int_cst (pt, 0));
4331 }
4332
4333 return valist;
4334 }
4335
4336 /* The "standard" definition of va_list is void*. */
4337
4338 tree
4339 std_build_builtin_va_list (void)
4340 {
4341 return ptr_type_node;
4342 }
4343
4344 /* The "standard" abi va_list is va_list_type_node. */
4345
4346 tree
4347 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4348 {
4349 return va_list_type_node;
4350 }
4351
4352 /* The "standard" type of va_list is va_list_type_node. */
4353
4354 tree
4355 std_canonical_va_list_type (tree type)
4356 {
4357 tree wtype, htype;
4358
4359 if (INDIRECT_REF_P (type))
4360 type = TREE_TYPE (type);
4361 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4362 type = TREE_TYPE (type);
4363 wtype = va_list_type_node;
4364 htype = type;
4365 /* Treat structure va_list types. */
4366 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4367 htype = TREE_TYPE (htype);
4368 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4369 {
4370 /* If va_list is an array type, the argument may have decayed
4371 to a pointer type, e.g. by being passed to another function.
4372 In that case, unwrap both types so that we can compare the
4373 underlying records. */
4374 if (TREE_CODE (htype) == ARRAY_TYPE
4375 || POINTER_TYPE_P (htype))
4376 {
4377 wtype = TREE_TYPE (wtype);
4378 htype = TREE_TYPE (htype);
4379 }
4380 }
4381 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4382 return va_list_type_node;
4383
4384 return NULL_TREE;
4385 }
4386
4387 /* The "standard" implementation of va_start: just assign `nextarg' to
4388 the variable. */
4389
4390 void
4391 std_expand_builtin_va_start (tree valist, rtx nextarg)
4392 {
4393 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4394 convert_move (va_r, nextarg, 0);
4395
4396 /* We do not have any valid bounds for the pointer, so
4397 just store zero bounds for it. */
4398 if (chkp_function_instrumented_p (current_function_decl))
4399 chkp_expand_bounds_reset_for_mem (valist,
4400 make_tree (TREE_TYPE (valist),
4401 nextarg));
4402 }
4403
4404 /* Expand EXP, a call to __builtin_va_start. */
4405
4406 static rtx
4407 expand_builtin_va_start (tree exp)
4408 {
4409 rtx nextarg;
4410 tree valist;
4411 location_t loc = EXPR_LOCATION (exp);
4412
4413 if (call_expr_nargs (exp) < 2)
4414 {
4415 error_at (loc, "too few arguments to function %<va_start%>");
4416 return const0_rtx;
4417 }
4418
4419 if (fold_builtin_next_arg (exp, true))
4420 return const0_rtx;
4421
4422 nextarg = expand_builtin_next_arg ();
4423 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4424
4425 if (targetm.expand_builtin_va_start)
4426 targetm.expand_builtin_va_start (valist, nextarg);
4427 else
4428 std_expand_builtin_va_start (valist, nextarg);
4429
4430 return const0_rtx;
4431 }
4432
4433 /* Expand EXP, a call to __builtin_va_end. */
4434
4435 static rtx
4436 expand_builtin_va_end (tree exp)
4437 {
4438 tree valist = CALL_EXPR_ARG (exp, 0);
4439
4440 /* Evaluate for side effects, if needed. I hate macros that don't
4441 do that. */
4442 if (TREE_SIDE_EFFECTS (valist))
4443 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4444
4445 return const0_rtx;
4446 }
4447
4448 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4449 builtin rather than just as an assignment in stdarg.h because of the
4450 nastiness of array-type va_list types. */
4451
4452 static rtx
4453 expand_builtin_va_copy (tree exp)
4454 {
4455 tree dst, src, t;
4456 location_t loc = EXPR_LOCATION (exp);
4457
4458 dst = CALL_EXPR_ARG (exp, 0);
4459 src = CALL_EXPR_ARG (exp, 1);
4460
4461 dst = stabilize_va_list_loc (loc, dst, 1);
4462 src = stabilize_va_list_loc (loc, src, 0);
4463
4464 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4465
4466 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4467 {
4468 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4469 TREE_SIDE_EFFECTS (t) = 1;
4470 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4471 }
4472 else
4473 {
4474 rtx dstb, srcb, size;
4475
4476 /* Evaluate to pointers. */
4477 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4478 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4479 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4480 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4481
4482 dstb = convert_memory_address (Pmode, dstb);
4483 srcb = convert_memory_address (Pmode, srcb);
4484
4485 /* "Dereference" to BLKmode memories. */
4486 dstb = gen_rtx_MEM (BLKmode, dstb);
4487 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4488 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4489 srcb = gen_rtx_MEM (BLKmode, srcb);
4490 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4491 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4492
4493 /* Copy. */
4494 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4495 }
4496
4497 return const0_rtx;
4498 }
4499
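/* On targets whose ABI va_list is an array type -- for instance
   x86-64, where va_list is struct __va_list_tag[1] -- the block move
   above copies the whole underlying record; on plain void * va_list
   targets the simple MODIFY_EXPR branch suffices.  */
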
4500 /* Expand a call to one of the builtin functions __builtin_frame_address or
4501 __builtin_return_address. */
4502
4503 static rtx
4504 expand_builtin_frame_address (tree fndecl, tree exp)
4505 {
4506 /* The argument must be a nonnegative integer constant.
4507 It counts the number of frames to scan up the stack.
4508 The value is either the frame pointer value or the return
4509 address saved in that frame. */
4510 if (call_expr_nargs (exp) == 0)
4511 /* Warning about missing arg was already issued. */
4512 return const0_rtx;
4513 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4514 {
4515 error ("invalid argument to %qD", fndecl);
4516 return const0_rtx;
4517 }
4518 else
4519 {
4520 /* Number of frames to scan up the stack. */
4521 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4522
4523 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4524
4525 /* Some ports cannot access arbitrary stack frames. */
4526 if (tem == NULL)
4527 {
4528 warning (0, "unsupported argument to %qD", fndecl);
4529 return const0_rtx;
4530 }
4531
4532 if (count)
4533 {
4534 /* Warn since no effort is made to ensure that any frame
4535 beyond the current one exists or can be safely reached. */
4536 warning (OPT_Wframe_address, "calling %qD with "
4537 "a nonzero argument is unsafe", fndecl);
4538 }
4539
4540 /* For __builtin_frame_address, return what we've got. */
4541 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4542 return tem;
4543
4544 if (!REG_P (tem)
4545 && ! CONSTANT_P (tem))
4546 tem = copy_addr_to_reg (tem);
4547 return tem;
4548 }
4549 }
4550
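/* For example, __builtin_return_address (0) yields the current
   function's own return address, while any nonzero count triggers the
   -Wframe-address warning above, since outer frames may not be safely
   reachable on all targets.  */
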
4551 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4552 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4553 is the same as for allocate_dynamic_stack_space. */
4554
4555 static rtx
4556 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4557 {
4558 rtx op0;
4559 rtx result;
4560 bool valid_arglist;
4561 unsigned int align;
4562 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4563 == BUILT_IN_ALLOCA_WITH_ALIGN);
4564
4565 valid_arglist
4566 = (alloca_with_align
4567 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4568 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4569
4570 if (!valid_arglist)
4571 return NULL_RTX;
4572
4573 /* Compute the argument. */
4574 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4575
4576 /* Compute the alignment. */
4577 align = (alloca_with_align
4578 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4579 : BIGGEST_ALIGNMENT);
4580
4581 /* Allocate the desired space. */
4582 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4583 result = convert_memory_address (ptr_mode, result);
4584
4585 return result;
4586 }
4587
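/* For example, __builtin_alloca_with_align (n, 128) requests 128-bit
   (16-byte) alignment; the second argument is a constant alignment in
   bits and is read directly via TREE_INT_CST_LOW above.  */
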
4588 /* Expand a call to bswap builtin in EXP.
4589 Return NULL_RTX if a normal call should be emitted rather than expanding the
4590 function in-line. If convenient, the result should be placed in TARGET.
4591 SUBTARGET may be used as the target for computing one of EXP's operands. */
4592
4593 static rtx
4594 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4595 rtx subtarget)
4596 {
4597 tree arg;
4598 rtx op0;
4599
4600 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4601 return NULL_RTX;
4602
4603 arg = CALL_EXPR_ARG (exp, 0);
4604 op0 = expand_expr (arg,
4605 subtarget && GET_MODE (subtarget) == target_mode
4606 ? subtarget : NULL_RTX,
4607 target_mode, EXPAND_NORMAL);
4608 if (GET_MODE (op0) != target_mode)
4609 op0 = convert_to_mode (target_mode, op0, 1);
4610
4611 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4612
4613 gcc_assert (target);
4614
4615 return convert_to_mode (target_mode, target, 1);
4616 }
4617
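/* For example, __builtin_bswap32 (0x12345678) expands through
   bswap_optab to 0x78563412, reversing the byte order of its
   argument.  */
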
4618 /* Expand a call to a unary builtin in EXP.
4619 Return NULL_RTX if a normal call should be emitted rather than expanding the
4620 function in-line. If convenient, the result should be placed in TARGET.
4621 SUBTARGET may be used as the target for computing one of EXP's operands. */
4622
4623 static rtx
4624 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4625 rtx subtarget, optab op_optab)
4626 {
4627 rtx op0;
4628
4629 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4630 return NULL_RTX;
4631
4632 /* Compute the argument. */
4633 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4634 (subtarget
4635 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4636 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4637 VOIDmode, EXPAND_NORMAL);
4638 /* Compute op, into TARGET if possible.
4639 Set TARGET to wherever the result comes back. */
4640 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4641 op_optab, op0, target, op_optab != clrsb_optab);
4642 gcc_assert (target);
4643
4644 return convert_to_mode (target_mode, target, 0);
4645 }
4646
4647 /* Expand a call to __builtin_expect. We just return our argument
4648 because the builtin_expect semantics should already have been
4649 handled by the tree branch prediction pass. */
4650
4651 static rtx
4652 expand_builtin_expect (tree exp, rtx target)
4653 {
4654 tree arg;
4655
4656 if (call_expr_nargs (exp) < 2)
4657 return const0_rtx;
4658 arg = CALL_EXPR_ARG (exp, 0);
4659
4660 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4661 /* When guessing was done, the hints should be already stripped away. */
4662 gcc_assert (!flag_guess_branch_prob
4663 || optimize == 0 || seen_error ());
4664 return target;
4665 }
4666
4667 /* Expand a call to __builtin_assume_aligned. We just return our first
4668 argument, because the builtin_assume_aligned semantics should already
4669 have been handled by CCP. */
4670
4671 static rtx
4672 expand_builtin_assume_aligned (tree exp, rtx target)
4673 {
4674 if (call_expr_nargs (exp) < 2)
4675 return const0_rtx;
4676 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4677 EXPAND_NORMAL);
4678 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4679 && (call_expr_nargs (exp) < 3
4680 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4681 return target;
4682 }
4683
4684 void
4685 expand_builtin_trap (void)
4686 {
4687 if (targetm.have_trap ())
4688 {
4689 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4690 /* For trap insns when not accumulating outgoing args force
4691 REG_ARGS_SIZE note to prevent crossjumping of calls with
4692 different args sizes. */
4693 if (!ACCUMULATE_OUTGOING_ARGS)
4694 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4695 }
4696 else
4697 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4698 emit_barrier ();
4699 }
4700
4701 /* Expand a call to __builtin_unreachable. We do nothing except emit
4702 a barrier saying that control flow will not pass here.
4703
4704 It is the responsibility of the program being compiled to ensure
4705 that control flow never reaches __builtin_unreachable. */
4706 static void
4707 expand_builtin_unreachable (void)
4708 {
4709 emit_barrier ();
4710 }
4711
4712 /* Expand EXP, a call to fabs, fabsf or fabsl.
4713 Return NULL_RTX if a normal call should be emitted rather than expanding
4714 the function inline. If convenient, the result should be placed
4715 in TARGET. SUBTARGET may be used as the target for computing
4716 the operand. */
4717
4718 static rtx
4719 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4720 {
4721 machine_mode mode;
4722 tree arg;
4723 rtx op0;
4724
4725 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4726 return NULL_RTX;
4727
4728 arg = CALL_EXPR_ARG (exp, 0);
4729 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4730 mode = TYPE_MODE (TREE_TYPE (arg));
4731 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4732 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4733 }
4734
4735 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4736 Return NULL if a normal call should be emitted rather than expanding the
4737 function inline. If convenient, the result should be placed in TARGET.
4738 SUBTARGET may be used as the target for computing the operand. */
4739
4740 static rtx
4741 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4742 {
4743 rtx op0, op1;
4744 tree arg;
4745
4746 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4747 return NULL_RTX;
4748
4749 arg = CALL_EXPR_ARG (exp, 0);
4750 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4751
4752 arg = CALL_EXPR_ARG (exp, 1);
4753 op1 = expand_normal (arg);
4754
4755 return expand_copysign (op0, op1, target);
4756 }
4757
4758 /* Expand a call to __builtin___clear_cache. */
4759
4760 static rtx
4761 expand_builtin___clear_cache (tree exp)
4762 {
4763 if (!targetm.code_for_clear_cache)
4764 {
4765 #ifdef CLEAR_INSN_CACHE
4766 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4767 does something. Just do the default expansion to a call to
4768 __clear_cache(). */
4769 return NULL_RTX;
4770 #else
4771 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4772 does nothing. There is no need to call it. Do nothing. */
4773 return const0_rtx;
4774 #endif /* CLEAR_INSN_CACHE */
4775 }
4776
4777 /* We have a "clear_cache" insn, and it will handle everything. */
4778 tree begin, end;
4779 rtx begin_rtx, end_rtx;
4780
4781 /* We must not expand to a library call. If we did, any
4782 fallback library function in libgcc that might contain a call to
4783 __builtin___clear_cache() would recurse infinitely. */
4784 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4785 {
4786 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4787 return const0_rtx;
4788 }
4789
4790 if (targetm.have_clear_cache ())
4791 {
4792 struct expand_operand ops[2];
4793
4794 begin = CALL_EXPR_ARG (exp, 0);
4795 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4796
4797 end = CALL_EXPR_ARG (exp, 1);
4798 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4799
4800 create_address_operand (&ops[0], begin_rtx);
4801 create_address_operand (&ops[1], end_rtx);
4802 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4803 return const0_rtx;
4804 }
4805 return const0_rtx;
4806 }
4807
4808 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4809
4810 static rtx
4811 round_trampoline_addr (rtx tramp)
4812 {
4813 rtx temp, addend, mask;
4814
4815 /* If we don't need too much alignment, we'll have been guaranteed
4816 proper alignment by get_trampoline_type. */
4817 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4818 return tramp;
4819
4820 /* Round address up to desired boundary. */
4821 temp = gen_reg_rtx (Pmode);
4822 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4823 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4824
4825 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4826 temp, 0, OPTAB_LIB_WIDEN);
4827 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4828 temp, 0, OPTAB_LIB_WIDEN);
4829
4830 return tramp;
4831 }
4832
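/* The rounding above is the usual (ADDR + A - 1) & -A sequence, with
   A = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT: e.g. for 64-bit alignment
   A is 8 and an address of 0x1005 rounds up to 0x1008.  */
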
4833 static rtx
4834 expand_builtin_init_trampoline (tree exp, bool onstack)
4835 {
4836 tree t_tramp, t_func, t_chain;
4837 rtx m_tramp, r_tramp, r_chain, tmp;
4838
4839 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4840 POINTER_TYPE, VOID_TYPE))
4841 return NULL_RTX;
4842
4843 t_tramp = CALL_EXPR_ARG (exp, 0);
4844 t_func = CALL_EXPR_ARG (exp, 1);
4845 t_chain = CALL_EXPR_ARG (exp, 2);
4846
4847 r_tramp = expand_normal (t_tramp);
4848 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4849 MEM_NOTRAP_P (m_tramp) = 1;
4850
4851 /* If ONSTACK, the TRAMP argument should be the address of a field
4852 within the local function's FRAME decl. Either way, let's see if
4853 we can fill in the MEM_ATTRs for this memory. */
4854 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4855 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4856
4857 /* Creator of a heap trampoline is responsible for making sure the
4858 address is aligned to at least STACK_BOUNDARY. Normally malloc
4859 will ensure this anyhow. */
4860 tmp = round_trampoline_addr (r_tramp);
4861 if (tmp != r_tramp)
4862 {
4863 m_tramp = change_address (m_tramp, BLKmode, tmp);
4864 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4865 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4866 }
4867
4868 /* The FUNC argument should be the address of the nested function.
4869 Extract the actual function decl to pass to the hook. */
4870 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4871 t_func = TREE_OPERAND (t_func, 0);
4872 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4873
4874 r_chain = expand_normal (t_chain);
4875
4876 /* Generate insns to initialize the trampoline. */
4877 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4878
4879 if (onstack)
4880 {
4881 trampolines_created = 1;
4882
4883 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4884 "trampoline generated for nested function %qD", t_func);
4885 }
4886
4887 return const0_rtx;
4888 }
4889
4890 static rtx
4891 expand_builtin_adjust_trampoline (tree exp)
4892 {
4893 rtx tramp;
4894
4895 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4896 return NULL_RTX;
4897
4898 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4899 tramp = round_trampoline_addr (tramp);
4900 if (targetm.calls.trampoline_adjust_address)
4901 tramp = targetm.calls.trampoline_adjust_address (tramp);
4902
4903 return tramp;
4904 }
4905
4906 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4907 function. The function first checks whether the back end provides
4908 an insn to implement signbit for the respective mode. If not, it
4909 checks whether the floating point format of the value is such that
4910 the sign bit can be extracted. If not, signbit is expanded as "ARG < 0.0".
4911 EXP is the expression that is a call to the builtin function; if
4912 convenient, the result should be placed in TARGET. */
4913 static rtx
4914 expand_builtin_signbit (tree exp, rtx target)
4915 {
4916 const struct real_format *fmt;
4917 machine_mode fmode, imode, rmode;
4918 tree arg;
4919 int word, bitpos;
4920 enum insn_code icode;
4921 rtx temp;
4922 location_t loc = EXPR_LOCATION (exp);
4923
4924 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4925 return NULL_RTX;
4926
4927 arg = CALL_EXPR_ARG (exp, 0);
4928 fmode = TYPE_MODE (TREE_TYPE (arg));
4929 rmode = TYPE_MODE (TREE_TYPE (exp));
4930 fmt = REAL_MODE_FORMAT (fmode);
4931
4932 arg = builtin_save_expr (arg);
4933
4934 /* Expand the argument yielding a RTX expression. */
4935 temp = expand_normal (arg);
4936
4937 /* Check if the back end provides an insn that handles signbit for the
4938 argument's mode. */
4939 icode = optab_handler (signbit_optab, fmode);
4940 if (icode != CODE_FOR_nothing)
4941 {
4942 rtx_insn *last = get_last_insn ();
4943 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4944 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4945 return target;
4946 delete_insns_since (last);
4947 }
4948
4949 /* For floating point formats without a sign bit, implement signbit
4950 as "ARG < 0.0". */
4951 bitpos = fmt->signbit_ro;
4952 if (bitpos < 0)
4953 {
4954 /* But we can't do this if the format supports signed zero. */
4955 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4956
4957 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4958 build_real (TREE_TYPE (arg), dconst0));
4959 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4960 }
4961
4962 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4963 {
4964 imode = int_mode_for_mode (fmode);
4965 gcc_assert (imode != BLKmode);
4966 temp = gen_lowpart (imode, temp);
4967 }
4968 else
4969 {
4970 imode = word_mode;
4971 /* Handle targets with different FP word orders. */
4972 if (FLOAT_WORDS_BIG_ENDIAN)
4973 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4974 else
4975 word = bitpos / BITS_PER_WORD;
4976 temp = operand_subword_force (temp, word, fmode);
4977 bitpos = bitpos % BITS_PER_WORD;
4978 }
4979
4980 /* Force the intermediate word_mode (or narrower) result into a
4981 register. This avoids attempting to create paradoxical SUBREGs
4982 of floating point modes below. */
4983 temp = force_reg (imode, temp);
4984
4985 /* If the bitpos is within the "result mode" lowpart, the operation
4986 can be implemented with a single bitwise AND. Otherwise, we need
4987 a right shift and an AND. */
4988
4989 if (bitpos < GET_MODE_BITSIZE (rmode))
4990 {
4991 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4992
4993 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4994 temp = gen_lowpart (rmode, temp);
4995 temp = expand_binop (rmode, and_optab, temp,
4996 immed_wide_int_const (mask, rmode),
4997 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4998 }
4999 else
5000 {
5001 /* Perform a logical right shift to place the signbit in the least
5002 significant bit, then truncate the result to the desired mode
5003 and mask just this bit. */
5004 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5005 temp = gen_lowpart (rmode, temp);
5006 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5007 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5008 }
5009
5010 return temp;
5011 }
5012
5013 /* Expand fork or exec calls. TARGET is the desired target of the
5014 call. EXP is the call. FN is the
5015 identifier of the actual function. IGNORE is nonzero if the
5016 value is to be ignored. */
5017
5018 static rtx
5019 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5020 {
5021 tree id, decl;
5022 tree call;
5023
5024 /* If we are not profiling, just call the function. */
5025 if (!profile_arc_flag)
5026 return NULL_RTX;
5027
5028 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5029 compiler, so the code does not diverge, and the wrapper may run the
5030 code necessary for keeping the profiling sane. */
5031
5032 switch (DECL_FUNCTION_CODE (fn))
5033 {
5034 case BUILT_IN_FORK:
5035 id = get_identifier ("__gcov_fork");
5036 break;
5037
5038 case BUILT_IN_EXECL:
5039 id = get_identifier ("__gcov_execl");
5040 break;
5041
5042 case BUILT_IN_EXECV:
5043 id = get_identifier ("__gcov_execv");
5044 break;
5045
5046 case BUILT_IN_EXECLP:
5047 id = get_identifier ("__gcov_execlp");
5048 break;
5049
5050 case BUILT_IN_EXECLE:
5051 id = get_identifier ("__gcov_execle");
5052 break;
5053
5054 case BUILT_IN_EXECVP:
5055 id = get_identifier ("__gcov_execvp");
5056 break;
5057
5058 case BUILT_IN_EXECVE:
5059 id = get_identifier ("__gcov_execve");
5060 break;
5061
5062 default:
5063 gcc_unreachable ();
5064 }
5065
5066 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5067 FUNCTION_DECL, id, TREE_TYPE (fn));
5068 DECL_EXTERNAL (decl) = 1;
5069 TREE_PUBLIC (decl) = 1;
5070 DECL_ARTIFICIAL (decl) = 1;
5071 TREE_NOTHROW (decl) = 1;
5072 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5073 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5074 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5075 return expand_call (call, target, ignore);
5076 }
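
/* Illustrative sketch (not part of this file): with -fprofile-arcs a user
   call such as

     pid_t pid = fork ();

   is expanded as if it were

     pid_t pid = __gcov_fork ();

   so that libgcov can dump the parent's counters before forking and the
   profile data of parent and child remain consistent.  */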
5077
5078
5079 \f
5080 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5081 the pointer in these functions is void*, the tree optimizers may remove
5082 casts. The mode computed in expand_builtin isn't reliable either, due
5083 to __sync_bool_compare_and_swap.
5084
5085 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5086 group of builtins. This gives us log2 of the mode size. */
5087
5088 static inline machine_mode
5089 get_builtin_sync_mode (int fcode_diff)
5090 {
5091 /* The size is not negotiable, so ask not to get BLKmode in return
5092 if the target indicates that a smaller size would be better. */
5093 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5094 }
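
/* Worked example (illustrative): for BUILT_IN_SYNC_FETCH_AND_ADD_4 the
   difference fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1 is 2, so the request
   is for BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on a typical
   8-bit-unit target.  */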
5095
5096 /* Expand the memory expression LOC and return the appropriate memory operand
5097 for the builtin_sync operations. */
5098
5099 static rtx
5100 get_builtin_sync_mem (tree loc, machine_mode mode)
5101 {
5102 rtx addr, mem;
5103
5104 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5105 addr = convert_memory_address (Pmode, addr);
5106
5107 /* Note that we explicitly do not want any alias information for this
5108 memory, so that we kill all other live memories. Otherwise we don't
5109 satisfy the full barrier semantics of the intrinsic. */
5110 mem = validize_mem (gen_rtx_MEM (mode, addr));
5111
5112 /* The alignment needs to be at least that of the mode. */
5113 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5114 get_pointer_alignment (loc)));
5115 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5116 MEM_VOLATILE_P (mem) = 1;
5117
5118 return mem;
5119 }
5120
5121 /* Make sure an argument is in the right mode.
5122 EXP is the tree argument.
5123 MODE is the mode it should be in. */
5124
5125 static rtx
5126 expand_expr_force_mode (tree exp, machine_mode mode)
5127 {
5128 rtx val;
5129 machine_mode old_mode;
5130
5131 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5132 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5133 of CONST_INTs, where we know the old_mode only from the call argument. */
5134
5135 old_mode = GET_MODE (val);
5136 if (old_mode == VOIDmode)
5137 old_mode = TYPE_MODE (TREE_TYPE (exp));
5138 val = convert_modes (mode, old_mode, val, 1);
5139 return val;
5140 }
5141
5142
5143 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5144 EXP is the CALL_EXPR. CODE is the rtx code
5145 that corresponds to the arithmetic or logical operation from the name;
5146 an exception here is that NOT actually means NAND. TARGET is an optional
5147 place for us to store the results; AFTER is true if this is the
5148 fetch_and_xxx form. */
5149
5150 static rtx
5151 expand_builtin_sync_operation (machine_mode mode, tree exp,
5152 enum rtx_code code, bool after,
5153 rtx target)
5154 {
5155 rtx val, mem;
5156 location_t loc = EXPR_LOCATION (exp);
5157
5158 if (code == NOT && warn_sync_nand)
5159 {
5160 tree fndecl = get_callee_fndecl (exp);
5161 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5162
5163 static bool warned_f_a_n, warned_n_a_f;
5164
5165 switch (fcode)
5166 {
5167 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5168 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5169 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5170 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5171 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5172 if (warned_f_a_n)
5173 break;
5174
5175 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5176 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5177 warned_f_a_n = true;
5178 break;
5179
5180 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5181 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5182 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5183 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5184 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5185 if (warned_n_a_f)
5186 break;
5187
5188 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5189 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5190 warned_n_a_f = true;
5191 break;
5192
5193 default:
5194 gcc_unreachable ();
5195 }
5196 }
5197
5198 /* Expand the operands. */
5199 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5200 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5201
5202 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5203 after);
5204 }
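
/* Illustration of the semantic change warned about above (a sketch, not
   part of this file): for __sync_fetch_and_nand (ptr, val),

     GCC <  4.4:  *ptr = ~*ptr & val;
     GCC >= 4.4:  *ptr = ~(*ptr & val);

   in both cases the value *ptr held before the operation is returned.  */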
5205
5206 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5207 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5208 true if this is the boolean form. TARGET is a place for us to store the
5209 results; this is NOT optional if IS_BOOL is true. */
5210
5211 static rtx
5212 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5213 bool is_bool, rtx target)
5214 {
5215 rtx old_val, new_val, mem;
5216 rtx *pbool, *poval;
5217
5218 /* Expand the operands. */
5219 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5220 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5221 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5222
5223 pbool = poval = NULL;
5224 if (target != const0_rtx)
5225 {
5226 if (is_bool)
5227 pbool = &target;
5228 else
5229 poval = &target;
5230 }
5231 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5232 false, MEMMODEL_SYNC_SEQ_CST,
5233 MEMMODEL_SYNC_SEQ_CST))
5234 return NULL_RTX;
5235
5236 return target;
5237 }
5238
5239 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5240 general form is actually an atomic exchange, and some targets only
5241 support a reduced form with the second argument being a constant 1.
5242 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5243 the results. */
5244
5245 static rtx
5246 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5247 rtx target)
5248 {
5249 rtx val, mem;
5250
5251 /* Expand the operands. */
5252 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5253 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5254
5255 return expand_sync_lock_test_and_set (target, mem, val);
5256 }
5257
5258 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5259
5260 static void
5261 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5262 {
5263 rtx mem;
5264
5265 /* Expand the operands. */
5266 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5267
5268 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5269 }
5270
5271 /* Given an integer representing an ``enum memmodel'', verify its
5272 correctness and return the memory model enum. */
5273
5274 static enum memmodel
5275 get_memmodel (tree exp)
5276 {
5277 rtx op;
5278 unsigned HOST_WIDE_INT val;
5279
5280 /* If the parameter is not a constant, it's a run time value so we'll just
5281 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5282 if (TREE_CODE (exp) != INTEGER_CST)
5283 return MEMMODEL_SEQ_CST;
5284
5285 op = expand_normal (exp);
5286
5287 val = INTVAL (op);
5288 if (targetm.memmodel_check)
5289 val = targetm.memmodel_check (val);
5290 else if (val & ~MEMMODEL_MASK)
5291 {
5292 warning (OPT_Winvalid_memory_model,
5293 "Unknown architecture specifier in memory model to builtin.");
5294 return MEMMODEL_SEQ_CST;
5295 }
5296
5297 /* Should never see a user explicit SYNC memmodel, so >= LAST works. */
5298 if (memmodel_base (val) >= MEMMODEL_LAST)
5299 {
5300 warning (OPT_Winvalid_memory_model,
5301 "invalid memory model argument to builtin");
5302 return MEMMODEL_SEQ_CST;
5303 }
5304
5305 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5306 be conservative and promote consume to acquire. */
5307 if (val == MEMMODEL_CONSUME)
5308 val = MEMMODEL_ACQUIRE;
5309
5310 return (enum memmodel) val;
5311 }
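
/* For reference (illustrative): the model values seen here follow the C11
   ordering __ATOMIC_RELAXED (0), __ATOMIC_CONSUME (1), __ATOMIC_ACQUIRE (2),
   __ATOMIC_RELEASE (3), __ATOMIC_ACQ_REL (4), __ATOMIC_SEQ_CST (5), so e.g.

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   is handled here as if __ATOMIC_ACQUIRE had been passed.  */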
5312
5313 /* Expand the __atomic_exchange intrinsic:
5314 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5315 EXP is the CALL_EXPR.
5316 TARGET is an optional place for us to store the results. */
5317
5318 static rtx
5319 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5320 {
5321 rtx val, mem;
5322 enum memmodel model;
5323
5324 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5325
5326 if (!flag_inline_atomics)
5327 return NULL_RTX;
5328
5329 /* Expand the operands. */
5330 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5331 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5332
5333 return expand_atomic_exchange (target, mem, val, model);
5334 }
5335
5336 /* Expand the __atomic_compare_exchange intrinsic:
5337 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5338 TYPE desired, BOOL weak,
5339 enum memmodel success,
5340 enum memmodel failure)
5341 EXP is the CALL_EXPR.
5342 TARGET is an optional place for us to store the results. */
5343
5344 static rtx
5345 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5346 rtx target)
5347 {
5348 rtx expect, desired, mem, oldval;
5349 rtx_code_label *label;
5350 enum memmodel success, failure;
5351 tree weak;
5352 bool is_weak;
5353
5354 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5355 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5356
5357 if (failure > success)
5358 {
5359 warning (OPT_Winvalid_memory_model,
5360 "failure memory model cannot be stronger than success memory "
5361 "model for %<__atomic_compare_exchange%>");
5362 success = MEMMODEL_SEQ_CST;
5363 }
5364
5365 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5366 {
5367 warning (OPT_Winvalid_memory_model,
5368 "invalid failure memory model for "
5369 "%<__atomic_compare_exchange%>");
5370 failure = MEMMODEL_SEQ_CST;
5371 success = MEMMODEL_SEQ_CST;
5372 }
5373
5374
5375 if (!flag_inline_atomics)
5376 return NULL_RTX;
5377
5378 /* Expand the operands. */
5379 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5380
5381 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5382 expect = convert_memory_address (Pmode, expect);
5383 expect = gen_rtx_MEM (mode, expect);
5384 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5385
5386 weak = CALL_EXPR_ARG (exp, 3);
5387 is_weak = false;
5388 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5389 is_weak = true;
5390
5391 if (target == const0_rtx)
5392 target = NULL;
5393
5394 /* Lest the rtl backend create a race condition with an improper store
5395 to memory, always create a new pseudo for OLDVAL. */
5396 oldval = NULL;
5397
5398 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5399 is_weak, success, failure))
5400 return NULL_RTX;
5401
5402 /* Conditionally store back to EXPECT, lest we create a race condition
5403 with an improper store to memory. */
5404 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5405 the normal case where EXPECT is totally private, i.e. a register. At
5406 which point the store can be unconditional. */
5407 label = gen_label_rtx ();
5408 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5409 GET_MODE (target), 1, label);
5410 emit_move_insn (expect, oldval);
5411 emit_label (label);
5412
5413 return target;
5414 }
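
/* Usage sketch (illustrative; handle_contention is a hypothetical helper):
   the conditional store-back above implements the usual compare-exchange
   contract:

     int obj, expected = 0;
     if (!__atomic_compare_exchange_n (&obj, &expected, 1, false,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
       handle_contention (expected);  // 'expected' now holds the old value
 */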
5415
5416 /* Expand the __atomic_load intrinsic:
5417 TYPE __atomic_load (TYPE *object, enum memmodel)
5418 EXP is the CALL_EXPR.
5419 TARGET is an optional place for us to store the results. */
5420
5421 static rtx
5422 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5423 {
5424 rtx mem;
5425 enum memmodel model;
5426
5427 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5428 if (is_mm_release (model) || is_mm_acq_rel (model))
5429 {
5430 warning (OPT_Winvalid_memory_model,
5431 "invalid memory model for %<__atomic_load%>");
5432 model = MEMMODEL_SEQ_CST;
5433 }
5434
5435 if (!flag_inline_atomics)
5436 return NULL_RTX;
5437
5438 /* Expand the operand. */
5439 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5440
5441 return expand_atomic_load (target, mem, model);
5442 }
5443
5444
5445 /* Expand the __atomic_store intrinsic:
5446 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5447 EXP is the CALL_EXPR.
5448 TARGET is an optional place for us to store the results. */
5449
5450 static rtx
5451 expand_builtin_atomic_store (machine_mode mode, tree exp)
5452 {
5453 rtx mem, val;
5454 enum memmodel model;
5455
5456 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5457 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5458 || is_mm_release (model)))
5459 {
5460 warning (OPT_Winvalid_memory_model,
5461 "invalid memory model for %<__atomic_store%>");
5462 model = MEMMODEL_SEQ_CST;
5463 }
5464
5465 if (!flag_inline_atomics)
5466 return NULL_RTX;
5467
5468 /* Expand the operands. */
5469 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5470 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5471
5472 return expand_atomic_store (mem, val, model, false);
5473 }
5474
5475 /* Expand the __atomic_fetch_XXX intrinsic:
5476 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5477 EXP is the CALL_EXPR.
5478 TARGET is an optional place for us to store the results.
5479 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
5480 FETCH_AFTER is true if returning the result of the operation.
5481 FETCH_AFTER is false if returning the value before the operation.
5482 IGNORE is true if the result is not used.
5483 EXT_CALL is the correct builtin for an external call if this cannot be
5484 resolved to an instruction sequence. */
5485
5486 static rtx
5487 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5488 enum rtx_code code, bool fetch_after,
5489 bool ignore, enum built_in_function ext_call)
5490 {
5491 rtx val, mem, ret;
5492 enum memmodel model;
5493 tree fndecl;
5494 tree addr;
5495
5496 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5497
5498 /* Expand the operands. */
5499 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5500 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5501
5502 /* Only try generating instructions if inlining is turned on. */
5503 if (flag_inline_atomics)
5504 {
5505 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5506 if (ret)
5507 return ret;
5508 }
5509
5510 /* Return if a different routine isn't needed for the library call. */
5511 if (ext_call == BUILT_IN_NONE)
5512 return NULL_RTX;
5513
5514 /* Change the call to the specified function. */
5515 fndecl = get_callee_fndecl (exp);
5516 addr = CALL_EXPR_FN (exp);
5517 STRIP_NOPS (addr);
5518
5519 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5520 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5521
5522 /* Expand the call here so we can emit trailing code. */
5523 ret = expand_call (exp, target, ignore);
5524
5525 /* Replace the original function just in case it matters. */
5526 TREE_OPERAND (addr, 0) = fndecl;
5527
5528 /* Then issue the arithmetic correction to return the right result. */
5529 if (!ignore)
5530 {
5531 if (code == NOT)
5532 {
5533 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5534 OPTAB_LIB_WIDEN);
5535 ret = expand_simple_unop (mode, NOT, ret, target, true);
5536 }
5537 else
5538 ret = expand_simple_binop (mode, code, ret, val, target, true,
5539 OPTAB_LIB_WIDEN);
5540 }
5541 return ret;
5542 }
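
/* Illustration of the trailing correction above (a sketch, not part of
   this file): when __atomic_add_fetch_4 cannot be inlined, it is emitted
   as the library call plus an add, conceptually

     ret = __atomic_fetch_add_4 (ptr, val, model) + val;

   and for NAND the fix-up is ret = ~(ret & val), matching the NOT case.  */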
5543
5544 /* Expand an atomic clear operation.
5545 void __atomic_clear (BOOL *obj, enum memmodel)
5546 EXP is the call expression. */
5547
5548 static rtx
5549 expand_builtin_atomic_clear (tree exp)
5550 {
5551 machine_mode mode;
5552 rtx mem, ret;
5553 enum memmodel model;
5554
5555 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5556 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5557 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5558
5559 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5560 {
5561 warning (OPT_Winvalid_memory_model,
5562 "invalid memory model for %<__atomic_store%>");
5563 model = MEMMODEL_SEQ_CST;
5564 }
5565
5566 /* Try issuing an atomic store of zero, allowing fallback to a
5567 __sync_lock_release pattern. The only way this can
5568 fail is if the bool type is larger than a word size. Unlikely, but
5569 handle it anyway for completeness. Assume a single threaded model since
5570 there is no atomic support in this case, and no barriers are required. */
5571 ret = expand_atomic_store (mem, const0_rtx, model, true);
5572 if (!ret)
5573 emit_move_insn (mem, const0_rtx);
5574 return const0_rtx;
5575 }
5576
5577 /* Expand an atomic test_and_set operation.
5578 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5579 EXP is the call expression. */
5580
5581 static rtx
5582 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5583 {
5584 rtx mem;
5585 enum memmodel model;
5586 machine_mode mode;
5587
5588 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5589 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5590 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5591
5592 return expand_atomic_test_and_set (target, mem, model);
5593 }
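
/* Usage sketch (illustrative; spin_lock/spin_unlock are hypothetical
   names): __atomic_test_and_set and __atomic_clear together form a
   minimal spinlock over a one-byte flag:

     static volatile char lock_flag;

     void spin_lock (void)
     {
       while (__atomic_test_and_set (&lock_flag, __ATOMIC_ACQUIRE))
         ;  // spin until the previous value was clear
     }

     void spin_unlock (void)
     {
       __atomic_clear (&lock_flag, __ATOMIC_RELEASE);
     }
 */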
5594
5595
5596 /* Return true if an object of size ARG0, optionally pointed to by ARG1,
5597 is always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5598
5599 static tree
5600 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5601 {
5602 int size;
5603 machine_mode mode;
5604 unsigned int mode_align, type_align;
5605
5606 if (TREE_CODE (arg0) != INTEGER_CST)
5607 return NULL_TREE;
5608
5609 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5610 mode = mode_for_size (size, MODE_INT, 0);
5611 mode_align = GET_MODE_ALIGNMENT (mode);
5612
5613 if (TREE_CODE (arg1) == INTEGER_CST)
5614 {
5615 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5616
5617 /* Either this argument is null, or it's a fake pointer encoding
5618 the alignment of the object. */
5619 val = val & -val;
5620 val *= BITS_PER_UNIT;
5621
5622 if (val == 0 || mode_align < val)
5623 type_align = mode_align;
5624 else
5625 type_align = val;
5626 }
5627 else
5628 {
5629 tree ttype = TREE_TYPE (arg1);
5630
5631 /* This function is usually invoked and folded immediately by the front
5632 end before anything else has a chance to look at it. The pointer
5633 parameter at this point is usually cast to a void *, so check for that
5634 and look past the cast. */
5635 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5636 && VOID_TYPE_P (TREE_TYPE (ttype)))
5637 arg1 = TREE_OPERAND (arg1, 0);
5638
5639 ttype = TREE_TYPE (arg1);
5640 gcc_assert (POINTER_TYPE_P (ttype));
5641
5642 /* Get the underlying type of the object. */
5643 ttype = TREE_TYPE (ttype);
5644 type_align = TYPE_ALIGN (ttype);
5645 }
5646
5647 /* If the object has smaller alignment, the lock free routines cannot
5648 be used. */
5649 if (type_align < mode_align)
5650 return boolean_false_node;
5651
5652 /* Check if a compare_and_swap pattern exists for the mode which represents
5653 the required size. The pattern is not allowed to fail, so the existence
5654 of the pattern indicates support is present. */
5655 if (can_compare_and_swap_p (mode, true))
5656 return boolean_true_node;
5657 else
5658 return boolean_false_node;
5659 }
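
/* Worked example (illustrative): on a target with a 32-bit
   compare-and-swap pattern,

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true: the null pointer selects the mode's own alignment, and
   can_compare_and_swap_p (SImode, true) succeeds.  */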
5660
5661 /* Return true if the parameters to call EXP represent an object which will
5662 always generate lock free instructions. The first argument represents the
5663 size of the object, and the second parameter is a pointer to the object
5664 itself. If NULL is passed for the object, then the result is based on
5665 typical alignment for an object of the specified size. Otherwise return
5666 false. */
5667
5668 static rtx
5669 expand_builtin_atomic_always_lock_free (tree exp)
5670 {
5671 tree size;
5672 tree arg0 = CALL_EXPR_ARG (exp, 0);
5673 tree arg1 = CALL_EXPR_ARG (exp, 1);
5674
5675 if (TREE_CODE (arg0) != INTEGER_CST)
5676 {
5677 error ("non-constant argument 1 to __atomic_always_lock_free");
5678 return const0_rtx;
5679 }
5680
5681 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5682 if (size == boolean_true_node)
5683 return const1_rtx;
5684 return const0_rtx;
5685 }
5686
5687 /* Return one or zero if it can be determined that the object ARG1 of size
5688 ARG0 is lock free on this architecture. */
5689
5690 static tree
5691 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5692 {
5693 if (!flag_inline_atomics)
5694 return NULL_TREE;
5695
5696 /* If it isn't always lock free, don't generate a result. */
5697 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5698 return boolean_true_node;
5699
5700 return NULL_TREE;
5701 }
5702
5703 /* Return true if the parameters to call EXP represent an object which can
5704 be determined to generate lock free instructions. The first argument
5705 represents the size of the object, and the second parameter is a pointer
5706 to the object itself. If NULL is passed for the object, then the result
5707 is based on typical alignment for an object of the specified size.
5708 Otherwise return NULL. */
5709
5710 static rtx
5711 expand_builtin_atomic_is_lock_free (tree exp)
5712 {
5713 tree size;
5714 tree arg0 = CALL_EXPR_ARG (exp, 0);
5715 tree arg1 = CALL_EXPR_ARG (exp, 1);
5716
5717 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5718 {
5719 error ("non-integer argument 1 to __atomic_is_lock_free");
5720 return NULL_RTX;
5721 }
5722
5723 if (!flag_inline_atomics)
5724 return NULL_RTX;
5725
5726 /* If the value is known at compile time, return the RTX for it. */
5727 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5728 if (size == boolean_true_node)
5729 return const1_rtx;
5730
5731 return NULL_RTX;
5732 }
5733
5734 /* Expand the __atomic_thread_fence intrinsic:
5735 void __atomic_thread_fence (enum memmodel)
5736 EXP is the CALL_EXPR. */
5737
5738 static void
5739 expand_builtin_atomic_thread_fence (tree exp)
5740 {
5741 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5742 expand_mem_thread_fence (model);
5743 }
5744
5745 /* Expand the __atomic_signal_fence intrinsic:
5746 void __atomic_signal_fence (enum memmodel)
5747 EXP is the CALL_EXPR. */
5748
5749 static void
5750 expand_builtin_atomic_signal_fence (tree exp)
5751 {
5752 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5753 expand_mem_signal_fence (model);
5754 }
5755
5756 /* Expand the __sync_synchronize intrinsic. */
5757
5758 static void
5759 expand_builtin_sync_synchronize (void)
5760 {
5761 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5762 }
5763
5764 static rtx
5765 expand_builtin_thread_pointer (tree exp, rtx target)
5766 {
5767 enum insn_code icode;
5768 if (!validate_arglist (exp, VOID_TYPE))
5769 return const0_rtx;
5770 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5771 if (icode != CODE_FOR_nothing)
5772 {
5773 struct expand_operand op;
5774 /* If the target is not suitable then create a new target. */
5775 if (target == NULL_RTX
5776 || !REG_P (target)
5777 || GET_MODE (target) != Pmode)
5778 target = gen_reg_rtx (Pmode);
5779 create_output_operand (&op, target, Pmode);
5780 expand_insn (icode, 1, &op);
5781 return target;
5782 }
5783 error ("__builtin_thread_pointer is not supported on this target");
5784 return const0_rtx;
5785 }
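
/* Illustration (not part of this file): when the target provides the
   pattern, a call such as

     void *tp = __builtin_thread_pointer ();

   expands to a direct read of the thread-pointer register (for example
   TPIDR_EL0 on AArch64) instead of a library call.  */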
5786
5787 static void
5788 expand_builtin_set_thread_pointer (tree exp)
5789 {
5790 enum insn_code icode;
5791 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5792 return;
5793 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5794 if (icode != CODE_FOR_nothing)
5795 {
5796 struct expand_operand op;
5797 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5798 Pmode, EXPAND_NORMAL);
5799 create_input_operand (&op, val, Pmode);
5800 expand_insn (icode, 1, &op);
5801 return;
5802 }
5803 error ("__builtin_set_thread_pointer is not supported on this target");
5804 }
5805
5806 \f
5807 /* Emit code to restore the current value of the stack pointer. */
5808
5809 static void
5810 expand_stack_restore (tree var)
5811 {
5812 rtx_insn *prev;
5813 rtx sa = expand_normal (var);
5814
5815 sa = convert_memory_address (Pmode, sa);
5816
5817 prev = get_last_insn ();
5818 emit_stack_restore (SAVE_BLOCK, sa);
5819
5820 record_new_stack_level ();
5821
5822 fixup_args_size_notes (prev, get_last_insn (), 0);
5823 }
5824
5825 /* Emit code to save the current value of the stack pointer. */
5826
5827 static rtx
5828 expand_stack_save (void)
5829 {
5830 rtx ret = NULL_RTX;
5831
5832 emit_stack_save (SAVE_BLOCK, &ret);
5833 return ret;
5834 }
5835
5836
5837 /* Expand an expression EXP that calls a built-in function,
5838 with result going to TARGET if that's convenient
5839 (and in mode MODE if that's convenient).
5840 SUBTARGET may be used as the target for computing one of EXP's operands.
5841 IGNORE is nonzero if the value is to be ignored. */
5842
5843 rtx
5844 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5845 int ignore)
5846 {
5847 tree fndecl = get_callee_fndecl (exp);
5848 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5849 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5850 int flags;
5851
5852 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5853 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5854
5855 /* When ASan is enabled, we don't want to expand some memory/string
5856 builtins and rely on libsanitizer's hooks. This allows us to avoid
5857 redundant checks and be sure that a possible overflow will be detected
5858 by ASan. */
5859
5860 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5861 return expand_call (exp, target, ignore);
5862
5863 /* When not optimizing, generate calls to library functions for a certain
5864 set of builtins. */
5865 if (!optimize
5866 && !called_as_built_in (fndecl)
5867 && fcode != BUILT_IN_FORK
5868 && fcode != BUILT_IN_EXECL
5869 && fcode != BUILT_IN_EXECV
5870 && fcode != BUILT_IN_EXECLP
5871 && fcode != BUILT_IN_EXECLE
5872 && fcode != BUILT_IN_EXECVP
5873 && fcode != BUILT_IN_EXECVE
5874 && fcode != BUILT_IN_ALLOCA
5875 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5876 && fcode != BUILT_IN_FREE
5877 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5878 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5879 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5880 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5881 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5882 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5883 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5884 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5885 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5886 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5887 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5888 && fcode != BUILT_IN_CHKP_BNDRET)
5889 return expand_call (exp, target, ignore);
5890
5891 /* The built-in function expanders test for target == const0_rtx
5892 to determine whether the function's result will be ignored. */
5893 if (ignore)
5894 target = const0_rtx;
5895
5896 /* If the result of a pure or const built-in function is ignored, and
5897 none of its arguments are volatile, we can avoid expanding the
5898 built-in call and just evaluate the arguments for side-effects. */
5899 if (target == const0_rtx
5900 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5901 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5902 {
5903 bool volatilep = false;
5904 tree arg;
5905 call_expr_arg_iterator iter;
5906
5907 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5908 if (TREE_THIS_VOLATILE (arg))
5909 {
5910 volatilep = true;
5911 break;
5912 }
5913
5914 if (! volatilep)
5915 {
5916 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5917 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5918 return const0_rtx;
5919 }
5920 }
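
/* Illustration of the shortcut above (a sketch, not part of this file):
   in

     (void) __builtin_ffs (f ());

   the const builtin's result is unused, so only the argument f () is
   expanded for its side effects and no ffs code is generated.  */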
5921
5922 /* expand_builtin_with_bounds is supposed to be used for
5923 instrumented builtin calls. */
5924 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5925
5926 switch (fcode)
5927 {
5928 CASE_FLT_FN (BUILT_IN_FABS):
5929 case BUILT_IN_FABSD32:
5930 case BUILT_IN_FABSD64:
5931 case BUILT_IN_FABSD128:
5932 target = expand_builtin_fabs (exp, target, subtarget);
5933 if (target)
5934 return target;
5935 break;
5936
5937 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5938 target = expand_builtin_copysign (exp, target, subtarget);
5939 if (target)
5940 return target;
5941 break;
5942
5943 /* Just do a normal library call if we were unable to fold
5944 the values. */
5945 CASE_FLT_FN (BUILT_IN_CABS):
5946 break;
5947
5948 CASE_FLT_FN (BUILT_IN_EXP):
5949 CASE_FLT_FN (BUILT_IN_EXP10):
5950 CASE_FLT_FN (BUILT_IN_POW10):
5951 CASE_FLT_FN (BUILT_IN_EXP2):
5952 CASE_FLT_FN (BUILT_IN_EXPM1):
5953 CASE_FLT_FN (BUILT_IN_LOGB):
5954 CASE_FLT_FN (BUILT_IN_LOG):
5955 CASE_FLT_FN (BUILT_IN_LOG10):
5956 CASE_FLT_FN (BUILT_IN_LOG2):
5957 CASE_FLT_FN (BUILT_IN_LOG1P):
5958 CASE_FLT_FN (BUILT_IN_TAN):
5959 CASE_FLT_FN (BUILT_IN_ASIN):
5960 CASE_FLT_FN (BUILT_IN_ACOS):
5961 CASE_FLT_FN (BUILT_IN_ATAN):
5962 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5963 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5964 because of possible accuracy problems. */
5965 if (! flag_unsafe_math_optimizations)
5966 break;
5967 CASE_FLT_FN (BUILT_IN_SQRT):
5968 CASE_FLT_FN (BUILT_IN_FLOOR):
5969 CASE_FLT_FN (BUILT_IN_CEIL):
5970 CASE_FLT_FN (BUILT_IN_TRUNC):
5971 CASE_FLT_FN (BUILT_IN_ROUND):
5972 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5973 CASE_FLT_FN (BUILT_IN_RINT):
5974 target = expand_builtin_mathfn (exp, target, subtarget);
5975 if (target)
5976 return target;
5977 break;
5978
5979 CASE_FLT_FN (BUILT_IN_FMA):
5980 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5981 if (target)
5982 return target;
5983 break;
5984
5985 CASE_FLT_FN (BUILT_IN_ILOGB):
5986 if (! flag_unsafe_math_optimizations)
5987 break;
5988 CASE_FLT_FN (BUILT_IN_ISINF):
5989 CASE_FLT_FN (BUILT_IN_FINITE):
5990 case BUILT_IN_ISFINITE:
5991 case BUILT_IN_ISNORMAL:
5992 target = expand_builtin_interclass_mathfn (exp, target);
5993 if (target)
5994 return target;
5995 break;
5996
5997 CASE_FLT_FN (BUILT_IN_ICEIL):
5998 CASE_FLT_FN (BUILT_IN_LCEIL):
5999 CASE_FLT_FN (BUILT_IN_LLCEIL):
6000 CASE_FLT_FN (BUILT_IN_LFLOOR):
6001 CASE_FLT_FN (BUILT_IN_IFLOOR):
6002 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6003 target = expand_builtin_int_roundingfn (exp, target);
6004 if (target)
6005 return target;
6006 break;
6007
6008 CASE_FLT_FN (BUILT_IN_IRINT):
6009 CASE_FLT_FN (BUILT_IN_LRINT):
6010 CASE_FLT_FN (BUILT_IN_LLRINT):
6011 CASE_FLT_FN (BUILT_IN_IROUND):
6012 CASE_FLT_FN (BUILT_IN_LROUND):
6013 CASE_FLT_FN (BUILT_IN_LLROUND):
6014 target = expand_builtin_int_roundingfn_2 (exp, target);
6015 if (target)
6016 return target;
6017 break;
6018
6019 CASE_FLT_FN (BUILT_IN_POWI):
6020 target = expand_builtin_powi (exp, target);
6021 if (target)
6022 return target;
6023 break;
6024
6025 CASE_FLT_FN (BUILT_IN_ATAN2):
6026 CASE_FLT_FN (BUILT_IN_LDEXP):
6027 CASE_FLT_FN (BUILT_IN_SCALB):
6028 CASE_FLT_FN (BUILT_IN_SCALBN):
6029 CASE_FLT_FN (BUILT_IN_SCALBLN):
6030 if (! flag_unsafe_math_optimizations)
6031 break;
6032
6033 CASE_FLT_FN (BUILT_IN_FMOD):
6034 CASE_FLT_FN (BUILT_IN_REMAINDER):
6035 CASE_FLT_FN (BUILT_IN_DREM):
6036 CASE_FLT_FN (BUILT_IN_POW):
6037 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6038 if (target)
6039 return target;
6040 break;
6041
6042 CASE_FLT_FN (BUILT_IN_CEXPI):
6043 target = expand_builtin_cexpi (exp, target);
6044 gcc_assert (target);
6045 return target;
6046
6047 CASE_FLT_FN (BUILT_IN_SIN):
6048 CASE_FLT_FN (BUILT_IN_COS):
6049 if (! flag_unsafe_math_optimizations)
6050 break;
6051 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6052 if (target)
6053 return target;
6054 break;
6055
6056 CASE_FLT_FN (BUILT_IN_SINCOS):
6057 if (! flag_unsafe_math_optimizations)
6058 break;
6059 target = expand_builtin_sincos (exp);
6060 if (target)
6061 return target;
6062 break;
6063
6064 case BUILT_IN_APPLY_ARGS:
6065 return expand_builtin_apply_args ();
6066
6067 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6068 FUNCTION with a copy of the parameters described by
6069 ARGUMENTS, and ARGSIZE. It returns a block of memory
6070 allocated on the stack into which is stored all the registers
6071 that might possibly be used for returning the result of a
6072 function. ARGUMENTS is the value returned by
6073 __builtin_apply_args. ARGSIZE is the number of bytes of
6074 arguments that must be copied. ??? How should this value be
6075 computed? We'll also need a safe worst case value for varargs
6076 functions. */
6077 case BUILT_IN_APPLY:
6078 if (!validate_arglist (exp, POINTER_TYPE,
6079 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6080 && !validate_arglist (exp, REFERENCE_TYPE,
6081 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6082 return const0_rtx;
6083 else
6084 {
6085 rtx ops[3];
6086
6087 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6088 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6089 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6090
6091 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6092 }
6093
6094 /* __builtin_return (RESULT) causes the function to return the
6095 value described by RESULT. RESULT is address of the block of
6096 memory returned by __builtin_apply. */
6097 case BUILT_IN_RETURN:
6098 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6099 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6100 return const0_rtx;
6101
6102 case BUILT_IN_SAVEREGS:
6103 return expand_builtin_saveregs ();
6104
6105 case BUILT_IN_VA_ARG_PACK:
6106 /* All valid uses of __builtin_va_arg_pack () are removed during
6107 inlining. */
6108 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6109 return const0_rtx;
6110
6111 case BUILT_IN_VA_ARG_PACK_LEN:
6112 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6113 inlining. */
6114 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6115 return const0_rtx;
6116
6117 /* Return the address of the first anonymous stack arg. */
6118 case BUILT_IN_NEXT_ARG:
6119 if (fold_builtin_next_arg (exp, false))
6120 return const0_rtx;
6121 return expand_builtin_next_arg ();
6122
6123 case BUILT_IN_CLEAR_CACHE:
6124 target = expand_builtin___clear_cache (exp);
6125 if (target)
6126 return target;
6127 break;
6128
6129 case BUILT_IN_CLASSIFY_TYPE:
6130 return expand_builtin_classify_type (exp);
6131
6132 case BUILT_IN_CONSTANT_P:
6133 return const0_rtx;
6134
6135 case BUILT_IN_FRAME_ADDRESS:
6136 case BUILT_IN_RETURN_ADDRESS:
6137 return expand_builtin_frame_address (fndecl, exp);
6138
6139 /* Returns the address of the area where the structure is returned.
6140 0 otherwise. */
6141 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6142 if (call_expr_nargs (exp) != 0
6143 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6144 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6145 return const0_rtx;
6146 else
6147 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6148
6149 case BUILT_IN_ALLOCA:
6150 case BUILT_IN_ALLOCA_WITH_ALIGN:
6151 /* If the allocation stems from the declaration of a variable-sized
6152 object, it cannot accumulate. */
6153 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6154 if (target)
6155 return target;
6156 break;
6157
6158 case BUILT_IN_STACK_SAVE:
6159 return expand_stack_save ();
6160
6161 case BUILT_IN_STACK_RESTORE:
6162 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6163 return const0_rtx;
6164
6165 case BUILT_IN_BSWAP16:
6166 case BUILT_IN_BSWAP32:
6167 case BUILT_IN_BSWAP64:
6168 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6169 if (target)
6170 return target;
6171 break;
6172
6173 CASE_INT_FN (BUILT_IN_FFS):
6174 target = expand_builtin_unop (target_mode, exp, target,
6175 subtarget, ffs_optab);
6176 if (target)
6177 return target;
6178 break;
6179
6180 CASE_INT_FN (BUILT_IN_CLZ):
6181 target = expand_builtin_unop (target_mode, exp, target,
6182 subtarget, clz_optab);
6183 if (target)
6184 return target;
6185 break;
6186
6187 CASE_INT_FN (BUILT_IN_CTZ):
6188 target = expand_builtin_unop (target_mode, exp, target,
6189 subtarget, ctz_optab);
6190 if (target)
6191 return target;
6192 break;
6193
6194 CASE_INT_FN (BUILT_IN_CLRSB):
6195 target = expand_builtin_unop (target_mode, exp, target,
6196 subtarget, clrsb_optab);
6197 if (target)
6198 return target;
6199 break;
6200
6201 CASE_INT_FN (BUILT_IN_POPCOUNT):
6202 target = expand_builtin_unop (target_mode, exp, target,
6203 subtarget, popcount_optab);
6204 if (target)
6205 return target;
6206 break;
6207
6208 CASE_INT_FN (BUILT_IN_PARITY):
6209 target = expand_builtin_unop (target_mode, exp, target,
6210 subtarget, parity_optab);
6211 if (target)
6212 return target;
6213 break;
6214
6215 case BUILT_IN_STRLEN:
6216 target = expand_builtin_strlen (exp, target, target_mode);
6217 if (target)
6218 return target;
6219 break;
6220
6221 case BUILT_IN_STRCPY:
6222 target = expand_builtin_strcpy (exp, target);
6223 if (target)
6224 return target;
6225 break;
6226
6227 case BUILT_IN_STRNCPY:
6228 target = expand_builtin_strncpy (exp, target);
6229 if (target)
6230 return target;
6231 break;
6232
6233 case BUILT_IN_STPCPY:
6234 target = expand_builtin_stpcpy (exp, target, mode);
6235 if (target)
6236 return target;
6237 break;
6238
6239 case BUILT_IN_MEMCPY:
6240 target = expand_builtin_memcpy (exp, target);
6241 if (target)
6242 return target;
6243 break;
6244
6245 case BUILT_IN_MEMPCPY:
6246 target = expand_builtin_mempcpy (exp, target, mode);
6247 if (target)
6248 return target;
6249 break;
6250
6251 case BUILT_IN_MEMSET:
6252 target = expand_builtin_memset (exp, target, mode);
6253 if (target)
6254 return target;
6255 break;
6256
6257 case BUILT_IN_BZERO:
6258 target = expand_builtin_bzero (exp);
6259 if (target)
6260 return target;
6261 break;
6262
6263 case BUILT_IN_STRCMP:
6264 target = expand_builtin_strcmp (exp, target);
6265 if (target)
6266 return target;
6267 break;
6268
6269 case BUILT_IN_STRNCMP:
6270 target = expand_builtin_strncmp (exp, target, mode);
6271 if (target)
6272 return target;
6273 break;
6274
6275 case BUILT_IN_BCMP:
6276 case BUILT_IN_MEMCMP:
6277 target = expand_builtin_memcmp (exp, target);
6278 if (target)
6279 return target;
6280 break;
6281
6282 case BUILT_IN_SETJMP:
6283 /* This should have been lowered to the builtins below. */
6284 gcc_unreachable ();
6285
6286 case BUILT_IN_SETJMP_SETUP:
6287 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6288 and the receiver label. */
6289 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6290 {
6291 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6292 VOIDmode, EXPAND_NORMAL);
6293 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6294 rtx_insn *label_r = label_rtx (label);
6295
6296 /* This is copied from the handling of non-local gotos. */
6297 expand_builtin_setjmp_setup (buf_addr, label_r);
6298 nonlocal_goto_handler_labels
6299 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6300 nonlocal_goto_handler_labels);
6301 /* ??? Do not let expand_label treat us as such since we would
6302 not want to be both on the list of non-local labels and on
6303 the list of forced labels. */
6304 FORCED_LABEL (label) = 0;
6305 return const0_rtx;
6306 }
6307 break;
6308
6309 case BUILT_IN_SETJMP_RECEIVER:
6310 /* __builtin_setjmp_receiver is passed the receiver label. */
6311 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6312 {
6313 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6314 rtx_insn *label_r = label_rtx (label);
6315
6316 expand_builtin_setjmp_receiver (label_r);
6317 return const0_rtx;
6318 }
6319 break;
6320
6321 /* __builtin_longjmp is passed a pointer to an array of five words.
6322 It's similar to the C library longjmp function but works with
6323 __builtin_setjmp above. */
6324 case BUILT_IN_LONGJMP:
6325 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6326 {
6327 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6328 VOIDmode, EXPAND_NORMAL);
6329 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6330
6331 if (value != const1_rtx)
6332 {
6333 error ("%<__builtin_longjmp%> second argument must be 1");
6334 return const0_rtx;
6335 }
6336
6337 expand_builtin_longjmp (buf_addr, value);
6338 return const0_rtx;
6339 }
6340 break;
6341
6342 case BUILT_IN_NONLOCAL_GOTO:
6343 target = expand_builtin_nonlocal_goto (exp);
6344 if (target)
6345 return target;
6346 break;
6347
6348 /* This updates the setjmp buffer that is its argument with the value
6349 of the current stack pointer. */
6350 case BUILT_IN_UPDATE_SETJMP_BUF:
6351 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6352 {
6353 rtx buf_addr
6354 = expand_normal (CALL_EXPR_ARG (exp, 0));
6355
6356 expand_builtin_update_setjmp_buf (buf_addr);
6357 return const0_rtx;
6358 }
6359 break;
6360
6361 case BUILT_IN_TRAP:
6362 expand_builtin_trap ();
6363 return const0_rtx;
6364
6365 case BUILT_IN_UNREACHABLE:
6366 expand_builtin_unreachable ();
6367 return const0_rtx;
6368
6369 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6370 case BUILT_IN_SIGNBITD32:
6371 case BUILT_IN_SIGNBITD64:
6372 case BUILT_IN_SIGNBITD128:
6373 target = expand_builtin_signbit (exp, target);
6374 if (target)
6375 return target;
6376 break;
6377
6378 /* Various hooks for the DWARF 2 __throw routine. */
6379 case BUILT_IN_UNWIND_INIT:
6380 expand_builtin_unwind_init ();
6381 return const0_rtx;
6382 case BUILT_IN_DWARF_CFA:
6383 return virtual_cfa_rtx;
6384 #ifdef DWARF2_UNWIND_INFO
6385 case BUILT_IN_DWARF_SP_COLUMN:
6386 return expand_builtin_dwarf_sp_column ();
6387 case BUILT_IN_INIT_DWARF_REG_SIZES:
6388 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6389 return const0_rtx;
6390 #endif
6391 case BUILT_IN_FROB_RETURN_ADDR:
6392 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6393 case BUILT_IN_EXTRACT_RETURN_ADDR:
6394 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6395 case BUILT_IN_EH_RETURN:
6396 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6397 CALL_EXPR_ARG (exp, 1));
6398 return const0_rtx;
6399 case BUILT_IN_EH_RETURN_DATA_REGNO:
6400 return expand_builtin_eh_return_data_regno (exp);
6401 case BUILT_IN_EXTEND_POINTER:
6402 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6403 case BUILT_IN_EH_POINTER:
6404 return expand_builtin_eh_pointer (exp);
6405 case BUILT_IN_EH_FILTER:
6406 return expand_builtin_eh_filter (exp);
6407 case BUILT_IN_EH_COPY_VALUES:
6408 return expand_builtin_eh_copy_values (exp);
6409
6410 case BUILT_IN_VA_START:
6411 return expand_builtin_va_start (exp);
6412 case BUILT_IN_VA_END:
6413 return expand_builtin_va_end (exp);
6414 case BUILT_IN_VA_COPY:
6415 return expand_builtin_va_copy (exp);
6416 case BUILT_IN_EXPECT:
6417 return expand_builtin_expect (exp, target);
6418 case BUILT_IN_ASSUME_ALIGNED:
6419 return expand_builtin_assume_aligned (exp, target);
6420 case BUILT_IN_PREFETCH:
6421 expand_builtin_prefetch (exp);
6422 return const0_rtx;
6423
6424 case BUILT_IN_INIT_TRAMPOLINE:
6425 return expand_builtin_init_trampoline (exp, true);
6426 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6427 return expand_builtin_init_trampoline (exp, false);
6428 case BUILT_IN_ADJUST_TRAMPOLINE:
6429 return expand_builtin_adjust_trampoline (exp);
6430
6431 case BUILT_IN_FORK:
6432 case BUILT_IN_EXECL:
6433 case BUILT_IN_EXECV:
6434 case BUILT_IN_EXECLP:
6435 case BUILT_IN_EXECLE:
6436 case BUILT_IN_EXECVP:
6437 case BUILT_IN_EXECVE:
6438 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6439 if (target)
6440 return target;
6441 break;
6442
6443 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6444 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6445 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6446 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6447 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6448 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6449 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6450 if (target)
6451 return target;
6452 break;
6453
6454 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6455 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6456 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6457 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6458 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6459 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6460 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6461 if (target)
6462 return target;
6463 break;
6464
6465 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6466 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6467 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6468 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6469 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6470 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6471 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6472 if (target)
6473 return target;
6474 break;
6475
6476 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6477 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6478 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6479 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6480 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6481 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6482 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6483 if (target)
6484 return target;
6485 break;
6486
6487 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6488 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6489 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6490 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6491 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6492 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6493 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6494 if (target)
6495 return target;
6496 break;
6497
6498 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6499 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6500 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6501 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6502 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6503 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6504 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6505 if (target)
6506 return target;
6507 break;
6508
6509 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6510 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6511 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6512 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6513 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6514 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6515 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6516 if (target)
6517 return target;
6518 break;
6519
6520 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6521 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6522 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6523 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6524 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6525 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6526 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6527 if (target)
6528 return target;
6529 break;
6530
6531 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6532 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6533 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6534 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6535 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6536 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6537 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6538 if (target)
6539 return target;
6540 break;
6541
6542 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6543 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6544 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6545 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6546 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6547 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6548 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6549 if (target)
6550 return target;
6551 break;
6552
6553 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6554 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6555 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6556 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6557 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6558 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6559 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6560 if (target)
6561 return target;
6562 break;
6563
6564 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6565 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6566 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6567 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6568 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6569 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6570 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6571 if (target)
6572 return target;
6573 break;
6574
6575 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6576 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6577 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6578 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6579 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6580 if (mode == VOIDmode)
6581 mode = TYPE_MODE (boolean_type_node);
6582 if (!target || !register_operand (target, mode))
6583 target = gen_reg_rtx (mode);
6584
6585 mode = get_builtin_sync_mode
6586 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6587 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6588 if (target)
6589 return target;
6590 break;
6591
6592 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6593 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6594 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6595 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6596 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6597 mode = get_builtin_sync_mode
6598 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6599 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6600 if (target)
6601 return target;
6602 break;
6603
6604 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6605 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6606 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6607 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6608 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6609 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6610 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6611 if (target)
6612 return target;
6613 break;
6614
6615 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6616 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6617 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6618 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6619 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6620 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6621 expand_builtin_sync_lock_release (mode, exp);
6622 return const0_rtx;
6623
6624 case BUILT_IN_SYNC_SYNCHRONIZE:
6625 expand_builtin_sync_synchronize ();
6626 return const0_rtx;
6627
6628 case BUILT_IN_ATOMIC_EXCHANGE_1:
6629 case BUILT_IN_ATOMIC_EXCHANGE_2:
6630 case BUILT_IN_ATOMIC_EXCHANGE_4:
6631 case BUILT_IN_ATOMIC_EXCHANGE_8:
6632 case BUILT_IN_ATOMIC_EXCHANGE_16:
6633 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6634 target = expand_builtin_atomic_exchange (mode, exp, target);
6635 if (target)
6636 return target;
6637 break;
6638
6639 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6640 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6641 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6642 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6643 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6644 {
6645 unsigned int nargs, z;
6646 vec<tree, va_gc> *vec;
6647
6648 mode =
6649 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6650 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6651 if (target)
6652 return target;
6653
6654 /* If this is turned into an external library call, the weak parameter
6655 must be dropped to match the expected parameter list. */
6656 nargs = call_expr_nargs (exp);
6657 vec_alloc (vec, nargs - 1);
6658 for (z = 0; z < 3; z++)
6659 vec->quick_push (CALL_EXPR_ARG (exp, z));
6660 /* Skip the boolean weak parameter. */
6661 for (z = 4; z < 6; z++)
6662 vec->quick_push (CALL_EXPR_ARG (exp, z));
6663 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6664 break;
6665 }
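
/* Illustration of the argument rewrite above (a sketch, not part of this
   file): a call that cannot be inlined, conceptually

     __atomic_compare_exchange_4 (ptr, expect, desired, weak, succ, fail)

   is rebuilt from arguments 0-2 and 4-5 only, dropping the boolean WEAK
   flag, so that it matches the external library routine's parameter
   list.  */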
6666
6667 case BUILT_IN_ATOMIC_LOAD_1:
6668 case BUILT_IN_ATOMIC_LOAD_2:
6669 case BUILT_IN_ATOMIC_LOAD_4:
6670 case BUILT_IN_ATOMIC_LOAD_8:
6671 case BUILT_IN_ATOMIC_LOAD_16:
6672 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6673 target = expand_builtin_atomic_load (mode, exp, target);
6674 if (target)
6675 return target;
6676 break;
6677
6678 case BUILT_IN_ATOMIC_STORE_1:
6679 case BUILT_IN_ATOMIC_STORE_2:
6680 case BUILT_IN_ATOMIC_STORE_4:
6681 case BUILT_IN_ATOMIC_STORE_8:
6682 case BUILT_IN_ATOMIC_STORE_16:
6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6684 target = expand_builtin_atomic_store (mode, exp);
6685 if (target)
6686 return const0_rtx;
6687 break;
6688
6689 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6690 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6691 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6692 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6693 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6694 {
6695 enum built_in_function lib;
6696 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6697 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6698 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6699 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6700 ignore, lib);
6701 if (target)
6702 return target;
6703 break;
6704 }
6705 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6706 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6707 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6708 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6709 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6710 {
6711 enum built_in_function lib;
6712 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6713 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6714 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6715 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6716 ignore, lib);
6717 if (target)
6718 return target;
6719 break;
6720 }
6721 case BUILT_IN_ATOMIC_AND_FETCH_1:
6722 case BUILT_IN_ATOMIC_AND_FETCH_2:
6723 case BUILT_IN_ATOMIC_AND_FETCH_4:
6724 case BUILT_IN_ATOMIC_AND_FETCH_8:
6725 case BUILT_IN_ATOMIC_AND_FETCH_16:
6726 {
6727 enum built_in_function lib;
6728 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6729 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6730 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6732 ignore, lib);
6733 if (target)
6734 return target;
6735 break;
6736 }
6737 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6738 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6739 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6740 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6741 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6742 {
6743 enum built_in_function lib;
6744 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6745 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6746 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6747 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6748 ignore, lib);
6749 if (target)
6750 return target;
6751 break;
6752 }
6753 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6754 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6755 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6756 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6757 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6758 {
6759 enum built_in_function lib;
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6761 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6762 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6763 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6764 ignore, lib);
6765 if (target)
6766 return target;
6767 break;
6768 }
6769 case BUILT_IN_ATOMIC_OR_FETCH_1:
6770 case BUILT_IN_ATOMIC_OR_FETCH_2:
6771 case BUILT_IN_ATOMIC_OR_FETCH_4:
6772 case BUILT_IN_ATOMIC_OR_FETCH_8:
6773 case BUILT_IN_ATOMIC_OR_FETCH_16:
6774 {
6775 enum built_in_function lib;
6776 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6777 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6778 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6779 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6780 ignore, lib);
6781 if (target)
6782 return target;
6783 break;
6784 }
6785 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6786 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6787 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6788 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6789 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6790 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6791 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6792 ignore, BUILT_IN_NONE);
6793 if (target)
6794 return target;
6795 break;
6796
6797 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6798 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6799 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6800 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6801 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6802 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6803 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6804 ignore, BUILT_IN_NONE);
6805 if (target)
6806 return target;
6807 break;
6808
6809 case BUILT_IN_ATOMIC_FETCH_AND_1:
6810 case BUILT_IN_ATOMIC_FETCH_AND_2:
6811 case BUILT_IN_ATOMIC_FETCH_AND_4:
6812 case BUILT_IN_ATOMIC_FETCH_AND_8:
6813 case BUILT_IN_ATOMIC_FETCH_AND_16:
6814 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6815 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6816 ignore, BUILT_IN_NONE);
6817 if (target)
6818 return target;
6819 break;
6820
6821 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6822 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6823 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6824 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6825 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6826 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6827 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6828 ignore, BUILT_IN_NONE);
6829 if (target)
6830 return target;
6831 break;
6832
6833 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6834 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6835 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6836 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6837 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6838 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6839 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6840 ignore, BUILT_IN_NONE);
6841 if (target)
6842 return target;
6843 break;
6844
6845 case BUILT_IN_ATOMIC_FETCH_OR_1:
6846 case BUILT_IN_ATOMIC_FETCH_OR_2:
6847 case BUILT_IN_ATOMIC_FETCH_OR_4:
6848 case BUILT_IN_ATOMIC_FETCH_OR_8:
6849 case BUILT_IN_ATOMIC_FETCH_OR_16:
6850 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6851 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6852 ignore, BUILT_IN_NONE);
6853 if (target)
6854 return target;
6855 break;
6856
6857 case BUILT_IN_ATOMIC_TEST_AND_SET:
6858 return expand_builtin_atomic_test_and_set (exp, target);
6859
6860 case BUILT_IN_ATOMIC_CLEAR:
6861 return expand_builtin_atomic_clear (exp);
6862
6863 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6864 return expand_builtin_atomic_always_lock_free (exp);
6865
6866 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6867 target = expand_builtin_atomic_is_lock_free (exp);
6868 if (target)
6869 return target;
6870 break;
6871
6872 case BUILT_IN_ATOMIC_THREAD_FENCE:
6873 expand_builtin_atomic_thread_fence (exp);
6874 return const0_rtx;
6875
6876 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6877 expand_builtin_atomic_signal_fence (exp);
6878 return const0_rtx;
6879
6880 case BUILT_IN_OBJECT_SIZE:
6881 return expand_builtin_object_size (exp);
6882
6883 case BUILT_IN_MEMCPY_CHK:
6884 case BUILT_IN_MEMPCPY_CHK:
6885 case BUILT_IN_MEMMOVE_CHK:
6886 case BUILT_IN_MEMSET_CHK:
6887 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6888 if (target)
6889 return target;
6890 break;
6891
6892 case BUILT_IN_STRCPY_CHK:
6893 case BUILT_IN_STPCPY_CHK:
6894 case BUILT_IN_STRNCPY_CHK:
6895 case BUILT_IN_STPNCPY_CHK:
6896 case BUILT_IN_STRCAT_CHK:
6897 case BUILT_IN_STRNCAT_CHK:
6898 case BUILT_IN_SNPRINTF_CHK:
6899 case BUILT_IN_VSNPRINTF_CHK:
6900 maybe_emit_chk_warning (exp, fcode);
6901 break;
6902
6903 case BUILT_IN_SPRINTF_CHK:
6904 case BUILT_IN_VSPRINTF_CHK:
6905 maybe_emit_sprintf_chk_warning (exp, fcode);
6906 break;
6907
6908 case BUILT_IN_FREE:
6909 if (warn_free_nonheap_object)
6910 maybe_emit_free_warning (exp);
6911 break;
6912
6913 case BUILT_IN_THREAD_POINTER:
6914 return expand_builtin_thread_pointer (exp, target);
6915
6916 case BUILT_IN_SET_THREAD_POINTER:
6917 expand_builtin_set_thread_pointer (exp);
6918 return const0_rtx;
6919
6920 case BUILT_IN_CILK_DETACH:
6921 expand_builtin_cilk_detach (exp);
6922 return const0_rtx;
6923
6924 case BUILT_IN_CILK_POP_FRAME:
6925 expand_builtin_cilk_pop_frame (exp);
6926 return const0_rtx;
6927
6928 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6929 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6930 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6931 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6932 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6933 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6934 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6935 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6936 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6937 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6938 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6939 /* We allow user CHKP builtins if Pointer Bounds
6940 Checker is off. */
6941 if (!chkp_function_instrumented_p (current_function_decl))
6942 {
6943 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6944 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6945 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6946 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6947 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6948 return expand_normal (CALL_EXPR_ARG (exp, 0));
6949 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6950 return expand_normal (size_zero_node);
6951 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6952 return expand_normal (size_int (-1));
6953 else
6954 return const0_rtx;
6955 }
6956 /* FALLTHROUGH */
6957
6958 case BUILT_IN_CHKP_BNDMK:
6959 case BUILT_IN_CHKP_BNDSTX:
6960 case BUILT_IN_CHKP_BNDCL:
6961 case BUILT_IN_CHKP_BNDCU:
6962 case BUILT_IN_CHKP_BNDLDX:
6963 case BUILT_IN_CHKP_BNDRET:
6964 case BUILT_IN_CHKP_INTERSECT:
6965 case BUILT_IN_CHKP_NARROW:
6966 case BUILT_IN_CHKP_EXTRACT_LOWER:
6967 case BUILT_IN_CHKP_EXTRACT_UPPER:
6968 /* A software implementation of the Pointer Bounds Checker is not yet
6969 implemented; target support is required. */
6970 error ("your target platform does not support -fcheck-pointer-bounds");
6971 break;
6972
6973 case BUILT_IN_ACC_ON_DEVICE:
6974 /* Fall back to a library call if we failed to expand the builtin
6975 when folding. */
6976 break;
6977
6978 default: /* just do a library call for any unknown builtin */
6979 break;
6980 }
6981
6982 /* The switch statement above can drop through to cause the function
6983 to be called normally. */
6984 return expand_call (exp, target, ignore);
6985 }
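/* A minimal standalone sketch (not part of GCC) of why the
   ADD_FETCH..OR_FETCH cases above can fall back to the corresponding
   FETCH_ADD..FETCH_OR libcalls: for these operations the op-fetch
   result is just the fetch-op result with the operation applied once
   more (NAND needs an extra complement, which
   expand_builtin_atomic_fetch_op arranges).  Assumes a compiler
   providing the C11-style __atomic builtins; guarded with #if 0
   since it is illustrative only.  */
#if 0
#include <assert.h>

static int counter;

static int
add_fetch_via_fetch_add (int v)
{
  /* Old value from the fetch-op form...  */
  int old = __atomic_fetch_add (&counter, v, __ATOMIC_SEQ_CST);
  /* ...plus the operand gives the op-fetch result.  */
  return old + v;
}

int
main (void)
{
  counter = 0;
  int r1 = add_fetch_via_fetch_add (5);
  counter = 0;
  int r2 = __atomic_add_fetch (&counter, 5, __ATOMIC_SEQ_CST);
  assert (r1 == r2 && r1 == 5);
  return 0;
}
#endif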
6986
6987 /* Similar to expand_builtin but is used for instrumented calls. */
6988
6989 rtx
6990 expand_builtin_with_bounds (tree exp, rtx target,
6991 rtx subtarget ATTRIBUTE_UNUSED,
6992 machine_mode mode, int ignore)
6993 {
6994 tree fndecl = get_callee_fndecl (exp);
6995 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6996
6997 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6998
6999 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7000 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7001
7002 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7003 && fcode < END_CHKP_BUILTINS);
7004
7005 switch (fcode)
7006 {
7007 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7008 target = expand_builtin_memcpy_with_bounds (exp, target);
7009 if (target)
7010 return target;
7011 break;
7012
7013 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7014 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7015 if (target)
7016 return target;
7017 break;
7018
7019 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7020 target = expand_builtin_memset_with_bounds (exp, target, mode);
7021 if (target)
7022 return target;
7023 break;
7024
7025 default:
7026 break;
7027 }
7028
7029 /* The switch statement above can drop through to cause the function
7030 to be called normally. */
7031 return expand_call (exp, target, ignore);
7032 }
7033
7034 /* Determine whether a tree node represents a call to a built-in
7035 function. If the tree T is a call to a built-in function with
7036 the right number of arguments of the appropriate types, return
7037 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7038 Otherwise the return value is END_BUILTINS. */
7039
7040 enum built_in_function
7041 builtin_mathfn_code (const_tree t)
7042 {
7043 const_tree fndecl, arg, parmlist;
7044 const_tree argtype, parmtype;
7045 const_call_expr_arg_iterator iter;
7046
7047 if (TREE_CODE (t) != CALL_EXPR
7048 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7049 return END_BUILTINS;
7050
7051 fndecl = get_callee_fndecl (t);
7052 if (fndecl == NULL_TREE
7053 || TREE_CODE (fndecl) != FUNCTION_DECL
7054 || ! DECL_BUILT_IN (fndecl)
7055 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7056 return END_BUILTINS;
7057
7058 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7059 init_const_call_expr_arg_iterator (t, &iter);
7060 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7061 {
7062 /* If a function doesn't take a variable number of arguments,
7063 the last element in the list will have type `void'. */
7064 parmtype = TREE_VALUE (parmlist);
7065 if (VOID_TYPE_P (parmtype))
7066 {
7067 if (more_const_call_expr_args_p (&iter))
7068 return END_BUILTINS;
7069 return DECL_FUNCTION_CODE (fndecl);
7070 }
7071
7072 if (! more_const_call_expr_args_p (&iter))
7073 return END_BUILTINS;
7074
7075 arg = next_const_call_expr_arg (&iter);
7076 argtype = TREE_TYPE (arg);
7077
7078 if (SCALAR_FLOAT_TYPE_P (parmtype))
7079 {
7080 if (! SCALAR_FLOAT_TYPE_P (argtype))
7081 return END_BUILTINS;
7082 }
7083 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7084 {
7085 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7086 return END_BUILTINS;
7087 }
7088 else if (POINTER_TYPE_P (parmtype))
7089 {
7090 if (! POINTER_TYPE_P (argtype))
7091 return END_BUILTINS;
7092 }
7093 else if (INTEGRAL_TYPE_P (parmtype))
7094 {
7095 if (! INTEGRAL_TYPE_P (argtype))
7096 return END_BUILTINS;
7097 }
7098 else
7099 return END_BUILTINS;
7100 }
7101
7102 /* Variable-length argument list. */
7103 return DECL_FUNCTION_CODE (fndecl);
7104 }
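/* The argument/parameter walk above, in miniature: a fixed-arity
   parameter list is terminated by a 'void' entry, so a call matches
   iff every argument agrees with its parameter's type class and both
   lists run out together (the varargs case, where the list ends
   without 'void', is omitted here).  A minimal sketch with
   hypothetical type tags, not GCC's tree types.  */
#if 0
#include <stdbool.h>
#include <stddef.h>

enum tclass { TC_VOID, TC_FLOAT, TC_COMPLEX, TC_POINTER, TC_INT };

static bool
signature_matches (const enum tclass *parms,  /* TC_VOID-terminated */
                   const enum tclass *args, size_t nargs)
{
  size_t i = 0;
  for (; *parms != TC_VOID; parms++, i++)
    {
      if (i >= nargs)           /* too few arguments */
        return false;
      if (args[i] != *parms)    /* wrong type class */
        return false;
    }
  return i == nargs;            /* no excess arguments */
}

int
main (void)
{
  enum tclass sqrt_sig[] = { TC_FLOAT, TC_VOID };  /* double sqrt (double) */
  enum tclass one_float[] = { TC_FLOAT };
  return signature_matches (sqrt_sig, one_float, 1) ? 0 : 1;
}
#endif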
7105
7106 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7107 evaluate to a constant. */
7108
7109 static tree
7110 fold_builtin_constant_p (tree arg)
7111 {
7112 /* We return 1 for a numeric type that's known to be a constant
7113 value at compile-time or for an aggregate type that's a
7114 literal constant. */
7115 STRIP_NOPS (arg);
7116
7117 /* If we know this is a constant, return the constant one. */
7118 if (CONSTANT_CLASS_P (arg)
7119 || (TREE_CODE (arg) == CONSTRUCTOR
7120 && TREE_CONSTANT (arg)))
7121 return integer_one_node;
7122 if (TREE_CODE (arg) == ADDR_EXPR)
7123 {
7124 tree op = TREE_OPERAND (arg, 0);
7125 if (TREE_CODE (op) == STRING_CST
7126 || (TREE_CODE (op) == ARRAY_REF
7127 && integer_zerop (TREE_OPERAND (op, 1))
7128 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7129 return integer_one_node;
7130 }
7131
7132 /* If this expression has side effects, show we don't know it to be a
7133 constant. Likewise if it's a pointer or aggregate type since in
7134 those cases we only want literals, since those are only optimized
7135 when generating RTL, not later.
7136 And finally, if we are compiling an initializer, not code, we
7137 need to return a definite result now; there's not going to be any
7138 more optimization done. */
7139 if (TREE_SIDE_EFFECTS (arg)
7140 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7141 || POINTER_TYPE_P (TREE_TYPE (arg))
7142 || cfun == 0
7143 || folding_initializer
7144 || force_folding_builtin_constant_p)
7145 return integer_zero_node;
7146
7147 return NULL_TREE;
7148 }
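/* Observable behaviour of the folding above, as a standalone program
   (a sketch, not part of GCC; the first two results follow from the
   CONSTANT_CLASS_P and ADDR_EXPR-of-STRING_CST tests, the last from
   the TREE_SIDE_EFFECTS test).  */
#if 0
#include <stdio.h>

int
main (void)
{
  int x = 42;
  printf ("%d\n", __builtin_constant_p (42));     /* 1: a literal constant */
  printf ("%d\n", __builtin_constant_p ("abc"));  /* 1: address of a string */
  printf ("%d\n", __builtin_constant_p (x++));    /* 0: has side effects */
  return 0;
}
#endif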
7149
7150 /* Create a call to builtin_expect with PRED, EXPECTED and PREDICTOR
7151 as its arguments and return it as a truthvalue. */
7152
7153 static tree
7154 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7155 tree predictor)
7156 {
7157 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7158
7159 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7160 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7161 ret_type = TREE_TYPE (TREE_TYPE (fn));
7162 pred_type = TREE_VALUE (arg_types);
7163 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7164
7165 pred = fold_convert_loc (loc, pred_type, pred);
7166 expected = fold_convert_loc (loc, expected_type, expected);
7167 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7168 predictor);
7169
7170 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7171 build_int_cst (ret_type, 0));
7172 }
7173
7174 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7175 Return NULL_TREE if no simplification is possible. */
7176
7177 tree
7178 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7179 {
7180 tree inner, fndecl, inner_arg0;
7181 enum tree_code code;
7182
7183 /* Distribute the expected value over short-circuiting operators.
7184 See through the cast from truthvalue_type_node to long. */
7185 inner_arg0 = arg0;
7186 while (CONVERT_EXPR_P (inner_arg0)
7187 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7188 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7189 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7190
7191 /* If this is a builtin_expect within a builtin_expect keep the
7192 inner one. See through a comparison against a constant. It
7193 might have been added to create a truthvalue. */
7194 inner = inner_arg0;
7195
7196 if (COMPARISON_CLASS_P (inner)
7197 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7198 inner = TREE_OPERAND (inner, 0);
7199
7200 if (TREE_CODE (inner) == CALL_EXPR
7201 && (fndecl = get_callee_fndecl (inner))
7202 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7203 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7204 return arg0;
7205
7206 inner = inner_arg0;
7207 code = TREE_CODE (inner);
7208 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7209 {
7210 tree op0 = TREE_OPERAND (inner, 0);
7211 tree op1 = TREE_OPERAND (inner, 1);
7212
7213 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7214 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7215 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7216
7217 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7218 }
7219
7220 /* If the argument isn't invariant then there's nothing else we can do. */
7221 if (!TREE_CONSTANT (inner_arg0))
7222 return NULL_TREE;
7223
7224 /* If we expect that a comparison against the argument will fold to
7225 a constant, return the constant. In practice, this means a true
7226 constant or the address of a non-weak symbol. */
7227 inner = inner_arg0;
7228 STRIP_NOPS (inner);
7229 if (TREE_CODE (inner) == ADDR_EXPR)
7230 {
7231 do
7232 {
7233 inner = TREE_OPERAND (inner, 0);
7234 }
7235 while (TREE_CODE (inner) == COMPONENT_REF
7236 || TREE_CODE (inner) == ARRAY_REF);
7237 if ((TREE_CODE (inner) == VAR_DECL
7238 || TREE_CODE (inner) == FUNCTION_DECL)
7239 && DECL_WEAK (inner))
7240 return NULL_TREE;
7241 }
7242
7243 /* Otherwise, ARG0 already has the proper type for the return value. */
7244 return arg0;
7245 }
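/* The TRUTH_ANDIF/TRUTH_ORIF distribution above at the source level:
   a hint on a short-circuit expression becomes a hint on each
   operand.  'likely' is a hypothetical convenience macro used only
   for this sketch.  */
#if 0
#define likely(x) __builtin_expect (!!(x), 1)

int
f (int a, int b)
{
  /* Folded roughly as if written
       __builtin_expect (a != 0, 1) && __builtin_expect (b != 0, 1)
     so both tests inherit the prediction.  */
  if (likely (a && b))
    return 1;
  return 0;
}
#endif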
7246
7247 /* Fold a call to __builtin_classify_type with argument ARG. */
7248
7249 static tree
7250 fold_builtin_classify_type (tree arg)
7251 {
7252 if (arg == 0)
7253 return build_int_cst (integer_type_node, no_type_class);
7254
7255 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7256 }
7257
7258 /* Fold a call to __builtin_strlen with argument ARG. */
7259
7260 static tree
7261 fold_builtin_strlen (location_t loc, tree type, tree arg)
7262 {
7263 if (!validate_arg (arg, POINTER_TYPE))
7264 return NULL_TREE;
7265 else
7266 {
7267 tree len = c_strlen (arg, 0);
7268
7269 if (len)
7270 return fold_convert_loc (loc, type, len);
7271
7272 return NULL_TREE;
7273 }
7274 }
7275
7276 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7277
7278 static tree
7279 fold_builtin_inf (location_t loc, tree type, int warn)
7280 {
7281 REAL_VALUE_TYPE real;
7282
7283 /* __builtin_inff is intended to be usable to define INFINITY on all
7284 targets. If an infinity is not available, INFINITY expands "to a
7285 positive constant of type float that overflows at translation
7286 time", footnote "In this case, using INFINITY will violate the
7287 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7288 Thus we pedwarn to ensure this constraint violation is
7289 diagnosed. */
7290 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7291 pedwarn (loc, 0, "target format does not support infinity");
7292
7293 real_inf (&real);
7294 return build_real (type, real);
7295 }
7296
7297 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7298
7299 static tree
7300 fold_builtin_nan (tree arg, tree type, int quiet)
7301 {
7302 REAL_VALUE_TYPE real;
7303 const char *str;
7304
7305 if (!validate_arg (arg, POINTER_TYPE))
7306 return NULL_TREE;
7307 str = c_getstr (arg);
7308 if (!str)
7309 return NULL_TREE;
7310
7311 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7312 return NULL_TREE;
7313
7314 return build_real (type, real);
7315 }
7316
7317 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7318 the argument, for instance lround((double)f) -> lroundf (f).
7319 Do the transformation for a call with argument ARG. */
7320
7321 static tree
7322 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7323 {
7324 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7325
7326 if (!validate_arg (arg, REAL_TYPE))
7327 return NULL_TREE;
7328
7329 /* If argument is already integer valued, and we don't need to worry
7330 about setting errno, there's no need to perform rounding. */
7331 if (! flag_errno_math && integer_valued_real_p (arg))
7332 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7333 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7334
7335 if (optimize)
7336 {
7337 tree ftype = TREE_TYPE (arg);
7338 tree arg0 = strip_float_extensions (arg);
7339 tree newtype = TREE_TYPE (arg0);
7340 tree decl;
7341
7342 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7343 && (decl = mathfn_built_in (newtype, fcode)))
7344 return build_call_expr_loc (loc, decl, 1,
7345 fold_convert_loc (loc, newtype, arg0));
7346 }
7347
7348 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7349 sizeof (int) == sizeof (long). */
7350 if (TYPE_PRECISION (integer_type_node)
7351 == TYPE_PRECISION (long_integer_type_node))
7352 {
7353 tree newfn = NULL_TREE;
7354 switch (fcode)
7355 {
7356 CASE_FLT_FN (BUILT_IN_ICEIL):
7357 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7358 break;
7359
7360 CASE_FLT_FN (BUILT_IN_IFLOOR):
7361 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7362 break;
7363
7364 CASE_FLT_FN (BUILT_IN_IROUND):
7365 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7366 break;
7367
7368 CASE_FLT_FN (BUILT_IN_IRINT):
7369 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7370 break;
7371
7372 default:
7373 break;
7374 }
7375
7376 if (newfn)
7377 {
7378 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7379 return fold_convert_loc (loc,
7380 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7381 }
7382 }
7383
7384 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7385 sizeof (long long) == sizeof (long). */
7386 if (TYPE_PRECISION (long_long_integer_type_node)
7387 == TYPE_PRECISION (long_integer_type_node))
7388 {
7389 tree newfn = NULL_TREE;
7390 switch (fcode)
7391 {
7392 CASE_FLT_FN (BUILT_IN_LLCEIL):
7393 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7394 break;
7395
7396 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7397 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7398 break;
7399
7400 CASE_FLT_FN (BUILT_IN_LLROUND):
7401 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7402 break;
7403
7404 CASE_FLT_FN (BUILT_IN_LLRINT):
7405 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7406 break;
7407
7408 default:
7409 break;
7410 }
7411
7412 if (newfn)
7413 {
7414 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7415 return fold_convert_loc (loc,
7416 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7417 }
7418 }
7419
7420 return NULL_TREE;
7421 }
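/* The two canonicalizations above, written out (a sketch; the LP64
   remark assumes sizeof (long) == sizeof (long long)).  */
#if 0
#include <math.h>

long
narrowed (float f)
{
  /* lround ((double) f) can become lroundf (f): widening f cannot
     change the value being rounded.  */
  return lround ((double) f);
}

long long
canonicalized (double d)
{
  /* On LP64 targets llround (d) can be expressed via lround (d),
     since both integer types have the same precision.  */
  return llround (d);
}
#endif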
7422
7423 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7424 NULL_TREE if no simplification can be made. */
7425
7426 static tree
7427 fold_builtin_sincos (location_t loc,
7428 tree arg0, tree arg1, tree arg2)
7429 {
7430 tree type;
7431 tree res, fn, call;
7432
7433 if (!validate_arg (arg0, REAL_TYPE)
7434 || !validate_arg (arg1, POINTER_TYPE)
7435 || !validate_arg (arg2, POINTER_TYPE))
7436 return NULL_TREE;
7437
7438 type = TREE_TYPE (arg0);
7439
7440 /* Calculate the result when the argument is a constant. */
7441 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7442 return res;
7443
7444 /* Canonicalize sincos to cexpi. */
7445 if (!targetm.libc_has_function (function_c99_math_complex))
7446 return NULL_TREE;
7447 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7448 if (!fn)
7449 return NULL_TREE;
7450
7451 call = build_call_expr_loc (loc, fn, 1, arg0);
7452 call = builtin_save_expr (call);
7453
7454 return build2 (COMPOUND_EXPR, void_type_node,
7455 build2 (MODIFY_EXPR, void_type_node,
7456 build_fold_indirect_ref_loc (loc, arg1),
7457 build1 (IMAGPART_EXPR, type, call)),
7458 build2 (MODIFY_EXPR, void_type_node,
7459 build_fold_indirect_ref_loc (loc, arg2),
7460 build1 (REALPART_EXPR, type, call)));
7461 }
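/* The sincos -> cexpi canonicalization rests on Euler's formula,
   cexp (I*x) = cos (x) + I*sin (x), so one complex call produces
   both results.  A sketch using the C99 cexp; GCC's internal
   BUILT_IN_CEXPI evaluates cexp (I*x) without materializing the
   multiplication.  */
#if 0
#include <complex.h>

void
sincos_via_cexp (double x, double *sinp, double *cosp)
{
  double complex e = cexp (I * x);
  *sinp = cimag (e);  /* stored through arg1 above */
  *cosp = creal (e);  /* stored through arg2 above */
}
#endif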
7462
7463 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7464 NULL_TREE if no simplification can be made. */
7465
7466 static tree
7467 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7468 {
7469 tree rtype;
7470 tree realp, imagp, ifn;
7471 tree res;
7472
7473 if (!validate_arg (arg0, COMPLEX_TYPE)
7474 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7475 return NULL_TREE;
7476
7477 /* Calculate the result when the argument is a constant. */
7478 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7479 return res;
7480
7481 rtype = TREE_TYPE (TREE_TYPE (arg0));
7482
7483 /* If we can determine that the real part of arg0 is a constant zero,
7484 fold to cexpi. */
7485 if (!targetm.libc_has_function (function_c99_math_complex))
7486 return NULL_TREE;
7487 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7488 if (!ifn)
7489 return NULL_TREE;
7490
7491 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7492 && real_zerop (realp))
7493 {
7494 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7495 return build_call_expr_loc (loc, ifn, 1, narg);
7496 }
7497
7498 /* If we can easily decompose the real and imaginary parts, split cexp
7499 into exp (r) * cexpi (i). */
7500 if (flag_unsafe_math_optimizations
7501 && realp)
7502 {
7503 tree rfn, rcall, icall;
7504
7505 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7506 if (!rfn)
7507 return NULL_TREE;
7508
7509 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7510 if (!imagp)
7511 return NULL_TREE;
7512
7513 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7514 icall = builtin_save_expr (icall);
7515 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7516 rcall = builtin_save_expr (rcall);
7517 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7518 fold_build2_loc (loc, MULT_EXPR, rtype,
7519 rcall,
7520 fold_build1_loc (loc, REALPART_EXPR,
7521 rtype, icall)),
7522 fold_build2_loc (loc, MULT_EXPR, rtype,
7523 rcall,
7524 fold_build1_loc (loc, IMAGPART_EXPR,
7525 rtype, icall)));
7526 }
7527
7528 return NULL_TREE;
7529 }
7530
7531 /* Fold function call to builtin lround, lroundf or lroundl (or the
7532 corresponding long long versions) and other rounding functions. ARG
7533 is the argument to the call. Return NULL_TREE if no simplification
7534 can be made. */
7535
7536 static tree
7537 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7538 {
7539 if (!validate_arg (arg, REAL_TYPE))
7540 return NULL_TREE;
7541
7542 /* Optimize lround of constant value. */
7543 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7544 {
7545 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7546
7547 if (real_isfinite (&x))
7548 {
7549 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7550 tree ftype = TREE_TYPE (arg);
7551 REAL_VALUE_TYPE r;
7552 bool fail = false;
7553
7554 switch (DECL_FUNCTION_CODE (fndecl))
7555 {
7556 CASE_FLT_FN (BUILT_IN_IFLOOR):
7557 CASE_FLT_FN (BUILT_IN_LFLOOR):
7558 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7559 real_floor (&r, TYPE_MODE (ftype), &x);
7560 break;
7561
7562 CASE_FLT_FN (BUILT_IN_ICEIL):
7563 CASE_FLT_FN (BUILT_IN_LCEIL):
7564 CASE_FLT_FN (BUILT_IN_LLCEIL):
7565 real_ceil (&r, TYPE_MODE (ftype), &x);
7566 break;
7567
7568 CASE_FLT_FN (BUILT_IN_IROUND):
7569 CASE_FLT_FN (BUILT_IN_LROUND):
7570 CASE_FLT_FN (BUILT_IN_LLROUND):
7571 real_round (&r, TYPE_MODE (ftype), &x);
7572 break;
7573
7574 default:
7575 gcc_unreachable ();
7576 }
7577
7578 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
7579 if (!fail)
7580 return wide_int_to_tree (itype, val);
7581 }
7582 }
7583
7584 switch (DECL_FUNCTION_CODE (fndecl))
7585 {
7586 CASE_FLT_FN (BUILT_IN_LFLOOR):
7587 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7588 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7589 if (tree_expr_nonnegative_p (arg))
7590 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7591 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7592 break;
7593 default:;
7594 }
7595
7596 return fold_fixed_mathfn (loc, fndecl, arg);
7597 }
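/* The constant folding above, checked at run time (a sketch;
   real_round rounds halfway cases away from zero, matching C99
   lround, and lfloor of a nonnegative value is plain truncation).  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (lround (2.5) == 3);             /* halfway: away from zero */
  assert (lround (-2.5) == -3);
  double x = 3.7;                         /* nonnegative */
  assert ((long) floor (x) == (long) x);  /* lfloor == FIX_TRUNC here */
  return 0;
}
#endif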
7598
7599 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7600 and their long and long long variants (e.g. ffsl and ffsll). ARG is
7601 the argument to the call. Return NULL_TREE if no simplification can
7602 be made. */
7603
7604 static tree
7605 fold_builtin_bitop (tree fndecl, tree arg)
7606 {
7607 if (!validate_arg (arg, INTEGER_TYPE))
7608 return NULL_TREE;
7609
7610 /* Optimize for constant argument. */
7611 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7612 {
7613 tree type = TREE_TYPE (arg);
7614 int result;
7615
7616 switch (DECL_FUNCTION_CODE (fndecl))
7617 {
7618 CASE_INT_FN (BUILT_IN_FFS):
7619 result = wi::ffs (arg);
7620 break;
7621
7622 CASE_INT_FN (BUILT_IN_CLZ):
7623 if (wi::ne_p (arg, 0))
7624 result = wi::clz (arg);
7625 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7626 result = TYPE_PRECISION (type);
7627 break;
7628
7629 CASE_INT_FN (BUILT_IN_CTZ):
7630 if (wi::ne_p (arg, 0))
7631 result = wi::ctz (arg);
7632 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7633 result = TYPE_PRECISION (type);
7634 break;
7635
7636 CASE_INT_FN (BUILT_IN_CLRSB):
7637 result = wi::clrsb (arg);
7638 break;
7639
7640 CASE_INT_FN (BUILT_IN_POPCOUNT):
7641 result = wi::popcount (arg);
7642 break;
7643
7644 CASE_INT_FN (BUILT_IN_PARITY):
7645 result = wi::parity (arg);
7646 break;
7647
7648 default:
7649 gcc_unreachable ();
7650 }
7651
7652 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7653 }
7654
7655 return NULL_TREE;
7656 }
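/* The constant results the folding above produces (a sketch; the clz
   line assumes a 32-bit unsigned int, and clz/ctz of zero remain
   undefined at the source level -- the folding only substitutes
   TYPE_PRECISION when the target defines a value at zero).  */
#if 0
#include <assert.h>

int
main (void)
{
  assert (__builtin_ffs (0x10) == 5);       /* lowest set bit, 1-based */
  assert (__builtin_clz (1u) == 31);
  assert (__builtin_ctz (8u) == 3);
  assert (__builtin_popcount (0xffu) == 8);
  assert (__builtin_parity (0x7u) == 1);
  return 0;
}
#endif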
7657
7658 /* Fold function call to builtin_bswap and the short, long and long long
7659 variants. Return NULL_TREE if no simplification can be made. */
7660 static tree
7661 fold_builtin_bswap (tree fndecl, tree arg)
7662 {
7663 if (! validate_arg (arg, INTEGER_TYPE))
7664 return NULL_TREE;
7665
7666 /* Optimize constant value. */
7667 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7668 {
7669 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7670
7671 switch (DECL_FUNCTION_CODE (fndecl))
7672 {
7673 case BUILT_IN_BSWAP16:
7674 case BUILT_IN_BSWAP32:
7675 case BUILT_IN_BSWAP64:
7676 {
7677 signop sgn = TYPE_SIGN (type);
7678 tree result =
7679 wide_int_to_tree (type,
7680 wide_int::from (arg, TYPE_PRECISION (type),
7681 sgn).bswap ());
7682 return result;
7683 }
7684 default:
7685 gcc_unreachable ();
7686 }
7687 }
7688
7689 return NULL_TREE;
7690 }
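/* Byte-swap constant folding at the source level (a sketch;
   wide_int::bswap above reverses the bytes within the result type's
   precision).  */
#if 0
#include <assert.h>

int
main (void)
{
  assert (__builtin_bswap16 (0x1234) == 0x3412);
  assert (__builtin_bswap32 (0x12345678u) == 0x78563412u);
  assert (__builtin_bswap64 (0x0102030405060708ull) == 0x0807060504030201ull);
  return 0;
}
#endif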
7691
7692 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7693 NULL_TREE if no simplification can be made. */
7694
7695 static tree
7696 fold_builtin_hypot (location_t loc, tree arg0, tree arg1, tree type)
7697 {
7698 tree res;
7699
7700 if (!validate_arg (arg0, REAL_TYPE)
7701 || !validate_arg (arg1, REAL_TYPE))
7702 return NULL_TREE;
7703
7704 /* Calculate the result when the argument is a constant. */
7705 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7706 return res;
7707
7708 /* If either argument is zero, hypot is fabs of the other. */
7709 if (real_zerop (arg0))
7710 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7711 else if (real_zerop (arg1))
7712 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7713
7714 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7715 if (flag_unsafe_math_optimizations
7716 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7717 return fold_build2_loc (loc, MULT_EXPR, type,
7718 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7719 build_real_truncate (type, dconst_sqrt2 ()));
7720
7721 return NULL_TREE;
7722 }
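/* The hypot identities above, written out (a sketch): the zero cases
   are exact, while hypot (x, x) -> fabs (x) * sqrt (2) is guarded by
   flag_unsafe_math_optimizations because the multiply may round
   differently from a correctly rounded hypot.  */
#if 0
#include <math.h>

double zero_case (double x) { return hypot (x, 0.0); }  /* == fabs (x) */
double same_case (double x) { return hypot (x, x); }    /* ~= fabs (x) * sqrt (2) */
#endif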
7723
7724
7725 /* Fold a builtin function call to pow, powf, or powl. Return
7726 NULL_TREE if no simplification can be made. */
7727 static tree
7728 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7729 {
7730 tree res;
7731
7732 if (!validate_arg (arg0, REAL_TYPE)
7733 || !validate_arg (arg1, REAL_TYPE))
7734 return NULL_TREE;
7735
7736 /* Calculate the result when the argument is a constant. */
7737 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7738 return res;
7739
7740 /* Optimize pow(1.0,y) = 1.0. */
7741 if (real_onep (arg0))
7742 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7743
7744 if (TREE_CODE (arg1) == REAL_CST
7745 && !TREE_OVERFLOW (arg1))
7746 {
7747 REAL_VALUE_TYPE cint;
7748 REAL_VALUE_TYPE c;
7749 HOST_WIDE_INT n;
7750
7751 c = TREE_REAL_CST (arg1);
7752
7753 /* Optimize pow(x,0.0) = 1.0. */
7754 if (real_equal (&c, &dconst0))
7755 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7756 arg0);
7757
7758 /* Optimize pow(x,1.0) = x. */
7759 if (real_equal (&c, &dconst1))
7760 return arg0;
7761
7762 /* Optimize pow(x,-1.0) = 1.0/x. */
7763 if (real_equal (&c, &dconstm1))
7764 return fold_build2_loc (loc, RDIV_EXPR, type,
7765 build_real (type, dconst1), arg0);
7766
7767 /* Optimize pow(x,0.5) = sqrt(x). */
7768 if (flag_unsafe_math_optimizations
7769 && real_equal (&c, &dconsthalf))
7770 {
7771 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7772
7773 if (sqrtfn != NULL_TREE)
7774 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7775 }
7776
7777 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7778 if (flag_unsafe_math_optimizations)
7779 {
7780 const REAL_VALUE_TYPE dconstroot
7781 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7782
7783 if (real_equal (&c, &dconstroot))
7784 {
7785 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7786 if (cbrtfn != NULL_TREE)
7787 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7788 }
7789 }
7790
7791 /* Check for an integer exponent. */
7792 n = real_to_integer (&c);
7793 real_from_integer (&cint, VOIDmode, n, SIGNED);
7794 if (real_identical (&c, &cint))
7795 {
7796 /* Attempt to evaluate pow at compile-time, unless this should
7797 raise an exception. */
7798 if (TREE_CODE (arg0) == REAL_CST
7799 && !TREE_OVERFLOW (arg0)
7800 && (n > 0
7801 || (!flag_trapping_math && !flag_errno_math)
7802 || !real_equal (&TREE_REAL_CST (arg0), &dconst0)))
7803 {
7804 REAL_VALUE_TYPE x;
7805 bool inexact;
7806
7807 x = TREE_REAL_CST (arg0);
7808 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7809 if (flag_unsafe_math_optimizations || !inexact)
7810 return build_real (type, x);
7811 }
7812 }
7813 }
7814
7815 if (flag_unsafe_math_optimizations)
7816 {
7817 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7818
7819 /* Optimize pow(expN(x),y) = expN(x*y). */
7820 if (BUILTIN_EXPONENT_P (fcode))
7821 {
7822 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7823 tree arg = CALL_EXPR_ARG (arg0, 0);
7824 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7825 return build_call_expr_loc (loc, expfn, 1, arg);
7826 }
7827
7828 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7829 if (BUILTIN_SQRT_P (fcode))
7830 {
7831 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7832 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7833 build_real (type, dconsthalf));
7834 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7835 }
7836
7837 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7838 if (BUILTIN_CBRT_P (fcode))
7839 {
7840 tree arg = CALL_EXPR_ARG (arg0, 0);
7841 if (tree_expr_nonnegative_p (arg))
7842 {
7843 tree c = build_real_truncate (type, dconst_third ());
7844 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1, c);
7845 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7846 }
7847 }
7848
7849 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7850 if (fcode == BUILT_IN_POW
7851 || fcode == BUILT_IN_POWF
7852 || fcode == BUILT_IN_POWL)
7853 {
7854 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7855 if (tree_expr_nonnegative_p (arg00))
7856 {
7857 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7858 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7859 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7860 }
7861 }
7862 }
7863
7864 return NULL_TREE;
7865 }
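/* The unsafe pow rewrites above at the source level (a sketch).  Each
   identity holds mathematically but can change rounding or behaviour
   at corner cases (negative bases, Inf, NaN, errno), hence the
   flag_unsafe_math_optimizations and nonnegativity guards.  */
#if 0
#include <math.h>

double r1 (double x)           { return pow (x, 0.5); }      /* -> sqrt (x) */
double r2 (double x, double y) { return pow (exp (x), y); }  /* -> exp (x*y) */
double r3 (double x, double y) { return pow (sqrt (x), y); } /* -> pow (x, y*0.5) */
double r4 (double x, double y, double z)
{
  return pow (pow (x, y), z);  /* -> pow (x, y*z) iff x is nonnegative */
}
#endif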
7866
7867 /* Fold a builtin function call to powi, powif, or powil with arguments
7868 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
7869 static tree
7870 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7871 tree arg0, tree arg1, tree type)
7872 {
7873 if (!validate_arg (arg0, REAL_TYPE)
7874 || !validate_arg (arg1, INTEGER_TYPE))
7875 return NULL_TREE;
7876
7877 /* Optimize powi(1.0,y) = 1.0. */
7878 if (real_onep (arg0))
7879 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7880
7881 if (tree_fits_shwi_p (arg1))
7882 {
7883 HOST_WIDE_INT c = tree_to_shwi (arg1);
7884
7885 /* Evaluate powi at compile-time. */
7886 if (TREE_CODE (arg0) == REAL_CST
7887 && !TREE_OVERFLOW (arg0))
7888 {
7889 REAL_VALUE_TYPE x;
7890 x = TREE_REAL_CST (arg0);
7891 real_powi (&x, TYPE_MODE (type), &x, c);
7892 return build_real (type, x);
7893 }
7894
7895 /* Optimize powi(x,0) = 1.0. */
7896 if (c == 0)
7897 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7898 arg0);
7899
7900 /* Optimize powi(x,1) = x. */
7901 if (c == 1)
7902 return arg0;
7903
7904 /* Optimize powi(x,-1) = 1.0/x. */
7905 if (c == -1)
7906 return fold_build2_loc (loc, RDIV_EXPR, type,
7907 build_real (type, dconst1), arg0);
7908 }
7909
7910 return NULL_TREE;
7911 }
7912
7913 /* A subroutine of fold_builtin to fold the various exponent
7914 functions. Return NULL_TREE if no simplification can be made.
7915 FUNC is the corresponding MPFR exponent function. */
7916
7917 static tree
7918 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7919 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7920 {
7921 if (validate_arg (arg, REAL_TYPE))
7922 {
7923 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7924 tree res;
7925
7926 /* Calculate the result when the argument is a constant. */
7927 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7928 return res;
7929
7930 /* Optimize expN(logN(x)) = x. */
7931 if (flag_unsafe_math_optimizations)
7932 {
7933 const enum built_in_function fcode = builtin_mathfn_code (arg);
7934
7935 if ((func == mpfr_exp
7936 && (fcode == BUILT_IN_LOG
7937 || fcode == BUILT_IN_LOGF
7938 || fcode == BUILT_IN_LOGL))
7939 || (func == mpfr_exp2
7940 && (fcode == BUILT_IN_LOG2
7941 || fcode == BUILT_IN_LOG2F
7942 || fcode == BUILT_IN_LOG2L))
7943 || (func == mpfr_exp10
7944 && (fcode == BUILT_IN_LOG10
7945 || fcode == BUILT_IN_LOG10F
7946 || fcode == BUILT_IN_LOG10L)))
7947 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7948 }
7949 }
7950
7951 return NULL_TREE;
7952 }
7953
7954 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7955 arguments to the call, and TYPE is its return type.
7956 Return NULL_TREE if no simplification can be made. */
7957
7958 static tree
7959 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7960 {
7961 if (!validate_arg (arg1, POINTER_TYPE)
7962 || !validate_arg (arg2, INTEGER_TYPE)
7963 || !validate_arg (len, INTEGER_TYPE))
7964 return NULL_TREE;
7965 else
7966 {
7967 const char *p1;
7968
7969 if (TREE_CODE (arg2) != INTEGER_CST
7970 || !tree_fits_uhwi_p (len))
7971 return NULL_TREE;
7972
7973 p1 = c_getstr (arg1);
7974 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7975 {
7976 char c;
7977 const char *r;
7978 tree tem;
7979
7980 if (target_char_cast (arg2, &c))
7981 return NULL_TREE;
7982
7983 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7984
7985 if (r == NULL)
7986 return build_int_cst (TREE_TYPE (arg1), 0);
7987
7988 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7989 return fold_convert_loc (loc, type, tem);
7990 }
7991 return NULL_TREE;
7992 }
7993 }
7994
7995 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
7996 Return NULL_TREE if no simplification can be made. */
7997
7998 static tree
7999 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8000 {
8001 const char *p1, *p2;
8002
8003 if (!validate_arg (arg1, POINTER_TYPE)
8004 || !validate_arg (arg2, POINTER_TYPE)
8005 || !validate_arg (len, INTEGER_TYPE))
8006 return NULL_TREE;
8007
8008 /* If the LEN parameter is zero, return zero. */
8009 if (integer_zerop (len))
8010 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8011 arg1, arg2);
8012
8013 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8014 if (operand_equal_p (arg1, arg2, 0))
8015 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8016
8017 p1 = c_getstr (arg1);
8018 p2 = c_getstr (arg2);
8019
8020 /* If all arguments are constant, and the value of len is not greater
8021 than the lengths of arg1 and arg2, evaluate at compile-time. */
8022 if (tree_fits_uhwi_p (len) && p1 && p2
8023 && compare_tree_int (len, strlen (p1) + 1) <= 0
8024 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8025 {
8026 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8027
8028 if (r > 0)
8029 return integer_one_node;
8030 else if (r < 0)
8031 return integer_minus_one_node;
8032 else
8033 return integer_zero_node;
8034 }
8035
8036 /* If the LEN parameter is one, return an expression corresponding to
8037 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8038 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8039 {
8040 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8041 tree cst_uchar_ptr_node
8042 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8043
8044 tree ind1
8045 = fold_convert_loc (loc, integer_type_node,
8046 build1 (INDIRECT_REF, cst_uchar_node,
8047 fold_convert_loc (loc,
8048 cst_uchar_ptr_node,
8049 arg1)));
8050 tree ind2
8051 = fold_convert_loc (loc, integer_type_node,
8052 build1 (INDIRECT_REF, cst_uchar_node,
8053 fold_convert_loc (loc,
8054 cst_uchar_ptr_node,
8055 arg2)));
8056 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8057 }
8058
8059 return NULL_TREE;
8060 }
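/* The len == 1 rewrite above is just an unsigned byte subtraction,
   which is exactly how memcmp defines its sign (a sketch, not part
   of GCC).  */
#if 0
int
memcmp1 (const void *a, const void *b)
{
  /* Equivalent to memcmp (a, b, 1).  */
  return *(const unsigned char *) a - *(const unsigned char *) b;
}
#endif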
8061
8062 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8063 Return NULL_TREE if no simplification can be made. */
8064
8065 static tree
8066 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8067 {
8068 const char *p1, *p2;
8069
8070 if (!validate_arg (arg1, POINTER_TYPE)
8071 || !validate_arg (arg2, POINTER_TYPE))
8072 return NULL_TREE;
8073
8074 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8075 if (operand_equal_p (arg1, arg2, 0))
8076 return integer_zero_node;
8077
8078 p1 = c_getstr (arg1);
8079 p2 = c_getstr (arg2);
8080
8081 if (p1 && p2)
8082 {
8083 const int i = strcmp (p1, p2);
8084 if (i < 0)
8085 return integer_minus_one_node;
8086 else if (i > 0)
8087 return integer_one_node;
8088 else
8089 return integer_zero_node;
8090 }
8091
8092 /* If the second arg is "", return *(const unsigned char*)arg1. */
8093 if (p2 && *p2 == '\0')
8094 {
8095 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8096 tree cst_uchar_ptr_node
8097 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8098
8099 return fold_convert_loc (loc, integer_type_node,
8100 build1 (INDIRECT_REF, cst_uchar_node,
8101 fold_convert_loc (loc,
8102 cst_uchar_ptr_node,
8103 arg1)));
8104 }
8105
8106 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8107 if (p1 && *p1 == '\0')
8108 {
8109 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8110 tree cst_uchar_ptr_node
8111 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8112
8113 tree temp
8114 = fold_convert_loc (loc, integer_type_node,
8115 build1 (INDIRECT_REF, cst_uchar_node,
8116 fold_convert_loc (loc,
8117 cst_uchar_ptr_node,
8118 arg2)));
8119 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8120 }
8121
8122 return NULL_TREE;
8123 }
8124
8125 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8126 Return NULL_TREE if no simplification can be made. */
8127
8128 static tree
8129 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8130 {
8131 const char *p1, *p2;
8132
8133 if (!validate_arg (arg1, POINTER_TYPE)
8134 || !validate_arg (arg2, POINTER_TYPE)
8135 || !validate_arg (len, INTEGER_TYPE))
8136 return NULL_TREE;
8137
8138 /* If the LEN parameter is zero, return zero. */
8139 if (integer_zerop (len))
8140 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8141 arg1, arg2);
8142
8143 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8144 if (operand_equal_p (arg1, arg2, 0))
8145 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8146
8147 p1 = c_getstr (arg1);
8148 p2 = c_getstr (arg2);
8149
8150 if (tree_fits_uhwi_p (len) && p1 && p2)
8151 {
8152 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8153 if (i > 0)
8154 return integer_one_node;
8155 else if (i < 0)
8156 return integer_minus_one_node;
8157 else
8158 return integer_zero_node;
8159 }
8160
8161 /* If the second arg is "", and the length is greater than zero,
8162 return *(const unsigned char*)arg1. */
8163 if (p2 && *p2 == '\0'
8164 && TREE_CODE (len) == INTEGER_CST
8165 && tree_int_cst_sgn (len) == 1)
8166 {
8167 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8168 tree cst_uchar_ptr_node
8169 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8170
8171 return fold_convert_loc (loc, integer_type_node,
8172 build1 (INDIRECT_REF, cst_uchar_node,
8173 fold_convert_loc (loc,
8174 cst_uchar_ptr_node,
8175 arg1)));
8176 }
8177
8178 /* If the first arg is "", and the length is greater than zero,
8179 return -*(const unsigned char*)arg2. */
8180 if (p1 && *p1 == '\0'
8181 && TREE_CODE (len) == INTEGER_CST
8182 && tree_int_cst_sgn (len) == 1)
8183 {
8184 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8185 tree cst_uchar_ptr_node
8186 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8187
8188 tree temp = fold_convert_loc (loc, integer_type_node,
8189 build1 (INDIRECT_REF, cst_uchar_node,
8190 fold_convert_loc (loc,
8191 cst_uchar_ptr_node,
8192 arg2)));
8193 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8194 }
8195
8196 /* If the LEN parameter is one, return an expression corresponding to
8197 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8198 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8199 {
8200 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8201 tree cst_uchar_ptr_node
8202 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8203
8204 tree ind1 = fold_convert_loc (loc, integer_type_node,
8205 build1 (INDIRECT_REF, cst_uchar_node,
8206 fold_convert_loc (loc,
8207 cst_uchar_ptr_node,
8208 arg1)));
8209 tree ind2 = fold_convert_loc (loc, integer_type_node,
8210 build1 (INDIRECT_REF, cst_uchar_node,
8211 fold_convert_loc (loc,
8212 cst_uchar_ptr_node,
8213 arg2)));
8214 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8215 }
8216
8217 return NULL_TREE;
8218 }
8219
8220 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8221 ARG. Return NULL_TREE if no simplification can be made. */
8222
8223 static tree
8224 fold_builtin_signbit (location_t loc, tree arg, tree type)
8225 {
8226 if (!validate_arg (arg, REAL_TYPE))
8227 return NULL_TREE;
8228
8229 /* If ARG is a compile-time constant, determine the result. */
8230 if (TREE_CODE (arg) == REAL_CST
8231 && !TREE_OVERFLOW (arg))
8232 {
8233 REAL_VALUE_TYPE c;
8234
8235 c = TREE_REAL_CST (arg);
8236 return (REAL_VALUE_NEGATIVE (c)
8237 ? build_one_cst (type)
8238 : build_zero_cst (type));
8239 }
8240
8241 /* If ARG is non-negative, the result is always zero. */
8242 if (tree_expr_nonnegative_p (arg))
8243 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8244
8245 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8246 if (!HONOR_SIGNED_ZEROS (arg))
8247 return fold_convert (type,
8248 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8249 build_real (TREE_TYPE (arg), dconst0)));
8250
8251 return NULL_TREE;
8252 }
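/* Why the "arg < 0.0" rewrite above is gated on the format lacking
   signed zeros: with IEEE signed zeros the two disagree for -0.0
   (a sketch; assumes an IEEE target).  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (signbit (-0.0) != 0);  /* the sign bit is set...  */
  assert (!(-0.0 < 0.0));        /* ...but the comparison is false */
  return 0;
}
#endif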
8253
8254 /* Fold function call to builtin copysign, copysignf or copysignl with
8255 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8256 be made. */
8257
8258 static tree
8259 fold_builtin_copysign (location_t loc, tree arg1, tree arg2, tree type)
8260 {
8261 if (!validate_arg (arg1, REAL_TYPE)
8262 || !validate_arg (arg2, REAL_TYPE))
8263 return NULL_TREE;
8264
8265 /* copysign(X,X) is X. */
8266 if (operand_equal_p (arg1, arg2, 0))
8267 return fold_convert_loc (loc, type, arg1);
8268
8269 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8270 if (TREE_CODE (arg1) == REAL_CST
8271 && TREE_CODE (arg2) == REAL_CST
8272 && !TREE_OVERFLOW (arg1)
8273 && !TREE_OVERFLOW (arg2))
8274 {
8275 REAL_VALUE_TYPE c1, c2;
8276
8277 c1 = TREE_REAL_CST (arg1);
8278 c2 = TREE_REAL_CST (arg2);
8279 /* c1.sign := c2.sign. */
8280 real_copysign (&c1, &c2);
8281 return build_real (type, c1);
8282 }
8283
8284 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8285 Remember to evaluate Y for side-effects. */
8286 if (tree_expr_nonnegative_p (arg2))
8287 return omit_one_operand_loc (loc, type,
8288 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8289 arg2);
8290
8291 return NULL_TREE;
8292 }
8293
8294 /* Fold a call to builtin isascii with argument ARG. */
8295
8296 static tree
8297 fold_builtin_isascii (location_t loc, tree arg)
8298 {
8299 if (!validate_arg (arg, INTEGER_TYPE))
8300 return NULL_TREE;
8301 else
8302 {
8303 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8304 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8305 build_int_cst (integer_type_node,
8306 ~ (unsigned HOST_WIDE_INT) 0x7f));
8307 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8308 arg, integer_zero_node);
8309 }
8310 }
8311
8312 /* Fold a call to builtin toascii with argument ARG. */
8313
8314 static tree
8315 fold_builtin_toascii (location_t loc, tree arg)
8316 {
8317 if (!validate_arg (arg, INTEGER_TYPE))
8318 return NULL_TREE;
8319
8320 /* Transform toascii(c) -> (c & 0x7f). */
8321 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8322 build_int_cst (integer_type_node, 0x7f));
8323 }
8324
8325 /* Fold a call to builtin isdigit with argument ARG. */
8326
8327 static tree
8328 fold_builtin_isdigit (location_t loc, tree arg)
8329 {
8330 if (!validate_arg (arg, INTEGER_TYPE))
8331 return NULL_TREE;
8332 else
8333 {
8334 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8335 /* According to the C standard, isdigit is unaffected by locale.
8336 However, it definitely is affected by the target character set. */
8337 unsigned HOST_WIDE_INT target_digit0
8338 = lang_hooks.to_target_charset ('0');
8339
8340 if (target_digit0 == 0)
8341 return NULL_TREE;
8342
8343 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8344 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8345 build_int_cst (unsigned_type_node, target_digit0));
8346 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8347 build_int_cst (unsigned_type_node, 9));
8348 }
8349 }
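/* The three ctype rewrites above as plain C (a sketch).  The isdigit
   form relies on the digits being contiguous, which the C standard
   guarantees for every character set.  */
#if 0
int my_isascii (int c) { return (c & ~0x7f) == 0; }
int my_toascii (int c) { return c & 0x7f; }
int my_isdigit (int c) { return (unsigned) c - '0' <= 9; }
#endif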
8350
8351 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8352
8353 static tree
8354 fold_builtin_fabs (location_t loc, tree arg, tree type)
8355 {
8356 if (!validate_arg (arg, REAL_TYPE))
8357 return NULL_TREE;
8358
8359 arg = fold_convert_loc (loc, type, arg);
8360 if (TREE_CODE (arg) == REAL_CST)
8361 return fold_abs_const (arg, type);
8362 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8363 }
8364
8365 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8366
8367 static tree
8368 fold_builtin_abs (location_t loc, tree arg, tree type)
8369 {
8370 if (!validate_arg (arg, INTEGER_TYPE))
8371 return NULL_TREE;
8372
8373 arg = fold_convert_loc (loc, type, arg);
8374 if (TREE_CODE (arg) == INTEGER_CST)
8375 return fold_abs_const (arg, type);
8376 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8377 }
8378
8379 /* Fold a fma operation with arguments ARG[012]. */
8380
8381 tree
8382 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8383 tree type, tree arg0, tree arg1, tree arg2)
8384 {
8385 if (TREE_CODE (arg0) == REAL_CST
8386 && TREE_CODE (arg1) == REAL_CST
8387 && TREE_CODE (arg2) == REAL_CST)
8388 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8389
8390 return NULL_TREE;
8391 }
8392
8393 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8394
8395 static tree
8396 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8397 {
8398 if (validate_arg (arg0, REAL_TYPE)
8399 && validate_arg (arg1, REAL_TYPE)
8400 && validate_arg (arg2, REAL_TYPE))
8401 {
8402 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8403 if (tem)
8404 return tem;
8405
8406 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8407 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8408 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8409 }
8410 return NULL_TREE;
8411 }
8412
8413 /* Fold a call to builtin fmin or fmax. */
8414
8415 static tree
8416 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8417 tree type, bool max)
8418 {
8419 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8420 {
8421 /* Calculate the result when the argument is a constant. */
8422 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8423
8424 if (res)
8425 return res;
8426
8427 /* If either argument is NaN, return the other one. Avoid the
8428 transformation if we get (and honor) a signalling NaN. Using
8429 omit_one_operand() ensures we create a non-lvalue. */
8430 if (TREE_CODE (arg0) == REAL_CST
8431 && real_isnan (&TREE_REAL_CST (arg0))
8432 && (! HONOR_SNANS (arg0)
8433 || ! TREE_REAL_CST (arg0).signalling))
8434 return omit_one_operand_loc (loc, type, arg1, arg0);
8435 if (TREE_CODE (arg1) == REAL_CST
8436 && real_isnan (&TREE_REAL_CST (arg1))
8437 && (! HONOR_SNANS (arg1)
8438 || ! TREE_REAL_CST (arg1).signalling))
8439 return omit_one_operand_loc (loc, type, arg0, arg1);
8440
8441 /* Transform fmin/fmax(x,x) -> x. */
8442 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8443 return omit_one_operand_loc (loc, type, arg0, arg1);
8444
8445 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8446 functions to return the numeric arg if the other one is NaN.
8447 These tree codes don't honor that, so only transform if
8448 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8449 handled, so we don't have to worry about it either. */
8450 if (flag_finite_math_only)
8451 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8452 fold_convert_loc (loc, type, arg0),
8453 fold_convert_loc (loc, type, arg1));
8454 }
8455 return NULL_TREE;
8456 }
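/* The NaN behaviour that blocks the MIN_EXPR/MAX_EXPR rewrite unless
   -ffinite-math-only is in effect (a sketch): C99 fmax returns the
   numeric operand when the other is a quiet NaN, while a bare
   comparison-based maximum depends on operand order.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double n = nan ("");
  assert (fmax (n, 1.0) == 1.0);    /* NaN operand is ignored */
  double m1 = (n > 1.0) ? n : 1.0;  /* comparison false -> 1.0 */
  double m2 = (1.0 > n) ? 1.0 : n;  /* comparison false -> NaN */
  assert (m1 == 1.0 && isnan (m2));
  return 0;
}
#endif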
8457
8458 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8459
8460 static tree
8461 fold_builtin_carg (location_t loc, tree arg, tree type)
8462 {
8463 if (validate_arg (arg, COMPLEX_TYPE)
8464 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8465 {
8466 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8467
8468 if (atan2_fn)
8469 {
8470 tree new_arg = builtin_save_expr (arg);
8471 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8472 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8473 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8474 }
8475 }
8476
8477 return NULL_TREE;
8478 }
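/* The carg rewrite above in source form: carg (a + b*I) == atan2 (b, a),
   imaginary part first (a sketch).  */
#if 0
#include <complex.h>
#include <math.h>

double
my_carg (double complex z)
{
  return atan2 (cimag (z), creal (z));
}
#endif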
8479
8480 /* Fold a call to builtin logb/ilogb. */
8481
8482 static tree
8483 fold_builtin_logb (location_t loc, tree arg, tree rettype)
8484 {
8485 if (! validate_arg (arg, REAL_TYPE))
8486 return NULL_TREE;
8487
8488 STRIP_NOPS (arg);
8489
8490 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8491 {
8492 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8493
8494 switch (value->cl)
8495 {
8496 case rvc_nan:
8497 case rvc_inf:
8498 /* If arg is Inf or NaN and we're logb, return it. */
8499 if (TREE_CODE (rettype) == REAL_TYPE)
8500 {
8501 /* For logb(-Inf) we have to return +Inf. */
8502 if (real_isinf (value) && real_isneg (value))
8503 {
8504 REAL_VALUE_TYPE tem;
8505 real_inf (&tem);
8506 return build_real (rettype, tem);
8507 }
8508 return fold_convert_loc (loc, rettype, arg);
8509 }
8510 /* Fall through... */
8511 case rvc_zero:
8512 /* Zero may set errno and/or raise an exception for logb; also,
8513 for ilogb we don't know FP_ILOGB0. */
8514 return NULL_TREE;
8515 case rvc_normal:
8516 /* For normal numbers, proceed iff radix == 2. In GCC,
8517 normalized significands are in the range [0.5, 1.0). We
8518 want the exponent as if they were [1.0, 2.0) so get the
8519 exponent and subtract 1. */
8520 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8521 return fold_convert_loc (loc, rettype,
8522 build_int_cst (integer_type_node,
8523 REAL_EXP (value)-1));
8524 break;
8525 }
8526 }
8527
8528 return NULL_TREE;
8529 }
8530
8531 /* Fold a call to builtin significand, if radix == 2. */
8532
8533 static tree
8534 fold_builtin_significand (location_t loc, tree arg, tree rettype)
8535 {
8536 if (! validate_arg (arg, REAL_TYPE))
8537 return NULL_TREE;
8538
8539 STRIP_NOPS (arg);
8540
8541 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8542 {
8543 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8544
8545 switch (value->cl)
8546 {
8547 case rvc_zero:
8548 case rvc_nan:
8549 case rvc_inf:
8550 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8551 return fold_convert_loc (loc, rettype, arg);
8552 case rvc_normal:
8553 /* For normal numbers, proceed iff radix == 2. */
8554 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8555 {
8556 REAL_VALUE_TYPE result = *value;
8557 /* In GCC, normalized significands are in the range [0.5,
8558 1.0). We want them to be [1.0, 2.0) so set the
8559 exponent to 1. */
8560 SET_REAL_EXP (&result, 1);
8561 return build_real (rettype, result);
8562 }
8563 break;
8564 }
8565 }
8566
8567 return NULL_TREE;
8568 }
8569
8570 /* Fold a call to builtin frexp. We can assume the base is 2. */
8571
8572 static tree
8573 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8574 {
8575 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8576 return NULL_TREE;
8577
8578 STRIP_NOPS (arg0);
8579
8580 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8581 return NULL_TREE;
8582
8583 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8584
8585 /* Proceed if a valid pointer type was passed in. */
8586 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8587 {
8588 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8589 tree frac, exp;
8590
8591 switch (value->cl)
8592 {
8593 case rvc_zero:
8594 /* For +-0, return (*exp = 0, +-0). */
8595 exp = integer_zero_node;
8596 frac = arg0;
8597 break;
8598 case rvc_nan:
8599 case rvc_inf:
8600 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8601 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8602 case rvc_normal:
8603 {
8604 /* Since the frexp function always expects base 2, and in
8605 GCC normalized significands are already in the range
8606 [0.5, 1.0), we have exactly what frexp wants. */
8607 REAL_VALUE_TYPE frac_rvt = *value;
8608 SET_REAL_EXP (&frac_rvt, 0);
8609 frac = build_real (rettype, frac_rvt);
8610 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8611 }
8612 break;
8613 default:
8614 gcc_unreachable ();
8615 }
8616
8617 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8618 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8619 TREE_SIDE_EFFECTS (arg1) = 1;
8620 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8621 }
8622
8623 return NULL_TREE;
8624 }
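
/* Sketch of the semantics implemented above (illustrative only):
   GCC's internal [0.5, 1.0) significand is exactly frexp's contract.  */
#if 0
#include <math.h>
int
main (void)
{
  int e;
  double f = frexp (8.0, &e);   /* 8.0 == 0.5 * 2^4 */
  return (f == 0.5 && e == 4) ? 0 : 1;
}
#endif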
8625
8626 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8627 then we can assume the base is two. If it's false, then we have to
8628 check the mode of the TYPE parameter in certain cases. */
8629
8630 static tree
8631 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
8632 tree type, bool ldexp)
8633 {
8634 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
8635 {
8636 STRIP_NOPS (arg0);
8637 STRIP_NOPS (arg1);
8638
8639 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
8640 if (real_zerop (arg0) || integer_zerop (arg1)
8641 || (TREE_CODE (arg0) == REAL_CST
8642 && !real_isfinite (&TREE_REAL_CST (arg0))))
8643 return omit_one_operand_loc (loc, type, arg0, arg1);
8644
8645 /* If both arguments are constant, then try to evaluate it. */
8646 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
8647 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
8648 && tree_fits_shwi_p (arg1))
8649 {
8650 /* Bound the maximum adjustment to twice the range of the
8651 mode's valid exponents. Use abs to ensure the range is
8652 positive as a sanity check. */
8653 const long max_exp_adj = 2 *
8654 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
8655 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
8656
8657 /* Get the user-requested adjustment. */
8658 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
8659
8660 /* The requested adjustment must be inside this range. This
8661 is a preliminary cap to avoid things like overflow; we
8662 may still fail to compute the result for other reasons. */
8663 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
8664 {
8665 REAL_VALUE_TYPE initial_result;
8666
8667 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
8668
8669 /* Ensure we didn't overflow. */
8670 if (! real_isinf (&initial_result))
8671 {
8672 const REAL_VALUE_TYPE trunc_result
8673 = real_value_truncate (TYPE_MODE (type), initial_result);
8674
8675 /* Only proceed if the target mode can hold the
8676 resulting value. */
8677 if (real_equal (&initial_result, &trunc_result))
8678 return build_real (type, trunc_result);
8679 }
8680 }
8681 }
8682 }
8683
8684 return NULL_TREE;
8685 }
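
/* Illustrative sketch (not part of this file): with a constant value
   and an in-range exponent adjustment, the call above folds to a
   constant; a wildly out-of-range adjustment is left for runtime.  */
#if 0
#include <math.h>
int
main (void)
{
  return ldexp (1.0, 10) == 1024.0 ? 0 : 1;   /* folds to 1024.0 */
}
#endif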
8686
8687 /* Fold a call to builtin modf. */
8688
8689 static tree
8690 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8691 {
8692 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8693 return NULL_TREE;
8694
8695 STRIP_NOPS (arg0);
8696
8697 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8698 return NULL_TREE;
8699
8700 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8701
8702 /* Proceed if a valid pointer type was passed in. */
8703 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8704 {
8705 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8706 REAL_VALUE_TYPE trunc, frac;
8707
8708 switch (value->cl)
8709 {
8710 case rvc_nan:
8711 case rvc_zero:
8712 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8713 trunc = frac = *value;
8714 break;
8715 case rvc_inf:
8716 /* For +-Inf, return (*arg1 = arg0, +-0). */
8717 frac = dconst0;
8718 frac.sign = value->sign;
8719 trunc = *value;
8720 break;
8721 case rvc_normal:
8722 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8723 real_trunc (&trunc, VOIDmode, value);
8724 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8725 /* If the original number was negative and already
8726 integral, then the fractional part is -0.0. */
8727 if (value->sign && frac.cl == rvc_zero)
8728 frac.sign = value->sign;
8729 break;
8730 }
8731
8732 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8733 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8734 build_real (rettype, trunc));
8735 TREE_SIDE_EFFECTS (arg1) = 1;
8736 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8737 build_real (rettype, frac));
8738 }
8739
8740 return NULL_TREE;
8741 }
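
/* Sketch of the cases handled above (illustrative only):  */
#if 0
#include <math.h>
int
main (void)
{
  double ip;
  double f = modf (-3.5, &ip);   /* ip == -3.0, f == -0.5 */
  double g = modf (-2.0, &ip);   /* ip == -2.0, f == -0.0 (signed zero) */
  return (f == -0.5 && g == 0.0 && signbit (g)) ? 0 : 1;
}
#endif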
8742
8743 /* Given a location LOC, an interclass builtin function decl FNDECL
8744 and its single argument ARG, return a folded expression computing
8745 the same, or NULL_TREE if we either couldn't or didn't want to fold
8746 (the latter happens if there's an RTL instruction available). */
8747
8748 static tree
8749 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8750 {
8751 machine_mode mode;
8752
8753 if (!validate_arg (arg, REAL_TYPE))
8754 return NULL_TREE;
8755
8756 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8757 return NULL_TREE;
8758
8759 mode = TYPE_MODE (TREE_TYPE (arg));
8760
8761 /* If there is no optab, try generic code. */
8762 switch (DECL_FUNCTION_CODE (fndecl))
8763 {
8764 tree result;
8765
8766 CASE_FLT_FN (BUILT_IN_ISINF):
8767 {
8768 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8769 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8770 tree const type = TREE_TYPE (arg);
8771 REAL_VALUE_TYPE r;
8772 char buf[128];
8773
8774 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8775 real_from_string (&r, buf);
8776 result = build_call_expr (isgr_fn, 2,
8777 fold_build1_loc (loc, ABS_EXPR, type, arg),
8778 build_real (type, r));
8779 return result;
8780 }
8781 CASE_FLT_FN (BUILT_IN_FINITE):
8782 case BUILT_IN_ISFINITE:
8783 {
8784 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8785 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8786 tree const type = TREE_TYPE (arg);
8787 REAL_VALUE_TYPE r;
8788 char buf[128];
8789
8790 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8791 real_from_string (&r, buf);
8792 result = build_call_expr (isle_fn, 2,
8793 fold_build1_loc (loc, ABS_EXPR, type, arg),
8794 build_real (type, r));
8802 return result;
8803 }
8804 case BUILT_IN_ISNORMAL:
8805 {
8806 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8807 islessequal(fabs(x),DBL_MAX). */
8808 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8809 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8810 tree const type = TREE_TYPE (arg);
8811 REAL_VALUE_TYPE rmax, rmin;
8812 char buf[128];
8813
8814 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8815 real_from_string (&rmax, buf);
8816 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8817 real_from_string (&rmin, buf);
8818 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8819 result = build_call_expr (isle_fn, 2, arg,
8820 build_real (type, rmax));
8821 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
8822 build_call_expr (isge_fn, 2, arg,
8823 build_real (type, rmin)));
8824 return result;
8825 }
8826 default:
8827 break;
8828 }
8829
8830 return NULL_TREE;
8831 }
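
/* Source-level sketch of the generic isnormal expansion above, for
   double (illustrative; the helper name is made up, and DBL_MIN and
   DBL_MAX stand in for 0x1p(emin-1) and the mode's max float):  */
#if 0
#include <float.h>
#include <math.h>
static int
my_isnormal (double x)
{
  /* Both macros are quiet for NaN, so NaN, Inf, zero and subnormals
     all yield 0; the fold combines the two tests with BIT_AND_EXPR.  */
  return islessequal (fabs (x), DBL_MAX)
         & isgreaterequal (fabs (x), DBL_MIN);
}
#endif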
8832
8833 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite;
8834 BUILTIN_INDEX selects which. ARG is the argument for the call. */
8835
8836 static tree
8837 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8838 {
8839 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8840 REAL_VALUE_TYPE r;
8841
8842 if (!validate_arg (arg, REAL_TYPE))
8843 return NULL_TREE;
8844
8845 switch (builtin_index)
8846 {
8847 case BUILT_IN_ISINF:
8848 if (!HONOR_INFINITIES (arg))
8849 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8850
8851 if (TREE_CODE (arg) == REAL_CST)
8852 {
8853 r = TREE_REAL_CST (arg);
8854 if (real_isinf (&r))
8855 return real_compare (GT_EXPR, &r, &dconst0)
8856 ? integer_one_node : integer_minus_one_node;
8857 else
8858 return integer_zero_node;
8859 }
8860
8861 return NULL_TREE;
8862
8863 case BUILT_IN_ISINF_SIGN:
8864 {
8865 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8866 /* In a boolean context, GCC will fold the inner COND_EXPR to
8867 1. So e.g. "if (isinf_sign(x))" would be folded to just
8868 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8869 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
8870 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8871 tree tmp = NULL_TREE;
8872
8873 arg = builtin_save_expr (arg);
8874
8875 if (signbit_fn && isinf_fn)
8876 {
8877 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8878 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8879
8880 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8881 signbit_call, integer_zero_node);
8882 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8883 isinf_call, integer_zero_node);
8884
8885 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8886 integer_minus_one_node, integer_one_node);
8887 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8888 isinf_call, tmp,
8889 integer_zero_node);
8890 }
8891
8892 return tmp;
8893 }
8894
8895 case BUILT_IN_ISFINITE:
8896 if (!HONOR_NANS (arg)
8897 && !HONOR_INFINITIES (arg))
8898 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8899
8900 if (TREE_CODE (arg) == REAL_CST)
8901 {
8902 r = TREE_REAL_CST (arg);
8903 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
8904 }
8905
8906 return NULL_TREE;
8907
8908 case BUILT_IN_ISNAN:
8909 if (!HONOR_NANS (arg))
8910 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8911
8912 if (TREE_CODE (arg) == REAL_CST)
8913 {
8914 r = TREE_REAL_CST (arg);
8915 return real_isnan (&r) ? integer_one_node : integer_zero_node;
8916 }
8917
8918 arg = builtin_save_expr (arg);
8919 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8920
8921 default:
8922 gcc_unreachable ();
8923 }
8924 }
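
/* Sketch of the isinf_sign rewrite above (illustrative; the helper
   name is made up):  */
#if 0
#include <math.h>
static int
my_isinf_sign (double x)
{
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}
#endif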
8925
8926 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8927 This builtin will generate code to return the appropriate floating
8928 point classification depending on the value of the floating point
8929 number passed in. The possible return values must be supplied as
8930 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8931 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8932 one floating point argument which is "type generic". */
8933
8934 static tree
8935 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8936 {
8937 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8938 arg, type, res, tmp;
8939 machine_mode mode;
8940 REAL_VALUE_TYPE r;
8941 char buf[128];
8942
8943 /* Verify the required arguments in the original call. */
8944 if (nargs != 6
8945 || !validate_arg (args[0], INTEGER_TYPE)
8946 || !validate_arg (args[1], INTEGER_TYPE)
8947 || !validate_arg (args[2], INTEGER_TYPE)
8948 || !validate_arg (args[3], INTEGER_TYPE)
8949 || !validate_arg (args[4], INTEGER_TYPE)
8950 || !validate_arg (args[5], REAL_TYPE))
8951 return NULL_TREE;
8952
8953 fp_nan = args[0];
8954 fp_infinite = args[1];
8955 fp_normal = args[2];
8956 fp_subnormal = args[3];
8957 fp_zero = args[4];
8958 arg = args[5];
8959 type = TREE_TYPE (arg);
8960 mode = TYPE_MODE (type);
8961 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8962
8963 /* fpclassify(x) ->
8964 isnan(x) ? FP_NAN :
8965 (fabs(x) == Inf ? FP_INFINITE :
8966 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8967 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8968
8969 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8970 build_real (type, dconst0));
8971 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8972 tmp, fp_zero, fp_subnormal);
8973
8974 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8975 real_from_string (&r, buf);
8976 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8977 arg, build_real (type, r));
8978 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8979
8980 if (HONOR_INFINITIES (mode))
8981 {
8982 real_inf (&r);
8983 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8984 build_real (type, r));
8985 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8986 fp_infinite, res);
8987 }
8988
8989 if (HONOR_NANS (mode))
8990 {
8991 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8992 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8993 }
8994
8995 return res;
8996 }
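
/* Source-level sketch of the decision chain built above, for double
   (illustrative; assumes NaNs and infinities are honored):  */
#if 0
#include <float.h>
#include <math.h>
static int
my_fpclassify (double x)
{
  double ax = fabs (x);
  return isnan (x)      ? FP_NAN
       : ax == INFINITY ? FP_INFINITE
       : ax >= DBL_MIN  ? FP_NORMAL
       : ax == 0.0      ? FP_ZERO
       :                  FP_SUBNORMAL;
}
#endif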
8997
8998 /* Fold a call to an unordered comparison function such as
8999 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9000 being called and ARG0 and ARG1 are the arguments for the call.
9001 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9002 the opposite of the desired result. UNORDERED_CODE is used
9003 for modes that can hold NaNs and ORDERED_CODE is used for
9004 the rest. */
9005
9006 static tree
9007 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9008 enum tree_code unordered_code,
9009 enum tree_code ordered_code)
9010 {
9011 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9012 enum tree_code code;
9013 tree type0, type1;
9014 enum tree_code code0, code1;
9015 tree cmp_type = NULL_TREE;
9016
9017 type0 = TREE_TYPE (arg0);
9018 type1 = TREE_TYPE (arg1);
9019
9020 code0 = TREE_CODE (type0);
9021 code1 = TREE_CODE (type1);
9022
9023 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9024 /* Choose the wider of two real types. */
9025 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9026 ? type0 : type1;
9027 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9028 cmp_type = type0;
9029 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9030 cmp_type = type1;
9031
9032 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9033 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9034
9035 if (unordered_code == UNORDERED_EXPR)
9036 {
9037 if (!HONOR_NANS (arg0))
9038 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9039 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9040 }
9041
9042 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9043 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9044 fold_build2_loc (loc, code, type, arg0, arg1));
9045 }
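
/* Behavioral sketch of the rewrite above (illustrative; the helper
   name is made up): isgreater (x, y) folds to !(x UNLE y), which is
   value-equivalent to the following, but without risking an "invalid"
   exception on NaN operands:  */
#if 0
#include <math.h>
static int
my_isgreater (double x, double y)
{
  return !isunordered (x, y) && x > y;
}
#endif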
9046
9047 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9048 arithmetic if it can never overflow, or into internal functions that
9049 return both the result of the arithmetic and an overflow flag in
9050 a complex integer result, or some other check for overflow. */
9051
9052 static tree
9053 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9054 tree arg0, tree arg1, tree arg2)
9055 {
9056 enum internal_fn ifn = IFN_LAST;
9057 tree type = TREE_TYPE (TREE_TYPE (arg2));
9058 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9059 switch (fcode)
9060 {
9061 case BUILT_IN_ADD_OVERFLOW:
9062 case BUILT_IN_SADD_OVERFLOW:
9063 case BUILT_IN_SADDL_OVERFLOW:
9064 case BUILT_IN_SADDLL_OVERFLOW:
9065 case BUILT_IN_UADD_OVERFLOW:
9066 case BUILT_IN_UADDL_OVERFLOW:
9067 case BUILT_IN_UADDLL_OVERFLOW:
9068 ifn = IFN_ADD_OVERFLOW;
9069 break;
9070 case BUILT_IN_SUB_OVERFLOW:
9071 case BUILT_IN_SSUB_OVERFLOW:
9072 case BUILT_IN_SSUBL_OVERFLOW:
9073 case BUILT_IN_SSUBLL_OVERFLOW:
9074 case BUILT_IN_USUB_OVERFLOW:
9075 case BUILT_IN_USUBL_OVERFLOW:
9076 case BUILT_IN_USUBLL_OVERFLOW:
9077 ifn = IFN_SUB_OVERFLOW;
9078 break;
9079 case BUILT_IN_MUL_OVERFLOW:
9080 case BUILT_IN_SMUL_OVERFLOW:
9081 case BUILT_IN_SMULL_OVERFLOW:
9082 case BUILT_IN_SMULLL_OVERFLOW:
9083 case BUILT_IN_UMUL_OVERFLOW:
9084 case BUILT_IN_UMULL_OVERFLOW:
9085 case BUILT_IN_UMULLL_OVERFLOW:
9086 ifn = IFN_MUL_OVERFLOW;
9087 break;
9088 default:
9089 gcc_unreachable ();
9090 }
9091 tree ctype = build_complex_type (type);
9092 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9093 2, arg0, arg1);
9094 tree tgt = save_expr (call);
9095 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9096 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9097 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9098 tree store
9099 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9100 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9101 }
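
/* User-level view of the fold above (illustrative): the builtin is
   rewritten into an internal function whose complex result packs the
   wrapped value (real part) and the overflow flag (imaginary part):  */
#if 0
#include <limits.h>
int
main (void)
{
  int sum;
  _Bool ovf = __builtin_add_overflow (INT_MAX, 1, &sum);
  return ovf ? 0 : 1;   /* overflows; sum holds the wrapped value */
}
#endif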
9102
9103 /* Fold a call to built-in function FNDECL with 0 arguments.
9104 This function returns NULL_TREE if no simplification was possible. */
9105
9106 static tree
9107 fold_builtin_0 (location_t loc, tree fndecl)
9108 {
9109 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9110 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9111 switch (fcode)
9112 {
9113 CASE_FLT_FN (BUILT_IN_INF):
9114 case BUILT_IN_INFD32:
9115 case BUILT_IN_INFD64:
9116 case BUILT_IN_INFD128:
9117 return fold_builtin_inf (loc, type, true);
9118
9119 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9120 return fold_builtin_inf (loc, type, false);
9121
9122 case BUILT_IN_CLASSIFY_TYPE:
9123 return fold_builtin_classify_type (NULL_TREE);
9124
9125 default:
9126 break;
9127 }
9128 return NULL_TREE;
9129 }
9130
9131 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9132 This function returns NULL_TREE if no simplification was possible. */
9133
9134 static tree
9135 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9136 {
9137 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9138 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9139 switch (fcode)
9140 {
9141 case BUILT_IN_CONSTANT_P:
9142 {
9143 tree val = fold_builtin_constant_p (arg0);
9144
9145 /* Gimplification will pull the CALL_EXPR for the builtin out of
9146 an if condition. When not optimizing, we'll not CSE it back.
9147 To avoid regressions such as link errors, return false now. */
9148 if (!val && !optimize)
9149 val = integer_zero_node;
9150
9151 return val;
9152 }
9153
9154 case BUILT_IN_CLASSIFY_TYPE:
9155 return fold_builtin_classify_type (arg0);
9156
9157 case BUILT_IN_STRLEN:
9158 return fold_builtin_strlen (loc, type, arg0);
9159
9160 CASE_FLT_FN (BUILT_IN_FABS):
9161 case BUILT_IN_FABSD32:
9162 case BUILT_IN_FABSD64:
9163 case BUILT_IN_FABSD128:
9164 return fold_builtin_fabs (loc, arg0, type);
9165
9166 case BUILT_IN_ABS:
9167 case BUILT_IN_LABS:
9168 case BUILT_IN_LLABS:
9169 case BUILT_IN_IMAXABS:
9170 return fold_builtin_abs (loc, arg0, type);
9171
9172 CASE_FLT_FN (BUILT_IN_CONJ):
9173 if (validate_arg (arg0, COMPLEX_TYPE)
9174 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9175 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9176 break;
9177
9178 CASE_FLT_FN (BUILT_IN_CREAL):
9179 if (validate_arg (arg0, COMPLEX_TYPE)
9180 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9181 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9182 break;
9183
9184 CASE_FLT_FN (BUILT_IN_CIMAG):
9185 if (validate_arg (arg0, COMPLEX_TYPE)
9186 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9187 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9188 break;
9189
9190 CASE_FLT_FN (BUILT_IN_CCOS):
9191 if (validate_arg (arg0, COMPLEX_TYPE)
9192 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9193 return do_mpc_arg1 (arg0, type, mpc_cos);
9194 break;
9195
9196 CASE_FLT_FN (BUILT_IN_CCOSH):
9197 if (validate_arg (arg0, COMPLEX_TYPE)
9198 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9199 return do_mpc_arg1 (arg0, type, mpc_cosh);
9200 break;
9201
9202 CASE_FLT_FN (BUILT_IN_CPROJ):
9203 if (TREE_CODE (arg0) == COMPLEX_CST
9204 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9205 {
9206 const REAL_VALUE_TYPE *real
9207 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9208 const REAL_VALUE_TYPE *imag
9209 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9210
9211 if (real_isinf (real) || real_isinf (imag))
9212 return build_complex_inf (type, imag->sign);
9213 else
9214 return arg0;
9215 }
9216 break;
9217
9218 CASE_FLT_FN (BUILT_IN_CSIN):
9219 if (validate_arg (arg0, COMPLEX_TYPE)
9220 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9221 return do_mpc_arg1 (arg0, type, mpc_sin);
9222 break;
9223
9224 CASE_FLT_FN (BUILT_IN_CSINH):
9225 if (validate_arg (arg0, COMPLEX_TYPE)
9226 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9227 return do_mpc_arg1 (arg0, type, mpc_sinh);
9228 break;
9229
9230 CASE_FLT_FN (BUILT_IN_CTAN):
9231 if (validate_arg (arg0, COMPLEX_TYPE)
9232 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9233 return do_mpc_arg1 (arg0, type, mpc_tan);
9234 break;
9235
9236 CASE_FLT_FN (BUILT_IN_CTANH):
9237 if (validate_arg (arg0, COMPLEX_TYPE)
9238 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9239 return do_mpc_arg1 (arg0, type, mpc_tanh);
9240 break;
9241
9242 CASE_FLT_FN (BUILT_IN_CLOG):
9243 if (validate_arg (arg0, COMPLEX_TYPE)
9244 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9245 return do_mpc_arg1 (arg0, type, mpc_log);
9246 break;
9247
9248 CASE_FLT_FN (BUILT_IN_CSQRT):
9249 if (validate_arg (arg0, COMPLEX_TYPE)
9250 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9251 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9252 break;
9253
9254 CASE_FLT_FN (BUILT_IN_CASIN):
9255 if (validate_arg (arg0, COMPLEX_TYPE)
9256 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9257 return do_mpc_arg1 (arg0, type, mpc_asin);
9258 break;
9259
9260 CASE_FLT_FN (BUILT_IN_CACOS):
9261 if (validate_arg (arg0, COMPLEX_TYPE)
9262 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9263 return do_mpc_arg1 (arg0, type, mpc_acos);
9264 break;
9265
9266 CASE_FLT_FN (BUILT_IN_CATAN):
9267 if (validate_arg (arg0, COMPLEX_TYPE)
9268 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9269 return do_mpc_arg1 (arg0, type, mpc_atan);
9270 break;
9271
9272 CASE_FLT_FN (BUILT_IN_CASINH):
9273 if (validate_arg (arg0, COMPLEX_TYPE)
9274 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9275 return do_mpc_arg1 (arg0, type, mpc_asinh);
9276 break;
9277
9278 CASE_FLT_FN (BUILT_IN_CACOSH):
9279 if (validate_arg (arg0, COMPLEX_TYPE)
9280 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9281 return do_mpc_arg1 (arg0, type, mpc_acosh);
9282 break;
9283
9284 CASE_FLT_FN (BUILT_IN_CATANH):
9285 if (validate_arg (arg0, COMPLEX_TYPE)
9286 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9287 return do_mpc_arg1 (arg0, type, mpc_atanh);
9288 break;
9289
9290 CASE_FLT_FN (BUILT_IN_CABS):
9291 if (TREE_CODE (arg0) == COMPLEX_CST
9292 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9293 return do_mpfr_arg2 (TREE_REALPART (arg0), TREE_IMAGPART (arg0),
9294 type, mpfr_hypot);
9295 break;
9296
9297 CASE_FLT_FN (BUILT_IN_CARG):
9298 return fold_builtin_carg (loc, arg0, type);
9299
9300 CASE_FLT_FN (BUILT_IN_SQRT):
9301 if (validate_arg (arg0, REAL_TYPE))
9302 return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
9303 break;
9304
9305 CASE_FLT_FN (BUILT_IN_CBRT):
9306 if (validate_arg (arg0, REAL_TYPE))
9307 return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
9308 break;
9309
9310 CASE_FLT_FN (BUILT_IN_ASIN):
9311 if (validate_arg (arg0, REAL_TYPE))
9312 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9313 &dconstm1, &dconst1, true);
9314 break;
9315
9316 CASE_FLT_FN (BUILT_IN_ACOS):
9317 if (validate_arg (arg0, REAL_TYPE))
9318 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9319 &dconstm1, &dconst1, true);
9320 break;
9321
9322 CASE_FLT_FN (BUILT_IN_ATAN):
9323 if (validate_arg (arg0, REAL_TYPE))
9324 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9325 break;
9326
9327 CASE_FLT_FN (BUILT_IN_ASINH):
9328 if (validate_arg (arg0, REAL_TYPE))
9329 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9330 break;
9331
9332 CASE_FLT_FN (BUILT_IN_ACOSH):
9333 if (validate_arg (arg0, REAL_TYPE))
9334 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9335 &dconst1, NULL, true);
9336 break;
9337
9338 CASE_FLT_FN (BUILT_IN_ATANH):
9339 if (validate_arg (arg0, REAL_TYPE))
9340 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9341 &dconstm1, &dconst1, false);
9342 break;
9343
9344 CASE_FLT_FN (BUILT_IN_SIN):
9345 if (validate_arg (arg0, REAL_TYPE))
9346 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9347 break;
9348
9349 CASE_FLT_FN (BUILT_IN_COS):
9350 if (validate_arg (arg0, REAL_TYPE))
9351 return do_mpfr_arg1 (arg0, type, mpfr_cos, NULL, NULL, 0);
9352 break;
9353
9354 CASE_FLT_FN (BUILT_IN_TAN):
9355 if (validate_arg (arg0, REAL_TYPE))
9356 return do_mpfr_arg1 (arg0, type, mpfr_tan, NULL, NULL, 0);
9357 break;
9358
9359 CASE_FLT_FN (BUILT_IN_CEXP):
9360 return fold_builtin_cexp (loc, arg0, type);
9361
9362 CASE_FLT_FN (BUILT_IN_CEXPI):
9363 if (validate_arg (arg0, REAL_TYPE))
9364 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9365 break;
9366
9367 CASE_FLT_FN (BUILT_IN_SINH):
9368 if (validate_arg (arg0, REAL_TYPE))
9369 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9370 break;
9371
9372 CASE_FLT_FN (BUILT_IN_COSH):
9373 if (validate_arg (arg0, REAL_TYPE))
9374 return do_mpfr_arg1 (arg0, type, mpfr_cosh, NULL, NULL, 0);
9375 break;
9376
9377 CASE_FLT_FN (BUILT_IN_TANH):
9378 if (validate_arg (arg0, REAL_TYPE))
9379 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9380 break;
9381
9382 CASE_FLT_FN (BUILT_IN_ERF):
9383 if (validate_arg (arg0, REAL_TYPE))
9384 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9385 break;
9386
9387 CASE_FLT_FN (BUILT_IN_ERFC):
9388 if (validate_arg (arg0, REAL_TYPE))
9389 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9390 break;
9391
9392 CASE_FLT_FN (BUILT_IN_TGAMMA):
9393 if (validate_arg (arg0, REAL_TYPE))
9394 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9395 break;
9396
9397 CASE_FLT_FN (BUILT_IN_EXP):
9398 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9399
9400 CASE_FLT_FN (BUILT_IN_EXP2):
9401 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9402
9403 CASE_FLT_FN (BUILT_IN_EXP10):
9404 CASE_FLT_FN (BUILT_IN_POW10):
9405 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9406
9407 CASE_FLT_FN (BUILT_IN_EXPM1):
9408 if (validate_arg (arg0, REAL_TYPE))
9409 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9410 break;
9411
9412 CASE_FLT_FN (BUILT_IN_LOG):
9413 if (validate_arg (arg0, REAL_TYPE))
9414 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
9415 break;
9416
9417 CASE_FLT_FN (BUILT_IN_LOG2):
9418 if (validate_arg (arg0, REAL_TYPE))
9419 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
9420 break;
9421
9422 CASE_FLT_FN (BUILT_IN_LOG10):
9423 if (validate_arg (arg0, REAL_TYPE))
9424 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
9425 break;
9426
9427 CASE_FLT_FN (BUILT_IN_LOG1P):
9428 if (validate_arg (arg0, REAL_TYPE))
9429 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9430 &dconstm1, NULL, false);
9431 break;
9432
9433 CASE_FLT_FN (BUILT_IN_J0):
9434 if (validate_arg (arg0, REAL_TYPE))
9435 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9436 NULL, NULL, 0);
9437 break;
9438
9439 CASE_FLT_FN (BUILT_IN_J1):
9440 if (validate_arg (arg0, REAL_TYPE))
9441 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9442 NULL, NULL, 0);
9443 break;
9444
9445 CASE_FLT_FN (BUILT_IN_Y0):
9446 if (validate_arg (arg0, REAL_TYPE))
9447 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9448 &dconst0, NULL, false);
9449 break;
9450
9451 CASE_FLT_FN (BUILT_IN_Y1):
9452 if (validate_arg (arg0, REAL_TYPE))
9453 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9454 &dconst0, NULL, false);
9455 break;
9456
9457 CASE_FLT_FN (BUILT_IN_NAN):
9458 case BUILT_IN_NAND32:
9459 case BUILT_IN_NAND64:
9460 case BUILT_IN_NAND128:
9461 return fold_builtin_nan (arg0, type, true);
9462
9463 CASE_FLT_FN (BUILT_IN_NANS):
9464 return fold_builtin_nan (arg0, type, false);
9465
9466 CASE_FLT_FN (BUILT_IN_FLOOR):
9467 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
9468 {
9469 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
9470 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
9471 {
9472 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9473 REAL_VALUE_TYPE r;
9474 real_floor (&r, TYPE_MODE (type), &x);
9475 return build_real (type, r);
9476 }
9477 }
9478 break;
9479
9480 CASE_FLT_FN (BUILT_IN_CEIL):
9481 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
9482 {
9483 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
9484 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
9485 {
9486 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9487 REAL_VALUE_TYPE r;
9488 real_ceil (&r, TYPE_MODE (type), &x);
9489 return build_real (type, r);
9490 }
9491 }
9492 break;
9493
9494 CASE_FLT_FN (BUILT_IN_TRUNC):
9495 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
9496 {
9497 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
9498 REAL_VALUE_TYPE r;
9499 real_trunc (&r, TYPE_MODE (type), &x);
9500 return build_real (type, r);
9501 }
9502 break;
9503
9504 CASE_FLT_FN (BUILT_IN_ROUND):
9505 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
9506 {
9507 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
9508 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
9509 {
9510 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9511 REAL_VALUE_TYPE r;
9512 real_round (&r, TYPE_MODE (type), &x);
9513 return build_real (type, r);
9514 }
9515 }
9516 break;
9517
9518 CASE_FLT_FN (BUILT_IN_ICEIL):
9519 CASE_FLT_FN (BUILT_IN_LCEIL):
9520 CASE_FLT_FN (BUILT_IN_LLCEIL):
9521 CASE_FLT_FN (BUILT_IN_LFLOOR):
9522 CASE_FLT_FN (BUILT_IN_IFLOOR):
9523 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9524 CASE_FLT_FN (BUILT_IN_IROUND):
9525 CASE_FLT_FN (BUILT_IN_LROUND):
9526 CASE_FLT_FN (BUILT_IN_LLROUND):
9527 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9528
9529 CASE_FLT_FN (BUILT_IN_IRINT):
9530 CASE_FLT_FN (BUILT_IN_LRINT):
9531 CASE_FLT_FN (BUILT_IN_LLRINT):
9532 return fold_fixed_mathfn (loc, fndecl, arg0);
9533
9534 case BUILT_IN_BSWAP16:
9535 case BUILT_IN_BSWAP32:
9536 case BUILT_IN_BSWAP64:
9537 return fold_builtin_bswap (fndecl, arg0);
9538
9539 CASE_INT_FN (BUILT_IN_FFS):
9540 CASE_INT_FN (BUILT_IN_CLZ):
9541 CASE_INT_FN (BUILT_IN_CTZ):
9542 CASE_INT_FN (BUILT_IN_CLRSB):
9543 CASE_INT_FN (BUILT_IN_POPCOUNT):
9544 CASE_INT_FN (BUILT_IN_PARITY):
9545 return fold_builtin_bitop (fndecl, arg0);
9546
9547 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9548 return fold_builtin_signbit (loc, arg0, type);
9549
9550 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9551 return fold_builtin_significand (loc, arg0, type);
9552
9553 CASE_FLT_FN (BUILT_IN_ILOGB):
9554 CASE_FLT_FN (BUILT_IN_LOGB):
9555 return fold_builtin_logb (loc, arg0, type);
9556
9557 case BUILT_IN_ISASCII:
9558 return fold_builtin_isascii (loc, arg0);
9559
9560 case BUILT_IN_TOASCII:
9561 return fold_builtin_toascii (loc, arg0);
9562
9563 case BUILT_IN_ISDIGIT:
9564 return fold_builtin_isdigit (loc, arg0);
9565
9566 CASE_FLT_FN (BUILT_IN_FINITE):
9567 case BUILT_IN_FINITED32:
9568 case BUILT_IN_FINITED64:
9569 case BUILT_IN_FINITED128:
9570 case BUILT_IN_ISFINITE:
9571 {
9572 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9573 if (ret)
9574 return ret;
9575 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9576 }
9577
9578 CASE_FLT_FN (BUILT_IN_ISINF):
9579 case BUILT_IN_ISINFD32:
9580 case BUILT_IN_ISINFD64:
9581 case BUILT_IN_ISINFD128:
9582 {
9583 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9584 if (ret)
9585 return ret;
9586 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9587 }
9588
9589 case BUILT_IN_ISNORMAL:
9590 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9591
9592 case BUILT_IN_ISINF_SIGN:
9593 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9594
9595 CASE_FLT_FN (BUILT_IN_ISNAN):
9596 case BUILT_IN_ISNAND32:
9597 case BUILT_IN_ISNAND64:
9598 case BUILT_IN_ISNAND128:
9599 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9600
9601 case BUILT_IN_FREE:
9602 if (integer_zerop (arg0))
9603 return build_empty_stmt (loc);
9604 break;
9605
9606 default:
9607 break;
9608 }
9609
9610 return NULL_TREE;
9611
9612 }
9613
9614 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9615 This function returns NULL_TREE if no simplification was possible. */
9616
9617 static tree
9618 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9619 {
9620 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9621 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9622
9623 switch (fcode)
9624 {
9625 CASE_FLT_FN (BUILT_IN_JN):
9626 if (validate_arg (arg0, INTEGER_TYPE)
9627 && validate_arg (arg1, REAL_TYPE))
9628 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9629 break;
9630
9631 CASE_FLT_FN (BUILT_IN_YN):
9632 if (validate_arg (arg0, INTEGER_TYPE)
9633 && validate_arg (arg1, REAL_TYPE))
9634 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9635 &dconst0, false);
9636 break;
9637
9638 CASE_FLT_FN (BUILT_IN_DREM):
9639 CASE_FLT_FN (BUILT_IN_REMAINDER):
9640 if (validate_arg (arg0, REAL_TYPE)
9641 && validate_arg (arg1, REAL_TYPE))
9642 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9643 break;
9644
9645 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9646 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9647 if (validate_arg (arg0, REAL_TYPE)
9648 && validate_arg (arg1, POINTER_TYPE))
9649 return do_mpfr_lgamma_r (arg0, arg1, type);
9650 break;
9651
9652 CASE_FLT_FN (BUILT_IN_ATAN2):
9653 if (validate_arg (arg0, REAL_TYPE)
9654 && validate_arg (arg1, REAL_TYPE))
9655 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9656 break;
9657
9658 CASE_FLT_FN (BUILT_IN_FDIM):
9659 if (validate_arg (arg0, REAL_TYPE)
9660 && validate_arg (arg1, REAL_TYPE))
9661 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9662 break;
9663
9664 CASE_FLT_FN (BUILT_IN_HYPOT):
9665 return fold_builtin_hypot (loc, arg0, arg1, type);
9666
9667 CASE_FLT_FN (BUILT_IN_CPOW):
9668 if (validate_arg (arg0, COMPLEX_TYPE)
9669 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9670 && validate_arg (arg1, COMPLEX_TYPE)
9671 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9672 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9673 break;
9674
9675 CASE_FLT_FN (BUILT_IN_LDEXP):
9676 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9677 CASE_FLT_FN (BUILT_IN_SCALBN):
9678 CASE_FLT_FN (BUILT_IN_SCALBLN):
9679 return fold_builtin_load_exponent (loc, arg0, arg1,
9680 type, /*ldexp=*/false);
9681
9682 CASE_FLT_FN (BUILT_IN_FREXP):
9683 return fold_builtin_frexp (loc, arg0, arg1, type);
9684
9685 CASE_FLT_FN (BUILT_IN_MODF):
9686 return fold_builtin_modf (loc, arg0, arg1, type);
9687
9688 case BUILT_IN_STRSTR:
9689 return fold_builtin_strstr (loc, arg0, arg1, type);
9690
9691 case BUILT_IN_STRSPN:
9692 return fold_builtin_strspn (loc, arg0, arg1);
9693
9694 case BUILT_IN_STRCSPN:
9695 return fold_builtin_strcspn (loc, arg0, arg1);
9696
9697 case BUILT_IN_STRCHR:
9698 case BUILT_IN_INDEX:
9699 return fold_builtin_strchr (loc, arg0, arg1, type);
9700
9701 case BUILT_IN_STRRCHR:
9702 case BUILT_IN_RINDEX:
9703 return fold_builtin_strrchr (loc, arg0, arg1, type);
9704
9705 case BUILT_IN_STRCMP:
9706 return fold_builtin_strcmp (loc, arg0, arg1);
9707
9708 case BUILT_IN_STRPBRK:
9709 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9710
9711 case BUILT_IN_EXPECT:
9712 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9713
9714 CASE_FLT_FN (BUILT_IN_POW):
9715 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
9716
9717 CASE_FLT_FN (BUILT_IN_POWI):
9718 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
9719
9720 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9721 return fold_builtin_copysign (loc, arg0, arg1, type);
9722
9723 CASE_FLT_FN (BUILT_IN_FMIN):
9724 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
9725
9726 CASE_FLT_FN (BUILT_IN_FMAX):
9727 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
9728
9729 case BUILT_IN_ISGREATER:
9730 return fold_builtin_unordered_cmp (loc, fndecl,
9731 arg0, arg1, UNLE_EXPR, LE_EXPR);
9732 case BUILT_IN_ISGREATEREQUAL:
9733 return fold_builtin_unordered_cmp (loc, fndecl,
9734 arg0, arg1, UNLT_EXPR, LT_EXPR);
9735 case BUILT_IN_ISLESS:
9736 return fold_builtin_unordered_cmp (loc, fndecl,
9737 arg0, arg1, UNGE_EXPR, GE_EXPR);
9738 case BUILT_IN_ISLESSEQUAL:
9739 return fold_builtin_unordered_cmp (loc, fndecl,
9740 arg0, arg1, UNGT_EXPR, GT_EXPR);
9741 case BUILT_IN_ISLESSGREATER:
9742 return fold_builtin_unordered_cmp (loc, fndecl,
9743 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9744 case BUILT_IN_ISUNORDERED:
9745 return fold_builtin_unordered_cmp (loc, fndecl,
9746 arg0, arg1, UNORDERED_EXPR,
9747 NOP_EXPR);
9748
9749 /* We do the folding for va_start in the expander. */
9750 case BUILT_IN_VA_START:
9751 break;
9752
9753 case BUILT_IN_OBJECT_SIZE:
9754 return fold_builtin_object_size (arg0, arg1);
9755
9756 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9757 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9758
9759 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9760 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9761
9762 default:
9763 break;
9764 }
9765 return NULL_TREE;
9766 }
9767
9768 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9769 and ARG2.
9770 This function returns NULL_TREE if no simplification was possible. */
9771
9772 static tree
9773 fold_builtin_3 (location_t loc, tree fndecl,
9774 tree arg0, tree arg1, tree arg2)
9775 {
9776 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9777 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9778 switch (fcode)
9779 {
9780
9781 CASE_FLT_FN (BUILT_IN_SINCOS):
9782 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9783
9784 CASE_FLT_FN (BUILT_IN_FMA):
9785 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9787
9788 CASE_FLT_FN (BUILT_IN_REMQUO):
9789 if (validate_arg (arg0, REAL_TYPE)
9790 && validate_arg (arg1, REAL_TYPE)
9791 && validate_arg (arg2, POINTER_TYPE))
9792 return do_mpfr_remquo (arg0, arg1, arg2);
9793 break;
9794
9795 case BUILT_IN_STRNCMP:
9796 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
9797
9798 case BUILT_IN_MEMCHR:
9799 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
9800
9801 case BUILT_IN_BCMP:
9802 case BUILT_IN_MEMCMP:
9803 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9804
9805 case BUILT_IN_EXPECT:
9806 return fold_builtin_expect (loc, arg0, arg1, arg2);
9807
9808 case BUILT_IN_ADD_OVERFLOW:
9809 case BUILT_IN_SUB_OVERFLOW:
9810 case BUILT_IN_MUL_OVERFLOW:
9811 case BUILT_IN_SADD_OVERFLOW:
9812 case BUILT_IN_SADDL_OVERFLOW:
9813 case BUILT_IN_SADDLL_OVERFLOW:
9814 case BUILT_IN_SSUB_OVERFLOW:
9815 case BUILT_IN_SSUBL_OVERFLOW:
9816 case BUILT_IN_SSUBLL_OVERFLOW:
9817 case BUILT_IN_SMUL_OVERFLOW:
9818 case BUILT_IN_SMULL_OVERFLOW:
9819 case BUILT_IN_SMULLL_OVERFLOW:
9820 case BUILT_IN_UADD_OVERFLOW:
9821 case BUILT_IN_UADDL_OVERFLOW:
9822 case BUILT_IN_UADDLL_OVERFLOW:
9823 case BUILT_IN_USUB_OVERFLOW:
9824 case BUILT_IN_USUBL_OVERFLOW:
9825 case BUILT_IN_USUBLL_OVERFLOW:
9826 case BUILT_IN_UMUL_OVERFLOW:
9827 case BUILT_IN_UMULL_OVERFLOW:
9828 case BUILT_IN_UMULLL_OVERFLOW:
9829 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9830
9831 default:
9832 break;
9833 }
9834 return NULL_TREE;
9835 }
9836
9837 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9838 arguments. IGNORE is true if the result of the
9839 function call is ignored. This function returns NULL_TREE if no
9840 simplification was possible. */
9841
9842 tree
9843 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9844 {
9845 tree ret = NULL_TREE;
9846
9847 switch (nargs)
9848 {
9849 case 0:
9850 ret = fold_builtin_0 (loc, fndecl);
9851 break;
9852 case 1:
9853 ret = fold_builtin_1 (loc, fndecl, args[0]);
9854 break;
9855 case 2:
9856 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9857 break;
9858 case 3:
9859 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9860 break;
9861 default:
9862 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9863 break;
9864 }
9865 if (ret)
9866 {
9867 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9868 SET_EXPR_LOCATION (ret, loc);
9869 TREE_NO_WARNING (ret) = 1;
9870 return ret;
9871 }
9872 return NULL_TREE;
9873 }
9874
9875 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9876 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9877 of arguments in ARGS to be omitted. OLDNARGS is the number of
9878 elements in ARGS. */
9879
9880 static tree
9881 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9882 int skip, tree fndecl, int n, va_list newargs)
9883 {
9884 int nargs = oldnargs - skip + n;
9885 tree *buffer;
9886
9887 if (n > 0)
9888 {
9889 int i, j;
9890
9891 buffer = XALLOCAVEC (tree, nargs);
9892 for (i = 0; i < n; i++)
9893 buffer[i] = va_arg (newargs, tree);
9894 for (j = skip; j < oldnargs; j++, i++)
9895 buffer[i] = args[j];
9896 }
9897 else
9898 buffer = args + skip;
9899
9900 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9901 }
9902
9903 /* Return true if FNDECL shouldn't be folded right now.
9904 If a built-in function has an always_inline wrapper, defer
9905 folding it until after always_inline functions have
9906 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9907 might not be performed. */
9908
9909 bool
9910 avoid_folding_inline_builtin (tree fndecl)
9911 {
9912 return (DECL_DECLARED_INLINE_P (fndecl)
9913 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9914 && cfun
9915 && !cfun->always_inline_functions_inlined
9916 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9917 }
9918
9919 /* A wrapper function for builtin folding that prevents warnings for
9920 "statement without effect" and the like, caused by removing the
9921 call node earlier than the warning is generated. */
9922
9923 tree
9924 fold_call_expr (location_t loc, tree exp, bool ignore)
9925 {
9926 tree ret = NULL_TREE;
9927 tree fndecl = get_callee_fndecl (exp);
9928 if (fndecl
9929 && TREE_CODE (fndecl) == FUNCTION_DECL
9930 && DECL_BUILT_IN (fndecl)
9931 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9932 yet. Defer folding until we see all the arguments
9933 (after inlining). */
9934 && !CALL_EXPR_VA_ARG_PACK (exp))
9935 {
9936 int nargs = call_expr_nargs (exp);
9937
9938 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9939 instead last argument is __builtin_va_arg_pack (). Defer folding
9940 even in that case, until arguments are finalized. */
9941 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9942 {
9943 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9944 if (fndecl2
9945 && TREE_CODE (fndecl2) == FUNCTION_DECL
9946 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9947 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9948 return NULL_TREE;
9949 }
9950
9951 if (avoid_folding_inline_builtin (fndecl))
9952 return NULL_TREE;
9953
9954 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9955 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9956 CALL_EXPR_ARGP (exp), ignore);
9957 else
9958 {
9959 tree *args = CALL_EXPR_ARGP (exp);
9960 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9961 if (ret)
9962 return ret;
9963 }
9964 }
9965 return NULL_TREE;
9966 }
9967
9968 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9969 N arguments are passed in the array ARGARRAY. Return a folded
9970 expression or NULL_TREE if no simplification was possible. */
9971
9972 tree
9973 fold_builtin_call_array (location_t loc, tree,
9974 tree fn,
9975 int n,
9976 tree *argarray)
9977 {
9978 if (TREE_CODE (fn) != ADDR_EXPR)
9979 return NULL_TREE;
9980
9981 tree fndecl = TREE_OPERAND (fn, 0);
9982 if (TREE_CODE (fndecl) == FUNCTION_DECL
9983 && DECL_BUILT_IN (fndecl))
9984 {
9985 /* If last argument is __builtin_va_arg_pack (), arguments to this
9986 function are not finalized yet. Defer folding until they are. */
9987 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9988 {
9989 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9990 if (fndecl2
9991 && TREE_CODE (fndecl2) == FUNCTION_DECL
9992 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9993 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9994 return NULL_TREE;
9995 }
9996 if (avoid_folding_inline_builtin (fndecl))
9997 return NULL_TREE;
9998 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9999 return targetm.fold_builtin (fndecl, n, argarray, false);
10000 else
10001 return fold_builtin_n (loc, fndecl, argarray, n, false);
10002 }
10003
10004 return NULL_TREE;
10005 }
10006
10007 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10008 along with N new arguments specified as the "..." parameters. SKIP
10009 is the number of arguments in EXP to be omitted. This function is used
10010 to do varargs-to-varargs transformations. */
10011
10012 static tree
10013 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10014 {
10015 va_list ap;
10016 tree t;
10017
10018 va_start (ap, n);
10019 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10020 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10021 va_end (ap);
10022
10023 return t;
10024 }
10025
10026 /* Validate a single argument ARG against a tree code CODE representing
10027 a type. */
10028
10029 static bool
10030 validate_arg (const_tree arg, enum tree_code code)
10031 {
10032 if (!arg)
10033 return false;
10034 else if (code == POINTER_TYPE)
10035 return POINTER_TYPE_P (TREE_TYPE (arg));
10036 else if (code == INTEGER_TYPE)
10037 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10038 return code == TREE_CODE (TREE_TYPE (arg));
10039 }
10040
10041 /* This function validates the types of a function call argument list
10042 against a specified list of tree_codes. If the last specifier is a 0,
10043 that represents an ellipsis; otherwise the last specifier must be a
10044 VOID_TYPE.
10045
10046 This is the GIMPLE version of validate_arglist. Eventually we want to
10047 completely convert builtins.c to work from GIMPLEs and the tree based
10048 validate_arglist will then be removed. */
10049
10050 bool
10051 validate_gimple_arglist (const gcall *call, ...)
10052 {
10053 enum tree_code code;
10054 bool res = false;
10055 va_list ap;
10056 const_tree arg;
10057 size_t i;
10058
10059 va_start (ap, call);
10060 i = 0;
10061
10062 do
10063 {
10064 code = (enum tree_code) va_arg (ap, int);
10065 switch (code)
10066 {
10067 case 0:
10068 /* This signifies an ellipsis; any further arguments are all ok. */
10069 res = true;
10070 goto end;
10071 case VOID_TYPE:
10072 /* This signifies an endlink, if no arguments remain, return
10073 true, otherwise return false. */
10074 res = (i == gimple_call_num_args (call));
10075 goto end;
10076 default:
10077 /* If no parameters remain or the parameter's code does not
10078 match the specified code, return false. Otherwise continue
10079 checking any remaining arguments. */
10080 arg = gimple_call_arg (call, i++);
10081 if (!validate_arg (arg, code))
10082 goto end;
10083 break;
10084 }
10085 }
10086 while (1);
10087
10088 /* We need gotos here so that va_end is reached on every path out
10089 of the loop. */
10090 end: ;
10091 va_end (ap);
10092
10093 return res;
10094 }
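
/* Typical usage sketch (illustrative; CALL is some gcall seen by a
   folder): check a (double, double *) signature exactly, or accept
   one integer followed by anything:

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE);
     validate_gimple_arglist (call, INTEGER_TYPE, 0);  */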
10095
10096 /* Default target-specific builtin expander that does nothing. */
10097
10098 rtx
10099 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10100 rtx target ATTRIBUTE_UNUSED,
10101 rtx subtarget ATTRIBUTE_UNUSED,
10102 machine_mode mode ATTRIBUTE_UNUSED,
10103 int ignore ATTRIBUTE_UNUSED)
10104 {
10105 return NULL_RTX;
10106 }
10107
10108 /* Returns true if EXP represents data that would potentially reside
10109 in a readonly section. */
10110
10111 bool
10112 readonly_data_expr (tree exp)
10113 {
10114 STRIP_NOPS (exp);
10115
10116 if (TREE_CODE (exp) != ADDR_EXPR)
10117 return false;
10118
10119 exp = get_base_address (TREE_OPERAND (exp, 0));
10120 if (!exp)
10121 return false;
10122
10123 /* Make sure we call decl_readonly_section only for trees it
10124 can handle (since it returns true for everything it doesn't
10125 understand). */
10126 if (TREE_CODE (exp) == STRING_CST
10127 || TREE_CODE (exp) == CONSTRUCTOR
10128 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10129 return decl_readonly_section (exp, 0);
10130 else
10131 return false;
10132 }
10133
10134 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10135 to the call, and TYPE is its return type.
10136
10137 Return NULL_TREE if no simplification was possible, otherwise return the
10138 simplified form of the call as a tree.
10139
10140 The simplified form may be a constant or other expression which
10141 computes the same value, but in a more efficient manner (including
10142 calls to other builtin functions).
10143
10144 The call may contain arguments which need to be evaluated, but
10145 which are not useful to determine the result of the call. In
10146 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10147 COMPOUND_EXPR will be an argument which must be evaluated.
10148 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10149 COMPOUND_EXPR in the chain will contain the tree for the simplified
10150 form of the builtin function call. */
10151
10152 static tree
10153 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10154 {
10155 if (!validate_arg (s1, POINTER_TYPE)
10156 || !validate_arg (s2, POINTER_TYPE))
10157 return NULL_TREE;
10158 else
10159 {
10160 tree fn;
10161 const char *p1, *p2;
10162
10163 p2 = c_getstr (s2);
10164 if (p2 == NULL)
10165 return NULL_TREE;
10166
10167 p1 = c_getstr (s1);
10168 if (p1 != NULL)
10169 {
10170 const char *r = strstr (p1, p2);
10171 tree tem;
10172
10173 if (r == NULL)
10174 return build_int_cst (TREE_TYPE (s1), 0);
10175
10176 /* Return an offset into the constant string argument. */
10177 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10178 return fold_convert_loc (loc, type, tem);
10179 }
10180
10181 /* The argument is const char *, and the result is char *, so we need
10182 a type conversion here to avoid a warning. */
10183 if (p2[0] == '\0')
10184 return fold_convert_loc (loc, type, s1);
10185
10186 if (p2[1] != '\0')
10187 return NULL_TREE;
10188
10189 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10190 if (!fn)
10191 return NULL_TREE;
10192
10193 /* New argument list transforming strstr(s1, s2) to
10194 strchr(s1, s2[0]). */
10195 return build_call_expr_loc (loc, fn, 2, s1,
10196 build_int_cst (integer_type_node, p2[0]));
10197 }
10198 }
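
/* Effect of the fold above at the source level (illustrative):

     strstr (s, "")         -> (char *) s
     strstr (s, "c")        -> strchr (s, 'c')
     strstr ("abcde", "cd") -> (char *) "abcde" + 2   (constant offset)  */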
10199
10200 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10201 the call, and TYPE is its return type.
10202
10203 Return NULL_TREE if no simplification was possible, otherwise return the
10204 simplified form of the call as a tree.
10205
10206 The simplified form may be a constant or other expression which
10207 computes the same value, but in a more efficient manner (including
10208 calls to other builtin functions).
10209
10210 The call may contain arguments which need to be evaluated, but
10211 which are not useful to determine the result of the call. In
10212 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10213 COMPOUND_EXPR will be an argument which must be evaluated.
10214 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10215 COMPOUND_EXPR in the chain will contain the tree for the simplified
10216 form of the builtin function call. */
10217
10218 static tree
10219 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10220 {
10221 if (!validate_arg (s1, POINTER_TYPE)
10222 || !validate_arg (s2, INTEGER_TYPE))
10223 return NULL_TREE;
10224 else
10225 {
10226 const char *p1;
10227
10228 if (TREE_CODE (s2) != INTEGER_CST)
10229 return NULL_TREE;
10230
10231 p1 = c_getstr (s1);
10232 if (p1 != NULL)
10233 {
10234 char c;
10235 const char *r;
10236 tree tem;
10237
10238 if (target_char_cast (s2, &c))
10239 return NULL_TREE;
10240
10241 r = strchr (p1, c);
10242
10243 if (r == NULL)
10244 return build_int_cst (TREE_TYPE (s1), 0);
10245
10246 /* Return an offset into the constant string argument. */
10247 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10248 return fold_convert_loc (loc, type, tem);
10249 }
10250 return NULL_TREE;
10251 }
10252 }
10253
10254 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10255 the call, and TYPE is its return type.
10256
10257 Return NULL_TREE if no simplification was possible, otherwise return the
10258 simplified form of the call as a tree.
10259
10260 The simplified form may be a constant or other expression which
10261 computes the same value, but in a more efficient manner (including
10262 calls to other builtin functions).
10263
10264 The call may contain arguments which need to be evaluated, but
10265 which are not useful to determine the result of the call. In
10266 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10267 COMPOUND_EXPR will be an argument which must be evaluated.
10268 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10269 COMPOUND_EXPR in the chain will contain the tree for the simplified
10270 form of the builtin function call. */
10271
10272 static tree
10273 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10274 {
10275 if (!validate_arg (s1, POINTER_TYPE)
10276 || !validate_arg (s2, INTEGER_TYPE))
10277 return NULL_TREE;
10278 else
10279 {
10280 tree fn;
10281 const char *p1;
10282
10283 if (TREE_CODE (s2) != INTEGER_CST)
10284 return NULL_TREE;
10285
10286 p1 = c_getstr (s1);
10287 if (p1 != NULL)
10288 {
10289 char c;
10290 const char *r;
10291 tree tem;
10292
10293 if (target_char_cast (s2, &c))
10294 return NULL_TREE;
10295
10296 r = strrchr (p1, c);
10297
10298 if (r == NULL)
10299 return build_int_cst (TREE_TYPE (s1), 0);
10300
10301 /* Return an offset into the constant string argument. */
10302 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10303 return fold_convert_loc (loc, type, tem);
10304 }
10305
10306 if (! integer_zerop (s2))
10307 return NULL_TREE;
10308
10309 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10310 if (!fn)
10311 return NULL_TREE;
10312
10313 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10314 return build_call_expr_loc (loc, fn, 2, s1, s2);
10315 }
10316 }
10317
10318 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10319 to the call, and TYPE is its return type.
10320
10321 Return NULL_TREE if no simplification was possible, otherwise return the
10322 simplified form of the call as a tree.
10323
10324 The simplified form may be a constant or other expression which
10325 computes the same value, but in a more efficient manner (including
10326 calls to other builtin functions).
10327
10328 The call may contain arguments which need to be evaluated, but
10329 which are not useful to determine the result of the call. In
10330 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10331 COMPOUND_EXPR will be an argument which must be evaluated.
10332 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10333 COMPOUND_EXPR in the chain will contain the tree for the simplified
10334 form of the builtin function call. */
10335
10336 static tree
10337 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10338 {
10339 if (!validate_arg (s1, POINTER_TYPE)
10340 || !validate_arg (s2, POINTER_TYPE))
10341 return NULL_TREE;
10342 else
10343 {
10344 tree fn;
10345 const char *p1, *p2;
10346
10347 p2 = c_getstr (s2);
10348 if (p2 == NULL)
10349 return NULL_TREE;
10350
10351 p1 = c_getstr (s1);
10352 if (p1 != NULL)
10353 {
10354 const char *r = strpbrk (p1, p2);
10355 tree tem;
10356
10357 if (r == NULL)
10358 return build_int_cst (TREE_TYPE (s1), 0);
10359
10360 /* Return an offset into the constant string argument. */
10361 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10362 return fold_convert_loc (loc, type, tem);
10363 }
10364
10365 if (p2[0] == '\0')
10366 /* strpbrk(x, "") == NULL.
10367 Evaluate and ignore s1 in case it had side-effects. */
10368 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10369
10370 if (p2[1] != '\0')
10371 return NULL_TREE; /* Really call strpbrk. */
10372
10373 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10374 if (!fn)
10375 return NULL_TREE;
10376
10377 /* New argument list transforming strpbrk(s1, s2) to
10378 strchr(s1, s2[0]). */
10379 return build_call_expr_loc (loc, fn, 2, s1,
10380 build_int_cst (integer_type_node, p2[0]));
10381 }
10382 }
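
/* Effect of the fold above at the source level (illustrative):

     strpbrk (s, "")   -> (s, NULL)   -- s still evaluated for effects
     strpbrk (s, "c")  -> strchr (s, 'c')  */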
10383
10384 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10385 to the call.
10386
10387 Return NULL_TREE if no simplification was possible, otherwise return the
10388 simplified form of the call as a tree.
10389
10390 The simplified form may be a constant or other expression which
10391 computes the same value, but in a more efficient manner (including
10392 calls to other builtin functions).
10393
10394 The call may contain arguments which need to be evaluated, but
10395 which are not useful to determine the result of the call. In
10396 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10397 COMPOUND_EXPR will be an argument which must be evaluated.
10398 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10399 COMPOUND_EXPR in the chain will contain the tree for the simplified
10400 form of the builtin function call. */
10401
10402 static tree
10403 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10404 {
10405 if (!validate_arg (s1, POINTER_TYPE)
10406 || !validate_arg (s2, POINTER_TYPE))
10407 return NULL_TREE;
10408 else
10409 {
10410 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10411
10412 /* If both arguments are constants, evaluate at compile-time. */
10413 if (p1 && p2)
10414 {
10415 const size_t r = strspn (p1, p2);
10416 return build_int_cst (size_type_node, r);
10417 }
10418
10419 /* If either argument is "", the result is 0. */
10420 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10421 /* Evaluate and ignore both arguments in case either one has
10422 side-effects. */
10423 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10424 s1, s2);
10425 return NULL_TREE;
10426 }
10427 }
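
/* Illustrative examples (editor's sketch) of the folds performed
   above:

     strspn ("aabc", "ab")  ->  3
     strspn (s, "")         ->  0, both arguments still evaluated  */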
10428
10429 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10430 to the call.
10431
10432 Return NULL_TREE if no simplification was possible, otherwise return the
10433 simplified form of the call as a tree.
10434
10435 The simplified form may be a constant or other expression which
10436 computes the same value, but in a more efficient manner (including
10437 calls to other builtin functions).
10438
10439 The call may contain arguments which need to be evaluated, but
10440 which are not useful to determine the result of the call. In
10441 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10442 COMPOUND_EXPR will be an argument which must be evaluated.
10443 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10444 COMPOUND_EXPR in the chain will contain the tree for the simplified
10445 form of the builtin function call. */
10446
10447 static tree
10448 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10449 {
10450 if (!validate_arg (s1, POINTER_TYPE)
10451 || !validate_arg (s2, POINTER_TYPE))
10452 return NULL_TREE;
10453 else
10454 {
10455 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10456
10457 /* If both arguments are constants, evaluate at compile-time. */
10458 if (p1 && p2)
10459 {
10460 const size_t r = strcspn (p1, p2);
10461 return build_int_cst (size_type_node, r);
10462 }
10463
10464 /* If the first argument is "", the result is 0. */
10465 if (p1 && *p1 == '\0')
10466 {
10467 /* Evaluate and ignore argument s2 in case it has
10468 side-effects. */
10469 return omit_one_operand_loc (loc, size_type_node,
10470 size_zero_node, s2);
10471 }
10472
10473 /* If the second argument is "", return __builtin_strlen(s1). */
10474 if (p2 && *p2 == '\0')
10475 {
10476 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10477
10478 /* If the replacement _DECL isn't initialized, don't do the
10479 transformation. */
10480 if (!fn)
10481 return NULL_TREE;
10482
10483 return build_call_expr_loc (loc, fn, 1, s1);
10484 }
10485 return NULL_TREE;
10486 }
10487 }
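
/* Illustrative examples (editor's sketch) of the folds performed
   above:

     strcspn ("abcde", "dc")  ->  2
     strcspn ("", s)          ->  0, S2 still evaluated
     strcspn (s, "")          ->  strlen (s)  */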
10488
10489 /* Fold the next_arg or va_start call EXP. Returns true if an error
10490 was produced, false otherwise. This is done so that we don't output
10491 the error or warning more than once. */
10492
10493 bool
10494 fold_builtin_next_arg (tree exp, bool va_start_p)
10495 {
10496 tree fntype = TREE_TYPE (current_function_decl);
10497 int nargs = call_expr_nargs (exp);
10498 tree arg;
10499 /* There is a good chance the current input_location points inside the
10500 definition of the va_start macro (perhaps on the token for the
10501 builtin) in a system header, so warnings will not be emitted.
10502 Use the location in real source code. */
10503 source_location current_location =
10504 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10505 NULL);
10506
10507 if (!stdarg_p (fntype))
10508 {
10509 error ("%<va_start%> used in function with fixed args");
10510 return true;
10511 }
10512
10513 if (va_start_p)
10514 {
10515 if (nargs != 2)
10516 {
10517 error ("wrong number of arguments to function %<va_start%>");
10518 return true;
10519 }
10520 arg = CALL_EXPR_ARG (exp, 1);
10521 }
10522 /* After checking the arguments we rewrite the call as
10523 __builtin_va_start (ap, 0) or __builtin_next_arg (0); see below. */
10524 else
10525 {
10526 if (nargs == 0)
10527 {
10528 /* Evidently an out of date version of <stdarg.h>; can't validate
10529 va_start's second argument, but can still work as intended. */
10530 warning_at (current_location,
10531 OPT_Wvarargs,
10532 "%<__builtin_next_arg%> called without an argument");
10533 return true;
10534 }
10535 else if (nargs > 1)
10536 {
10537 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10538 return true;
10539 }
10540 arg = CALL_EXPR_ARG (exp, 0);
10541 }
10542
10543 if (TREE_CODE (arg) == SSA_NAME)
10544 arg = SSA_NAME_VAR (arg);
10545
10546 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10547 or __builtin_next_arg (0) the first time we see it, after checking
10548 the arguments and if needed issuing a warning. */
10549 if (!integer_zerop (arg))
10550 {
10551 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10552
10553 /* Strip off all nops for the sake of the comparison. This
10554 is not quite the same as STRIP_NOPS. It does more.
10555 We must also strip off INDIRECT_REF for C++ reference
10556 parameters. */
10557 while (CONVERT_EXPR_P (arg)
10558 || TREE_CODE (arg) == INDIRECT_REF)
10559 arg = TREE_OPERAND (arg, 0);
10560 if (arg != last_parm)
10561 {
10562 /* FIXME: Sometimes the tree optimizers hand us something other
10563 than the last argument even though the user did use the last
10564 argument. We just warn here and zero the argument below as
10565 usual, so that no wrong code is generated because of
10566 it. */
10567 warning_at (current_location,
10568 OPT_Wvarargs,
10569 "second parameter of %<va_start%> not last named argument");
10570 }
10571
10572 /* Undefined by C99 7.15.1.4p4 (va_start):
10573 "If the parameter parmN is declared with the register storage
10574 class, with a function or array type, or with a type that is
10575 not compatible with the type that results after application of
10576 the default argument promotions, the behavior is undefined."
10577 */
10578 else if (DECL_REGISTER (arg))
10579 {
10580 warning_at (current_location,
10581 OPT_Wvarargs,
10582 "undefined behaviour when second parameter of "
10583 "%<va_start%> is declared with %<register%> storage");
10584 }
10585
10586 /* We want to verify the second parameter just once before the tree
10587 optimizers are run and then avoid keeping it in the tree,
10588 as otherwise we could warn even for correct code like:
10589 void foo (int i, ...)
10590 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10591 if (va_start_p)
10592 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10593 else
10594 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10595 }
10596 return false;
10597 }
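
/* For example (editor's sketch), the following triggers the
   -Wvarargs warning issued above, because A is not the last named
   parameter:

     void foo (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
       va_end (ap);
     }  */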
10598
10599
10600 /* Expand a call EXP to __builtin_object_size. */
10601
10602 static rtx
10603 expand_builtin_object_size (tree exp)
10604 {
10605 tree ost;
10606 int object_size_type;
10607 tree fndecl = get_callee_fndecl (exp);
10608
10609 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10610 {
10611 error ("%Kfirst argument of %D must be a pointer, second integer constant",
10612 exp, fndecl);
10613 expand_builtin_trap ();
10614 return const0_rtx;
10615 }
10616
10617 ost = CALL_EXPR_ARG (exp, 1);
10618 STRIP_NOPS (ost);
10619
10620 if (TREE_CODE (ost) != INTEGER_CST
10621 || tree_int_cst_sgn (ost) < 0
10622 || compare_tree_int (ost, 3) > 0)
10623 {
10624 error ("%Klast argument of %D is not integer constant between 0 and 3",
10625 exp, fndecl);
10626 expand_builtin_trap ();
10627 return const0_rtx;
10628 }
10629
10630 object_size_type = tree_to_shwi (ost);
10631
10632 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10633 }
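
/* For example (editor's sketch), when earlier folding could not
   determine the size, the expansion above yields the documented
   "unknown" values:

     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0  */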
10634
10635 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10636 FCODE is the BUILT_IN_* to use.
10637 Return NULL_RTX if we failed; the caller should emit a normal call,
10638 otherwise try to get the result in TARGET, if convenient (and in
10639 mode MODE if that's convenient). */
10640
10641 static rtx
10642 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10643 enum built_in_function fcode)
10644 {
10645 tree dest, src, len, size;
10646
10647 if (!validate_arglist (exp,
10648 POINTER_TYPE,
10649 fcode == BUILT_IN_MEMSET_CHK
10650 ? INTEGER_TYPE : POINTER_TYPE,
10651 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10652 return NULL_RTX;
10653
10654 dest = CALL_EXPR_ARG (exp, 0);
10655 src = CALL_EXPR_ARG (exp, 1);
10656 len = CALL_EXPR_ARG (exp, 2);
10657 size = CALL_EXPR_ARG (exp, 3);
10658
10659 if (! tree_fits_uhwi_p (size))
10660 return NULL_RTX;
10661
10662 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10663 {
10664 tree fn;
10665
10666 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
10667 {
10668 warning_at (tree_nonartificial_location (exp),
10669 0, "%Kcall to %D will always overflow destination buffer",
10670 exp, get_callee_fndecl (exp));
10671 return NULL_RTX;
10672 }
10673
10674 fn = NULL_TREE;
10675 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10676 mem{cpy,pcpy,move,set} is available. */
10677 switch (fcode)
10678 {
10679 case BUILT_IN_MEMCPY_CHK:
10680 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10681 break;
10682 case BUILT_IN_MEMPCPY_CHK:
10683 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10684 break;
10685 case BUILT_IN_MEMMOVE_CHK:
10686 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10687 break;
10688 case BUILT_IN_MEMSET_CHK:
10689 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10690 break;
10691 default:
10692 break;
10693 }
10694
10695 if (! fn)
10696 return NULL_RTX;
10697
10698 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10699 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10700 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10701 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10702 }
10703 else if (fcode == BUILT_IN_MEMSET_CHK)
10704 return NULL_RTX;
10705 else
10706 {
10707 unsigned int dest_align = get_pointer_alignment (dest);
10708
10709 /* If DEST is not a pointer type, call the normal function. */
10710 if (dest_align == 0)
10711 return NULL_RTX;
10712
10713 /* If SRC and DEST are the same (and not volatile), do nothing. */
10714 if (operand_equal_p (src, dest, 0))
10715 {
10716 tree expr;
10717
10718 if (fcode != BUILT_IN_MEMPCPY_CHK)
10719 {
10720 /* Evaluate and ignore LEN in case it has side-effects. */
10721 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10722 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10723 }
10724
10725 expr = fold_build_pointer_plus (dest, len);
10726 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10727 }
10728
10729 /* __memmove_chk special case. */
10730 if (fcode == BUILT_IN_MEMMOVE_CHK)
10731 {
10732 unsigned int src_align = get_pointer_alignment (src);
10733
10734 if (src_align == 0)
10735 return NULL_RTX;
10736
10737 /* If src is categorized for a readonly section we can use
10738 normal __memcpy_chk. */
10739 if (readonly_data_expr (src))
10740 {
10741 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10742 if (!fn)
10743 return NULL_RTX;
10744 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10745 dest, src, len, size);
10746 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10747 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10748 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10749 }
10750 }
10751 return NULL_RTX;
10752 }
10753 }
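
/* Illustrative expansions (editor's sketch) performed above:

     __memcpy_chk (d, s, n, (size_t) -1)  ->  memcpy (d, s, n)
     __memcpy_chk (d, s, 16, 8)           ->  warning that the call
					      will always overflow;
					      expanded as a normal
					      call  */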
10754
10755 /* Emit warning if a buffer overflow is detected at compile time. */
10756
10757 static void
10758 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10759 {
10760 int is_strlen = 0;
10761 tree len, size;
10762 location_t loc = tree_nonartificial_location (exp);
10763
10764 switch (fcode)
10765 {
10766 case BUILT_IN_STRCPY_CHK:
10767 case BUILT_IN_STPCPY_CHK:
10768 /* For __strcat_chk the warning will be emitted only if overflowing
10769 by at least strlen (dest) + 1 bytes. */
10770 case BUILT_IN_STRCAT_CHK:
10771 len = CALL_EXPR_ARG (exp, 1);
10772 size = CALL_EXPR_ARG (exp, 2);
10773 is_strlen = 1;
10774 break;
10775 case BUILT_IN_STRNCAT_CHK:
10776 case BUILT_IN_STRNCPY_CHK:
10777 case BUILT_IN_STPNCPY_CHK:
10778 len = CALL_EXPR_ARG (exp, 2);
10779 size = CALL_EXPR_ARG (exp, 3);
10780 break;
10781 case BUILT_IN_SNPRINTF_CHK:
10782 case BUILT_IN_VSNPRINTF_CHK:
10783 len = CALL_EXPR_ARG (exp, 1);
10784 size = CALL_EXPR_ARG (exp, 3);
10785 break;
10786 default:
10787 gcc_unreachable ();
10788 }
10789
10790 if (!len || !size)
10791 return;
10792
10793 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10794 return;
10795
10796 if (is_strlen)
10797 {
10798 len = c_strlen (len, 1);
10799 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10800 return;
10801 }
10802 else if (fcode == BUILT_IN_STRNCAT_CHK)
10803 {
10804 tree src = CALL_EXPR_ARG (exp, 1);
10805 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10806 return;
10807 src = c_strlen (src, 1);
10808 if (! src || ! tree_fits_uhwi_p (src))
10809 {
10810 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
10811 exp, get_callee_fndecl (exp));
10812 return;
10813 }
10814 else if (tree_int_cst_lt (src, size))
10815 return;
10816 }
10817 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
10818 return;
10819
10820 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
10821 exp, get_callee_fndecl (exp));
10822 }
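
/* For example (editor's sketch), with a 4-byte destination the
   checks above warn at compile time:

     char buf[4];
     __strcpy_chk (buf, "hello", __builtin_object_size (buf, 0));

   Here strlen ("hello") == 5 >= 4, so the copy must overflow BUF.  */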
10823
10824 /* Emit warning if a buffer overflow is detected at compile time
10825 in __sprintf_chk/__vsprintf_chk calls. */
10826
10827 static void
10828 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10829 {
10830 tree size, len, fmt;
10831 const char *fmt_str;
10832 int nargs = call_expr_nargs (exp);
10833
10834 /* Verify the required arguments in the original call. */
10835
10836 if (nargs < 4)
10837 return;
10838 size = CALL_EXPR_ARG (exp, 2);
10839 fmt = CALL_EXPR_ARG (exp, 3);
10840
10841 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10842 return;
10843
10844 /* Check whether the format is a literal string constant. */
10845 fmt_str = c_getstr (fmt);
10846 if (fmt_str == NULL)
10847 return;
10848
10849 if (!init_target_chars ())
10850 return;
10851
10852 /* If the format doesn't contain % args or %%, we know its size. */
10853 if (strchr (fmt_str, target_percent) == 0)
10854 len = build_int_cstu (size_type_node, strlen (fmt_str));
10855 /* If the format is "%s" and the first ... argument is a string literal,
10856 we know it too. */
10857 else if (fcode == BUILT_IN_SPRINTF_CHK
10858 && strcmp (fmt_str, target_percent_s) == 0)
10859 {
10860 tree arg;
10861
10862 if (nargs < 5)
10863 return;
10864 arg = CALL_EXPR_ARG (exp, 4);
10865 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10866 return;
10867
10868 len = c_strlen (arg, 1);
10869 if (!len || ! tree_fits_uhwi_p (len))
10870 return;
10871 }
10872 else
10873 return;
10874
10875 if (! tree_int_cst_lt (len, size))
10876 warning_at (tree_nonartificial_location (exp),
10877 0, "%Kcall to %D will always overflow destination buffer",
10878 exp, get_callee_fndecl (exp));
10879 }
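
/* For example (editor's sketch), both of the calls below are caught
   above, because the output needs 6 bytes (including the NUL) but
   SIZE is only 4:

     char buf[4];
     __sprintf_chk (buf, 0, 4, "hello");
     __sprintf_chk (buf, 0, 4, "%s", "hello");  */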
10880
10881 /* Emit warning if a free is called with address of a variable. */
10882
10883 static void
10884 maybe_emit_free_warning (tree exp)
10885 {
10886 tree arg = CALL_EXPR_ARG (exp, 0);
10887
10888 STRIP_NOPS (arg);
10889 if (TREE_CODE (arg) != ADDR_EXPR)
10890 return;
10891
10892 arg = get_base_address (TREE_OPERAND (arg, 0));
10893 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10894 return;
10895
10896 if (SSA_VAR_P (arg))
10897 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10898 "%Kattempt to free a non-heap object %qD", exp, arg);
10899 else
10900 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10901 "%Kattempt to free a non-heap object", exp);
10902 }
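
/* For example (editor's sketch), the check above diagnoses

     int x;
     free (&x);

   with -Wfree-nonheap-object: "attempt to free a non-heap object 'x'".  */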
10903
10904 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10905 if possible. */
10906
10907 static tree
10908 fold_builtin_object_size (tree ptr, tree ost)
10909 {
10910 unsigned HOST_WIDE_INT bytes;
10911 int object_size_type;
10912
10913 if (!validate_arg (ptr, POINTER_TYPE)
10914 || !validate_arg (ost, INTEGER_TYPE))
10915 return NULL_TREE;
10916
10917 STRIP_NOPS (ost);
10918
10919 if (TREE_CODE (ost) != INTEGER_CST
10920 || tree_int_cst_sgn (ost) < 0
10921 || compare_tree_int (ost, 3) > 0)
10922 return NULL_TREE;
10923
10924 object_size_type = tree_to_shwi (ost);
10925
10926 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10927 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10928 and (size_t) 0 for types 2 and 3. */
10929 if (TREE_SIDE_EFFECTS (ptr))
10930 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10931
10932 if (TREE_CODE (ptr) == ADDR_EXPR)
10933 {
10934 bytes = compute_builtin_object_size (ptr, object_size_type);
10935 if (wi::fits_to_tree_p (bytes, size_type_node))
10936 return build_int_cstu (size_type_node, bytes);
10937 }
10938 else if (TREE_CODE (ptr) == SSA_NAME)
10939 {
10940 /* If object size is not known yet, delay folding until
10941 later. Maybe subsequent passes will help determining
10942 it. */
10943 bytes = compute_builtin_object_size (ptr, object_size_type);
10944 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
10945 && wi::fits_to_tree_p (bytes, size_type_node))
10946 return build_int_cstu (size_type_node, bytes);
10947 }
10948
10949 return NULL_TREE;
10950 }
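
/* Illustrative folds (editor's sketch) performed above:

     char buf[64];
     __builtin_object_size (&buf[16], 0)  ->  48
     __builtin_object_size (f (), 0)      ->  (size_t) -1, without
					      evaluating f ()  */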
10951
10952 /* Builtins with folding operations that operate on "..." arguments
10953 need special handling; we need to store the arguments in a convenient
10954 data structure before attempting any folding. Fortunately there are
10955 only a few builtins that fall into this category. FNDECL is the
10956 function, EXP is the CALL_EXPR for the call. */
10957
10958 static tree
10959 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10960 {
10961 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10962 tree ret = NULL_TREE;
10963
10964 switch (fcode)
10965 {
10966 case BUILT_IN_FPCLASSIFY:
10967 ret = fold_builtin_fpclassify (loc, args, nargs);
10968 break;
10969
10970 default:
10971 break;
10972 }
10973 if (ret)
10974 {
10975 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10976 SET_EXPR_LOCATION (ret, loc);
10977 TREE_NO_WARNING (ret) = 1;
10978 return ret;
10979 }
10980 return NULL_TREE;
10981 }
10982
10983 /* Initialize format string characters in the target charset. */
10984
10985 bool
10986 init_target_chars (void)
10987 {
10988 static bool init;
10989 if (!init)
10990 {
10991 target_newline = lang_hooks.to_target_charset ('\n');
10992 target_percent = lang_hooks.to_target_charset ('%');
10993 target_c = lang_hooks.to_target_charset ('c');
10994 target_s = lang_hooks.to_target_charset ('s');
10995 if (target_newline == 0 || target_percent == 0 || target_c == 0
10996 || target_s == 0)
10997 return false;
10998
10999 target_percent_c[0] = target_percent;
11000 target_percent_c[1] = target_c;
11001 target_percent_c[2] = '\0';
11002
11003 target_percent_s[0] = target_percent;
11004 target_percent_s[1] = target_s;
11005 target_percent_s[2] = '\0';
11006
11007 target_percent_s_newline[0] = target_percent;
11008 target_percent_s_newline[1] = target_s;
11009 target_percent_s_newline[2] = target_newline;
11010 target_percent_s_newline[3] = '\0';
11011
11012 init = true;
11013 }
11014 return true;
11015 }
11016
11017 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11018 and no overflow/underflow occurred. INEXACT is true if M was not
11019 exactly calculated. TYPE is the tree type for the result. This
11020 function assumes that you cleared the MPFR flags before
11021 calculating M, so that any flag set since then was set by that
11022 calculation. Return NULL_TREE if any checks fail. */
11023
11024 static tree
11025 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11026 {
11027 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11028 overflow/underflow occurred. If -frounding-math, proceed iff the
11029 result of calling FUNC was exact. */
11030 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11031 && (!flag_rounding_math || !inexact))
11032 {
11033 REAL_VALUE_TYPE rr;
11034
11035 real_from_mpfr (&rr, m, type, GMP_RNDN);
11036 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11037 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11038 but the mpfr_t is not, then we underflowed in the
11039 conversion. */
11040 if (real_isfinite (&rr)
11041 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11042 {
11043 REAL_VALUE_TYPE rmode;
11044
11045 real_convert (&rmode, TYPE_MODE (type), &rr);
11046 /* Proceed iff the specified mode can hold the value. */
11047 if (real_identical (&rmode, &rr))
11048 return build_real (type, rmode);
11049 }
11050 }
11051 return NULL_TREE;
11052 }
11053
11054 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11055 number and no overflow/underflow occurred. INEXACT is true if M
11056 was not exactly calculated. TYPE is the tree type for the result.
11057 This function assumes that you cleared the MPFR flags before
11058 calculating M, so that any flag set since then was set by that
11059 calculation. Return NULL_TREE if any checks fail; if
11060 FORCE_CONVERT is true, the checks are bypassed. */
11061
11062 static tree
11063 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11064 {
11065 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11066 overflow/underflow occurred. If -frounding-math, proceed iff the
11067 result of calling FUNC was exact. */
11068 if (force_convert
11069 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11070 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11071 && (!flag_rounding_math || !inexact)))
11072 {
11073 REAL_VALUE_TYPE re, im;
11074
11075 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11076 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11077 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11078 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11079 but the mpfr_t is not, then we underflowed in the
11080 conversion. */
11081 if (force_convert
11082 || (real_isfinite (&re) && real_isfinite (&im)
11083 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11084 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11085 {
11086 REAL_VALUE_TYPE re_mode, im_mode;
11087
11088 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11089 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11090 /* Proceed iff the specified mode can hold the value. */
11091 if (force_convert
11092 || (real_identical (&re_mode, &re)
11093 && real_identical (&im_mode, &im)))
11094 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11095 build_real (TREE_TYPE (type), im_mode));
11096 }
11097 }
11098 return NULL_TREE;
11099 }
11100
11101 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11102 FUNC on it and return the resulting value as a tree with type TYPE.
11103 If MIN and/or MAX are not NULL, then the supplied ARG must be
11104 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11105 acceptable values, otherwise they are not. The mpfr precision is
11106 set to the precision of TYPE. We assume that function FUNC returns
11107 zero if the result could be calculated exactly within the requested
11108 precision. */
11109
11110 static tree
11111 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11112 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11113 bool inclusive)
11114 {
11115 tree result = NULL_TREE;
11116
11117 STRIP_NOPS (arg);
11118
11119 /* To proceed, MPFR must exactly represent the target floating point
11120 format, which only happens when the target base equals two. */
11121 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11122 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11123 {
11124 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11125
11126 if (real_isfinite (ra)
11127 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11128 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11129 {
11130 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11131 const int prec = fmt->p;
11132 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11133 int inexact;
11134 mpfr_t m;
11135
11136 mpfr_init2 (m, prec);
11137 mpfr_from_real (m, ra, GMP_RNDN);
11138 mpfr_clear_flags ();
11139 inexact = func (m, m, rnd);
11140 result = do_mpfr_ckconv (m, type, inexact);
11141 mpfr_clear (m);
11142 }
11143 }
11144
11145 return result;
11146 }
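
/* For instance (editor's sketch, not from the original source), a
   caller folding constant calls to atan or acos might look like:

     do_mpfr_arg1 (arg, type, mpfr_atan, NULL, NULL, false);
     do_mpfr_arg1 (arg, type, mpfr_acos, &dconstm1, &dconst1, true);

   where the MIN/MAX bounds restrict acos to its domain [-1, 1]
   (dconstm1 and dconst1 are GCC's global REAL_VALUE_TYPE
   constants).  */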
11147
11148 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11149 FUNC on it and return the resulting value as a tree with type TYPE.
11150 The mpfr precision is set to the precision of TYPE. We assume that
11151 function FUNC returns zero if the result could be calculated
11152 exactly within the requested precision. */
11153
11154 static tree
11155 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11156 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11157 {
11158 tree result = NULL_TREE;
11159
11160 STRIP_NOPS (arg1);
11161 STRIP_NOPS (arg2);
11162
11163 /* To proceed, MPFR must exactly represent the target floating point
11164 format, which only happens when the target base equals two. */
11165 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11166 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11167 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11168 {
11169 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11170 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11171
11172 if (real_isfinite (ra1) && real_isfinite (ra2))
11173 {
11174 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11175 const int prec = fmt->p;
11176 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11177 int inexact;
11178 mpfr_t m1, m2;
11179
11180 mpfr_inits2 (prec, m1, m2, NULL);
11181 mpfr_from_real (m1, ra1, GMP_RNDN);
11182 mpfr_from_real (m2, ra2, GMP_RNDN);
11183 mpfr_clear_flags ();
11184 inexact = func (m1, m1, m2, rnd);
11185 result = do_mpfr_ckconv (m1, type, inexact);
11186 mpfr_clears (m1, m2, NULL);
11187 }
11188 }
11189
11190 return result;
11191 }
11192
11193 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11194 FUNC on it and return the resulting value as a tree with type TYPE.
11195 The mpfr precision is set to the precision of TYPE. We assume that
11196 function FUNC returns zero if the result could be calculated
11197 exactly within the requested precision. */
11198
11199 static tree
11200 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11201 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11202 {
11203 tree result = NULL_TREE;
11204
11205 STRIP_NOPS (arg1);
11206 STRIP_NOPS (arg2);
11207 STRIP_NOPS (arg3);
11208
11209 /* To proceed, MPFR must exactly represent the target floating point
11210 format, which only happens when the target base equals two. */
11211 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11212 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11213 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11214 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11215 {
11216 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11217 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11218 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11219
11220 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11221 {
11222 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11223 const int prec = fmt->p;
11224 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11225 int inexact;
11226 mpfr_t m1, m2, m3;
11227
11228 mpfr_inits2 (prec, m1, m2, m3, NULL);
11229 mpfr_from_real (m1, ra1, GMP_RNDN);
11230 mpfr_from_real (m2, ra2, GMP_RNDN);
11231 mpfr_from_real (m3, ra3, GMP_RNDN);
11232 mpfr_clear_flags ();
11233 inexact = func (m1, m1, m2, m3, rnd);
11234 result = do_mpfr_ckconv (m1, type, inexact);
11235 mpfr_clears (m1, m2, m3, NULL);
11236 }
11237 }
11238
11239 return result;
11240 }
11241
11242 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11243 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11244 If ARG_SINP and ARG_COSP are NULL then the result is returned
11245 as a complex value.
11246 The type is taken from the type of ARG and is used for setting the
11247 precision of the calculation and results. */
11248
11249 static tree
11250 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11251 {
11252 tree const type = TREE_TYPE (arg);
11253 tree result = NULL_TREE;
11254
11255 STRIP_NOPS (arg);
11256
11257 /* To proceed, MPFR must exactly represent the target floating point
11258 format, which only happens when the target base equals two. */
11259 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11260 && TREE_CODE (arg) == REAL_CST
11261 && !TREE_OVERFLOW (arg))
11262 {
11263 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11264
11265 if (real_isfinite (ra))
11266 {
11267 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11268 const int prec = fmt->p;
11269 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11270 tree result_s, result_c;
11271 int inexact;
11272 mpfr_t m, ms, mc;
11273
11274 mpfr_inits2 (prec, m, ms, mc, NULL);
11275 mpfr_from_real (m, ra, GMP_RNDN);
11276 mpfr_clear_flags ();
11277 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11278 result_s = do_mpfr_ckconv (ms, type, inexact);
11279 result_c = do_mpfr_ckconv (mc, type, inexact);
11280 mpfr_clears (m, ms, mc, NULL);
11281 if (result_s && result_c)
11282 {
11283 /* If we are to return the result as a complex value, do so. */
11284 if (!arg_sinp && !arg_cosp)
11285 return build_complex (build_complex_type (type),
11286 result_c, result_s);
11287
11288 /* Dereference the sin/cos pointer arguments. */
11289 arg_sinp = build_fold_indirect_ref (arg_sinp);
11290 arg_cosp = build_fold_indirect_ref (arg_cosp);
11291 /* Proceed iff valid pointer types were passed in. */
11292 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11293 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11294 {
11295 /* Set the values. */
11296 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11297 result_s);
11298 TREE_SIDE_EFFECTS (result_s) = 1;
11299 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11300 result_c);
11301 TREE_SIDE_EFFECTS (result_c) = 1;
11302 /* Combine the assignments into a compound expr. */
11303 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11304 result_s, result_c));
11305 }
11306 }
11307 }
11308 }
11309 return result;
11310 }
11311
11312 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11313 two-argument mpfr order N Bessel function FUNC on them and return
11314 the resulting value as a tree with type TYPE. The mpfr precision
11315 is set to the precision of TYPE. We assume that function FUNC
11316 returns zero if the result could be calculated exactly within the
11317 requested precision. */
11318 static tree
11319 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11320 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11321 const REAL_VALUE_TYPE *min, bool inclusive)
11322 {
11323 tree result = NULL_TREE;
11324
11325 STRIP_NOPS (arg1);
11326 STRIP_NOPS (arg2);
11327
11328 /* To proceed, MPFR must exactly represent the target floating point
11329 format, which only happens when the target base equals two. */
11330 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11331 && tree_fits_shwi_p (arg1)
11332 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11333 {
11334 const HOST_WIDE_INT n = tree_to_shwi (arg1);
11335 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11336
11337 if (n == (long)n
11338 && real_isfinite (ra)
11339 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
11340 {
11341 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11342 const int prec = fmt->p;
11343 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11344 int inexact;
11345 mpfr_t m;
11346
11347 mpfr_init2 (m, prec);
11348 mpfr_from_real (m, ra, GMP_RNDN);
11349 mpfr_clear_flags ();
11350 inexact = func (m, n, m, rnd);
11351 result = do_mpfr_ckconv (m, type, inexact);
11352 mpfr_clear (m);
11353 }
11354 }
11355
11356 return result;
11357 }
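
/* For instance (editor's sketch, not from the original source), a
   caller folding the Bessel functions jn and yn might look like:

     do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
     do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);

   where the exclusive &dconst0 bound rejects nonpositive arguments
   for yn.  */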
11358
11359 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11360 the pointer *(ARG_QUO) and return the result. The type is taken
11361 from the type of ARG0 and is used for setting the precision of the
11362 calculation and results. */
11363
11364 static tree
11365 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11366 {
11367 tree const type = TREE_TYPE (arg0);
11368 tree result = NULL_TREE;
11369
11370 STRIP_NOPS (arg0);
11371 STRIP_NOPS (arg1);
11372
11373 /* To proceed, MPFR must exactly represent the target floating point
11374 format, which only happens when the target base equals two. */
11375 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11376 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11377 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11378 {
11379 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11380 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11381
11382 if (real_isfinite (ra0) && real_isfinite (ra1))
11383 {
11384 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11385 const int prec = fmt->p;
11386 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11387 tree result_rem;
11388 long integer_quo;
11389 mpfr_t m0, m1;
11390
11391 mpfr_inits2 (prec, m0, m1, NULL);
11392 mpfr_from_real (m0, ra0, GMP_RNDN);
11393 mpfr_from_real (m1, ra1, GMP_RNDN);
11394 mpfr_clear_flags ();
11395 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11396 /* Remquo is independent of the rounding mode, so pass
11397 inexact=0 to do_mpfr_ckconv(). */
11398 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11399 mpfr_clears (m0, m1, NULL);
11400 if (result_rem)
11401 {
11402 /* MPFR calculates quo in the host's long, so it may
11403 return more bits in quo than the target int can hold
11404 if sizeof(host long) > sizeof(target int). This can
11405 happen even for native compilers in LP64 mode. In
11406 these cases, reduce the quo value modulo the largest
11407 number that the target int can hold while leaving one
11408 bit for the sign. */
11409 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11410 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11411
11412 /* Dereference the quo pointer argument. */
11413 arg_quo = build_fold_indirect_ref (arg_quo);
11414 /* Proceed iff a valid pointer type was passed in. */
11415 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11416 {
11417 /* Set the value. */
11418 tree result_quo
11419 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11420 build_int_cst (TREE_TYPE (arg_quo),
11421 integer_quo));
11422 TREE_SIDE_EFFECTS (result_quo) = 1;
11423 /* Combine the quo assignment with the rem. */
11424 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11425 result_quo, result_rem));
11426 }
11427 }
11428 }
11429 }
11430 return result;
11431 }
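
/* For example (editor's sketch), with constant arguments the fold
   above computes both results at compile time:

     int q;
     remquo (5.0, 3.0, &q)  ->  remainder -1.0, with Q set to 2  */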
11432
11433 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11434 resulting value as a tree with type TYPE. The mpfr precision is
11435 set to the precision of TYPE. We assume that this mpfr function
11436 returns zero if the result could be calculated exactly within the
11437 requested precision. In addition, the integer pointer represented
11438 by ARG_SG will be dereferenced and set to the appropriate signgam
11439 (-1,1) value. */
11440
11441 static tree
11442 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11443 {
11444 tree result = NULL_TREE;
11445
11446 STRIP_NOPS (arg);
11447
11448 /* To proceed, MPFR must exactly represent the target floating point
11449 format, which only happens when the target base equals two. Also
11450 verify ARG is a constant and that ARG_SG is an int pointer. */
11451 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11452 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11453 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11454 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11455 {
11456 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11457
11458 /* In addition to NaN and Inf, the argument cannot be zero or a
11459 negative integer. */
11460 if (real_isfinite (ra)
11461 && ra->cl != rvc_zero
11462 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11463 {
11464 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11465 const int prec = fmt->p;
11466 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11467 int inexact, sg;
11468 mpfr_t m;
11469 tree result_lg;
11470
11471 mpfr_init2 (m, prec);
11472 mpfr_from_real (m, ra, GMP_RNDN);
11473 mpfr_clear_flags ();
11474 inexact = mpfr_lgamma (m, &sg, m, rnd);
11475 result_lg = do_mpfr_ckconv (m, type, inexact);
11476 mpfr_clear (m);
11477 if (result_lg)
11478 {
11479 tree result_sg;
11480
11481 /* Dereference the arg_sg pointer argument. */
11482 arg_sg = build_fold_indirect_ref (arg_sg);
11483 /* Assign the signgam value into *arg_sg. */
11484 result_sg = fold_build2 (MODIFY_EXPR,
11485 TREE_TYPE (arg_sg), arg_sg,
11486 build_int_cst (TREE_TYPE (arg_sg), sg));
11487 TREE_SIDE_EFFECTS (result_sg) = 1;
11488 /* Combine the signgam assignment with the lgamma result. */
11489 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11490 result_sg, result_lg));
11491 }
11492 }
11493 }
11494
11495 return result;
11496 }
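
/* For example (editor's sketch), since gamma (0.5) == sqrt (pi) is
   positive:

     int sg;
     lgamma_r (0.5, &sg)  ->  log (sqrt (pi)) ~= 0.57236, SG set to 1  */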
11497
11498 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
11499 function FUNC on it and return the resulting value as a tree with
11500 type TYPE. The mpfr precision is set to the precision of TYPE. We
11501 assume that function FUNC returns zero if the result could be
11502 calculated exactly within the requested precision. */
11503
11504 static tree
11505 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
11506 {
11507 tree result = NULL_TREE;
11508
11509 STRIP_NOPS (arg);
11510
11511 /* To proceed, MPFR must exactly represent the target floating point
11512 format, which only happens when the target base equals two. */
11513 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
11514 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
11515 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
11516 {
11517 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
11518 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
11519
11520 if (real_isfinite (re) && real_isfinite (im))
11521 {
11522 const struct real_format *const fmt =
11523 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11524 const int prec = fmt->p;
11525 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11526 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11527 int inexact;
11528 mpc_t m;
11529
11530 mpc_init2 (m, prec);
11531 mpfr_from_real (mpc_realref (m), re, rnd);
11532 mpfr_from_real (mpc_imagref (m), im, rnd);
11533 mpfr_clear_flags ();
11534 inexact = func (m, m, crnd);
11535 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
11536 mpc_clear (m);
11537 }
11538 }
11539
11540 return result;
11541 }
11542
11543 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11544 mpc function FUNC on them and return the resulting value as a tree
11545 with type TYPE. The mpfr precision is set to the precision of
11546 TYPE. We assume that function FUNC returns zero if the result
11547 could be calculated exactly within the requested precision. If
11548 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11549 in the arguments and/or results. */
11550
11551 tree
11552 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11553 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11554 {
11555 tree result = NULL_TREE;
11556
11557 STRIP_NOPS (arg0);
11558 STRIP_NOPS (arg1);
11559
11560 /* To proceed, MPFR must exactly represent the target floating point
11561 format, which only happens when the target base equals two. */
11562 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11563 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11564 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11565 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11566 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11567 {
11568 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11569 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11570 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11571 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11572
11573 if (do_nonfinite
11574 || (real_isfinite (re0) && real_isfinite (im0)
11575 && real_isfinite (re1) && real_isfinite (im1)))
11576 {
11577 const struct real_format *const fmt =
11578 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11579 const int prec = fmt->p;
11580 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11581 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11582 int inexact;
11583 mpc_t m0, m1;
11584
11585 mpc_init2 (m0, prec);
11586 mpc_init2 (m1, prec);
11587 mpfr_from_real (mpc_realref (m0), re0, rnd);
11588 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11589 mpfr_from_real (mpc_realref (m1), re1, rnd);
11590 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11591 mpfr_clear_flags ();
11592 inexact = func (m0, m0, m1, crnd);
11593 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11594 mpc_clear (m0);
11595 mpc_clear (m1);
11596 }
11597 }
11598
11599 return result;
11600 }
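
/* For instance (editor's sketch, not from the original source), a
   caller folding cpow with complex-constant arguments might look
   like:

     do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);

   mpc_pow has the expected FUNC signature
   (mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t).  */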
11601
11602 /* A wrapper function for builtin folding that prevents warnings for
11603 "statement without effect" and the like, caused by removing the
11604 call node earlier than the warning is generated. */
11605
11606 tree
11607 fold_call_stmt (gcall *stmt, bool ignore)
11608 {
11609 tree ret = NULL_TREE;
11610 tree fndecl = gimple_call_fndecl (stmt);
11611 location_t loc = gimple_location (stmt);
11612 if (fndecl
11613 && TREE_CODE (fndecl) == FUNCTION_DECL
11614 && DECL_BUILT_IN (fndecl)
11615 && !gimple_call_va_arg_pack_p (stmt))
11616 {
11617 int nargs = gimple_call_num_args (stmt);
11618 tree *args = (nargs > 0
11619 ? gimple_call_arg_ptr (stmt, 0)
11620 : &error_mark_node);
11621
11622 if (avoid_folding_inline_builtin (fndecl))
11623 return NULL_TREE;
11624 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11625 {
11626 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11627 }
11628 else
11629 {
11630 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11631 if (ret)
11632 {
11633 /* Propagate location information from original call to
11634 expansion of builtin. Otherwise things like
11635 maybe_emit_chk_warning, that operate on the expansion
11636 of a builtin, will use the wrong location information. */
11637 if (gimple_has_location (stmt))
11638 {
11639 tree realret = ret;
11640 if (TREE_CODE (ret) == NOP_EXPR)
11641 realret = TREE_OPERAND (ret, 0);
11642 if (CAN_HAVE_LOCATION_P (realret)
11643 && !EXPR_HAS_LOCATION (realret))
11644 SET_EXPR_LOCATION (realret, loc);
11645 return realret;
11646 }
11647 return ret;
11648 }
11649 }
11650 }
11651 return NULL_TREE;
11652 }
11653
11654 /* Look up the function in builtin_decl that corresponds to DECL
11655 and set ASMSPEC as its user assembler name. DECL must be a
11656 function decl that declares a builtin. */
11657
11658 void
11659 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11660 {
11661 tree builtin;
11662 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
11663 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
11664 && asmspec != 0);
11665
11666 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11667 set_user_assembler_name (builtin, asmspec);
11668 switch (DECL_FUNCTION_CODE (decl))
11669 {
11670 case BUILT_IN_MEMCPY:
11671 init_block_move_fn (asmspec);
11672 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
11673 break;
11674 case BUILT_IN_MEMSET:
11675 init_block_clear_fn (asmspec);
11676 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
11677 break;
11678 case BUILT_IN_MEMMOVE:
11679 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
11680 break;
11681 case BUILT_IN_MEMCMP:
11682 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
11683 break;
11684 case BUILT_IN_ABORT:
11685 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
11686 break;
11687 case BUILT_IN_FFS:
11688 if (INT_TYPE_SIZE < BITS_PER_WORD)
11689 {
11690 set_user_assembler_libfunc ("ffs", asmspec);
11691 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
11692 MODE_INT, 0), "ffs");
11693 }
11694 break;
11695 default:
11696 break;
11697 }
11698 }
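
/* For example (editor's sketch), a user-level redirection such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   reaches this function with ASMSPEC == "my_memcpy", so block moves
   that GCC emits internally also call my_memcpy.  */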
11699
11700 /* Return true if DECL is a builtin that expands to a constant or similarly
11701 simple code. */
11702 bool
11703 is_simple_builtin (tree decl)
11704 {
11705 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11706 switch (DECL_FUNCTION_CODE (decl))
11707 {
11708 /* Builtins that expand to constants. */
11709 case BUILT_IN_CONSTANT_P:
11710 case BUILT_IN_EXPECT:
11711 case BUILT_IN_OBJECT_SIZE:
11712 case BUILT_IN_UNREACHABLE:
11713 /* Simple register moves or loads from stack. */
11714 case BUILT_IN_ASSUME_ALIGNED:
11715 case BUILT_IN_RETURN_ADDRESS:
11716 case BUILT_IN_EXTRACT_RETURN_ADDR:
11717 case BUILT_IN_FROB_RETURN_ADDR:
11718 case BUILT_IN_RETURN:
11719 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11720 case BUILT_IN_FRAME_ADDRESS:
11721 case BUILT_IN_VA_END:
11722 case BUILT_IN_STACK_SAVE:
11723 case BUILT_IN_STACK_RESTORE:
11724 /* Exception state returns or moves registers around. */
11725 case BUILT_IN_EH_FILTER:
11726 case BUILT_IN_EH_POINTER:
11727 case BUILT_IN_EH_COPY_VALUES:
11728 return true;
11729
11730 default:
11731 return false;
11732 }
11733
11734 return false;
11735 }
11736
11737 /* Return true if DECL is a builtin that is not expensive, i.e., one that
11738 will most probably be expanded inline into reasonably simple code. This is a
11739 superset of is_simple_builtin. */
11740 bool
11741 is_inexpensive_builtin (tree decl)
11742 {
11743 if (!decl)
11744 return false;
11745 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11746 return true;
11747 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11748 switch (DECL_FUNCTION_CODE (decl))
11749 {
11750 case BUILT_IN_ABS:
11751 case BUILT_IN_ALLOCA:
11752 case BUILT_IN_ALLOCA_WITH_ALIGN:
11753 case BUILT_IN_BSWAP16:
11754 case BUILT_IN_BSWAP32:
11755 case BUILT_IN_BSWAP64:
11756 case BUILT_IN_CLZ:
11757 case BUILT_IN_CLZIMAX:
11758 case BUILT_IN_CLZL:
11759 case BUILT_IN_CLZLL:
11760 case BUILT_IN_CTZ:
11761 case BUILT_IN_CTZIMAX:
11762 case BUILT_IN_CTZL:
11763 case BUILT_IN_CTZLL:
11764 case BUILT_IN_FFS:
11765 case BUILT_IN_FFSIMAX:
11766 case BUILT_IN_FFSL:
11767 case BUILT_IN_FFSLL:
11768 case BUILT_IN_IMAXABS:
11769 case BUILT_IN_FINITE:
11770 case BUILT_IN_FINITEF:
11771 case BUILT_IN_FINITEL:
11772 case BUILT_IN_FINITED32:
11773 case BUILT_IN_FINITED64:
11774 case BUILT_IN_FINITED128:
11775 case BUILT_IN_FPCLASSIFY:
11776 case BUILT_IN_ISFINITE:
11777 case BUILT_IN_ISINF_SIGN:
11778 case BUILT_IN_ISINF:
11779 case BUILT_IN_ISINFF:
11780 case BUILT_IN_ISINFL:
11781 case BUILT_IN_ISINFD32:
11782 case BUILT_IN_ISINFD64:
11783 case BUILT_IN_ISINFD128:
11784 case BUILT_IN_ISNAN:
11785 case BUILT_IN_ISNANF:
11786 case BUILT_IN_ISNANL:
11787 case BUILT_IN_ISNAND32:
11788 case BUILT_IN_ISNAND64:
11789 case BUILT_IN_ISNAND128:
11790 case BUILT_IN_ISNORMAL:
11791 case BUILT_IN_ISGREATER:
11792 case BUILT_IN_ISGREATEREQUAL:
11793 case BUILT_IN_ISLESS:
11794 case BUILT_IN_ISLESSEQUAL:
11795 case BUILT_IN_ISLESSGREATER:
11796 case BUILT_IN_ISUNORDERED:
11797 case BUILT_IN_VA_ARG_PACK:
11798 case BUILT_IN_VA_ARG_PACK_LEN:
11799 case BUILT_IN_VA_COPY:
11800 case BUILT_IN_TRAP:
11801 case BUILT_IN_SAVEREGS:
11802 case BUILT_IN_POPCOUNTL:
11803 case BUILT_IN_POPCOUNTLL:
11804 case BUILT_IN_POPCOUNTIMAX:
11805 case BUILT_IN_POPCOUNT:
11806 case BUILT_IN_PARITYL:
11807 case BUILT_IN_PARITYLL:
11808 case BUILT_IN_PARITYIMAX:
11809 case BUILT_IN_PARITY:
11810 case BUILT_IN_LABS:
11811 case BUILT_IN_LLABS:
11812 case BUILT_IN_PREFETCH:
11813 case BUILT_IN_ACC_ON_DEVICE:
11814 return true;
11815
11816 default:
11817 return is_simple_builtin (decl);
11818 }
11819
11820 return false;
11821 }