/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "internal-fn.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type; make sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   is a Cilk runtime entry point when Cilk Plus is enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

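/* Illustrative sketch (not part of the original source): given the
   prefix checks above,

     is_builtin_name ("__builtin_memcpy")   => true
     is_builtin_name ("__atomic_load_n")    => true
     is_builtin_name ("memcpy")             => false

   so only calls spelled with the "internal" name are recognized.  */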

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
          ptr_bitmask *= BITS_PER_UNIT;
          align = ptr_bitmask & -ptr_bitmask;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow not to be
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

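/* Worked example (illustrative, not from the original source): if
   get_object_alignment_1 reports align == 64 bits with bitpos == 16,
   the address is known to sit 2 bytes past a 64-bit boundary, so the
   guaranteed alignment is only bitpos & -bitpos == 16 bits, i.e. 2
   bytes, which is what get_object_alignment returns.  */
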
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
                                          &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
        {
          unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
          if (trailing_zeros < HOST_BITS_PER_INT)
            {
              unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
              if (inner)
                align = MIN (align, inner);
            }
        }
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

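/* Worked example (illustrative, not from the original source): for
   q = p + 6 (a POINTER_PLUS_EXPR) where p is known to be 8-byte
   aligned, the recursion yields align == 64 bits and the constant
   offset adds 48 bits to bitpos, so get_pointer_alignment (q)
   returns 48 & -48 == 16 bits, i.e. a guaranteed 2-byte alignment.  */
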
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

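/* Illustrative sketch (not from the original source): for a source
   tree representing "foobar" + 3, string_constant yields the
   STRING_CST "foobar" with offset_node == 3; the final
   strlen (ptr + 3) then computes strlen ("bar") == 3, so c_strlen
   returns the ssizetype constant 3.  */
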
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

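/* Illustrative sketch (not from the original source): c_getstr on a
   tree for "hello" + 2 returns a host pointer to "llo", while an
   offset past the end of the string constant, or a non-constant
   offset, returns 0.  */
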
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

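/* Worked example (illustrative, not from the original source): on a
   little-endian target, c_readstr ("abcd", SImode) places 'a' (0x61)
   in the low byte and 'd' (0x64) in the high byte, producing the
   SImode constant 0x64636261.  Note how CH latches at 0 once a NUL
   is seen, so bytes past the terminator read as zero.  */
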
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

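/* Illustrative sketch (not from the original source): with 8-bit
   chars on host and target, an INTEGER_CST of 65 stores 'A' through
   P and returns 0.  A nonzero return happens only for non-constants
   or when the target char is wider than the host char, say a 16-bit
   target char holding 300, which truncates to 44 on the host so
   VAL != HOSTVAL.  */
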
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
         frame address we return, because target-specific definitions will
         override us.  Therefore frame pointer elimination is OK, and using
         the soft frame pointer is OK.

         For a nonzero count, or a zero count with __builtin_frame_address,
         we require a stable offset from the current frame pointer to the
         previous one, so we must use the hard frame pointer, and
         we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
        tem = frame_pointer_rtx;
      else
        {
          tem = hard_frame_pointer_rtx;

          /* Tell reload not to eliminate the frame pointer.  */
          crtl->accesses_prior_frames = 1;
        }
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

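/* Usage sketch (illustrative, not from the original source): a call
   such as

     void *ra = __builtin_return_address (0);

   expands through this routine with COUNT == 0, so no dynamic-chain
   walk happens and the return address is read relative to the
   current frame; a nonzero COUNT walks that many frames back first.  */
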
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis; any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

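/* Usage sketch (illustrative, not from the original source): callers
   below check call expressions with lines such as

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE);

   which accepts exactly two pointer arguments, while a trailing 0
   instead of VOID_TYPE would accept any further arguments.  */
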
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
        return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

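/* Usage sketch (illustrative, not from the original source): a call
   like

     __builtin_prefetch (&a[i + 8], 0, 3);

   requests a read prefetch with maximum temporal locality; both the
   read/write flag and the locality hint must be integer constants,
   as enforced above.  */
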
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}

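/* Layout sketch (illustrative, not from the original source): on a
   hypothetical target with two word-sized argument registers, the
   saved block would be laid out as

     [arg pointer][optional struct value address][reg 0][reg 1]

   with each entry rounded up to its mode's alignment, which is
   exactly the size this function computes.  */
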
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (mem, reg)
                            : gen_rtx_SET (reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed the arguments to us,
     not as we might have pretended they were passed.  Make sure it's a
     valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
1486 if (STACK_GROWS_DOWNWARD)
1487 tem
1488 = force_operand (plus_constant (Pmode, tem,
1489 crtl->args.pretend_args_size),
1490 NULL_RTX);
1491 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1492
1493 size = GET_MODE_SIZE (Pmode);
1494
1495 /* Save the structure value address unless this is passed as an
1496 "invisible" first argument. */
1497 if (struct_incoming_value)
1498 {
1499 emit_move_insn (adjust_address (registers, Pmode, size),
1500 copy_to_reg (struct_incoming_value));
1501 size += GET_MODE_SIZE (Pmode);
1502 }
1503
1504 /* Return the address of the block. */
1505 return copy_addr_to_reg (XEXP (registers, 0));
1506 }
1507
1508 /* __builtin_apply_args returns block of memory allocated on
1509 the stack into which is stored the arg pointer, structure
1510 value address, static chain, and all the registers that might
1511 possibly be used in performing a function call. The code is
1512 moved to the start of the function so the incoming values are
1513 saved. */
1514
1515 static rtx
1516 expand_builtin_apply_args (void)
1517 {
1518 /* Don't do __builtin_apply_args more than once in a function.
1519 Save the result of the first call and reuse it. */
1520 if (apply_args_value != 0)
1521 return apply_args_value;
1522 {
1523 /* When this function is called, it means that registers must be
1524 saved on entry to this function. So we migrate the
1525 call to the first insn of this function. */
1526 rtx temp;
1527
1528 start_sequence ();
1529 temp = expand_builtin_apply_args_1 ();
1530 rtx_insn *seq = get_insns ();
1531 end_sequence ();
1532
1533 apply_args_value = temp;
1534
1535 /* Put the insns after the NOTE that starts the function.
1536 If this is inside a start_sequence, make the outer-level insn
1537 chain current, so the code is placed at the start of the
1538 function. If internal_arg_pointer is a non-virtual pseudo,
1539 it needs to be placed after the function that initializes
1540 that pseudo. */
1541 push_topmost_sequence ();
1542 if (REG_P (crtl->args.internal_arg_pointer)
1543 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1544 emit_insn_before (seq, parm_birth_insn);
1545 else
1546 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1547 pop_topmost_sequence ();
1548 return temp;
1549 }
1550 }
1551
1552 /* Perform an untyped call and save the state required to perform an
1553 untyped return of whatever value was returned by the given function. */
1554
1555 static rtx
1556 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1557 {
1558 int size, align, regno;
1559 machine_mode mode;
1560 rtx incoming_args, result, reg, dest, src;
1561 rtx_call_insn *call_insn;
1562 rtx old_stack_level = 0;
1563 rtx call_fusage = 0;
1564 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1565
1566 arguments = convert_memory_address (Pmode, arguments);
1567
1568 /* Create a block where the return registers can be saved. */
1569 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1570
1571 /* Fetch the arg pointer from the ARGUMENTS block. */
1572 incoming_args = gen_reg_rtx (Pmode);
1573 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1574 if (!STACK_GROWS_DOWNWARD)
1575 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1576 incoming_args, 0, OPTAB_LIB_WIDEN);
1577
1578 /* Push a new argument block and copy the arguments. Do not allow
1579 the (potential) memcpy call below to interfere with our stack
1580 manipulations. */
1581 do_pending_stack_adjust ();
1582 NO_DEFER_POP;
1583
1584 /* Save the stack with nonlocal if available. */
1585 if (targetm.have_save_stack_nonlocal ())
1586 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1587 else
1588 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1589
1590 /* Allocate a block of memory onto the stack and copy the memory
1591 arguments to the outgoing arguments address. We can pass TRUE
1592 as the 4th argument because we just saved the stack pointer
1593 and will restore it right after the call. */
1594 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1595
1596 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1597 may have already set current_function_calls_alloca to true.
1598 current_function_calls_alloca won't be set if argsize is zero,
1599 so we have to guarantee need_drap is true here. */
1600 if (SUPPORTS_STACK_ALIGNMENT)
1601 crtl->need_drap = true;
1602
1603 dest = virtual_outgoing_args_rtx;
1604 if (!STACK_GROWS_DOWNWARD)
1605 {
1606 if (CONST_INT_P (argsize))
1607 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1608 else
1609 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1610 }
1611 dest = gen_rtx_MEM (BLKmode, dest);
1612 set_mem_align (dest, PARM_BOUNDARY);
1613 src = gen_rtx_MEM (BLKmode, incoming_args);
1614 set_mem_align (src, PARM_BOUNDARY);
1615 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1616
1617 /* Refer to the argument block. */
1618 apply_args_size ();
1619 arguments = gen_rtx_MEM (BLKmode, arguments);
1620 set_mem_align (arguments, PARM_BOUNDARY);
1621
1622 /* Walk past the arg-pointer and structure value address. */
1623 size = GET_MODE_SIZE (Pmode);
1624 if (struct_value)
1625 size += GET_MODE_SIZE (Pmode);
1626
1627 /* Restore each of the registers previously saved. Make USE insns
1628 for each of these registers for use in making the call. */
1629 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1630 if ((mode = apply_args_mode[regno]) != VOIDmode)
1631 {
1632 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1633 if (size % align != 0)
1634 size = CEIL (size, align) * align;
1635 reg = gen_rtx_REG (mode, regno);
1636 emit_move_insn (reg, adjust_address (arguments, mode, size));
1637 use_reg (&call_fusage, reg);
1638 size += GET_MODE_SIZE (mode);
1639 }
1640
1641 /* Restore the structure value address unless this is passed as an
1642 "invisible" first argument. */
1643 size = GET_MODE_SIZE (Pmode);
1644 if (struct_value)
1645 {
1646 rtx value = gen_reg_rtx (Pmode);
1647 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1648 emit_move_insn (struct_value, value);
1649 if (REG_P (struct_value))
1650 use_reg (&call_fusage, struct_value);
1651 size += GET_MODE_SIZE (Pmode);
1652 }
1653
1654 /* All arguments and registers used for the call are set up by now! */
1655 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1656
1657 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there
1658 is no need, and we don't want to load it into a register as an
1659 optimization, because prepare_call_address already did so if needed. */
1660 if (GET_CODE (function) != SYMBOL_REF)
1661 function = memory_address (FUNCTION_MODE, function);
1662
1663 /* Generate the actual call instruction and save the return value. */
1664 if (targetm.have_untyped_call ())
1665 {
1666 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1667 emit_call_insn (targetm.gen_untyped_call (mem, result,
1668 result_vector (1, result)));
1669 }
1670 else if (targetm.have_call_value ())
1671 {
1672 rtx valreg = 0;
1673
1674 /* Locate the unique return register. It is not possible to
1675 express a call that sets more than one return register using
1676 call_value; use untyped_call for that. In fact, untyped_call
1677 only needs to save the return registers in the given block. */
1678 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1679 if ((mode = apply_result_mode[regno]) != VOIDmode)
1680 {
1681 gcc_assert (!valreg); /* have_untyped_call required. */
1682
1683 valreg = gen_rtx_REG (mode, regno);
1684 }
1685
1686 emit_insn (targetm.gen_call_value (valreg,
1687 gen_rtx_MEM (FUNCTION_MODE, function),
1688 const0_rtx, NULL_RTX, const0_rtx));
1689
1690 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1691 }
1692 else
1693 gcc_unreachable ();
1694
1695 /* Find the CALL insn we just emitted, and attach the register usage
1696 information. */
1697 call_insn = last_call_insn ();
1698 add_function_usage_to (call_insn, call_fusage);
1699
1700 /* Restore the stack. */
1701 if (targetm.have_save_stack_nonlocal ())
1702 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1703 else
1704 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1705 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1706
1707 OK_DEFER_POP;
1708
1709 /* Return the address of the result block. */
1710 result = copy_addr_to_reg (XEXP (result, 0));
1711 return convert_memory_address (ptr_mode, result);
1712 }
1713
1714 /* Perform an untyped return. */
1715
1716 static void
1717 expand_builtin_return (rtx result)
1718 {
1719 int size, align, regno;
1720 machine_mode mode;
1721 rtx reg;
1722 rtx_insn *call_fusage = 0;
1723
1724 result = convert_memory_address (Pmode, result);
1725
1726 apply_result_size ();
1727 result = gen_rtx_MEM (BLKmode, result);
1728
1729 if (targetm.have_untyped_return ())
1730 {
1731 rtx vector = result_vector (0, result);
1732 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1733 emit_barrier ();
1734 return;
1735 }
1736
1737 /* Restore the return value and note that each value is used. */
1738 size = 0;
1739 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1740 if ((mode = apply_result_mode[regno]) != VOIDmode)
1741 {
1742 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1743 if (size % align != 0)
1744 size = CEIL (size, align) * align;
1745 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1746 emit_move_insn (reg, adjust_address (result, mode, size));
1747
1748 push_to_sequence (call_fusage);
1749 emit_use (reg);
1750 call_fusage = get_insns ();
1751 end_sequence ();
1752 size += GET_MODE_SIZE (mode);
1753 }
1754
1755 /* Put the USE insns before the return. */
1756 emit_insn (call_fusage);
1757
1758 /* Return whatever value was restored by jumping directly to the end
1759 of the function. */
1760 expand_naked_return ();
1761 }
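/* Usage sketch for the three expanders above (the GCC extension
   documented under "Constructing Calls"); target_fn and the 64-byte
   argument-block size are placeholders, not fixed values:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   The size operand must cover all of the callee's stack arguments. */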
1762
1763 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1764
1765 static enum type_class
1766 type_to_class (tree type)
1767 {
1768 switch (TREE_CODE (type))
1769 {
1770 case VOID_TYPE: return void_type_class;
1771 case INTEGER_TYPE: return integer_type_class;
1772 case ENUMERAL_TYPE: return enumeral_type_class;
1773 case BOOLEAN_TYPE: return boolean_type_class;
1774 case POINTER_TYPE: return pointer_type_class;
1775 case REFERENCE_TYPE: return reference_type_class;
1776 case OFFSET_TYPE: return offset_type_class;
1777 case REAL_TYPE: return real_type_class;
1778 case COMPLEX_TYPE: return complex_type_class;
1779 case FUNCTION_TYPE: return function_type_class;
1780 case METHOD_TYPE: return method_type_class;
1781 case RECORD_TYPE: return record_type_class;
1782 case UNION_TYPE:
1783 case QUAL_UNION_TYPE: return union_type_class;
1784 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1785 ? string_type_class : array_type_class);
1786 case LANG_TYPE: return lang_type_class;
1787 default: return no_type_class;
1788 }
1789 }
1790
1791 /* Expand a call EXP to __builtin_classify_type. */
1792
1793 static rtx
1794 expand_builtin_classify_type (tree exp)
1795 {
1796 if (call_expr_nargs (exp))
1797 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1798 return GEN_INT (no_type_class);
1799 }
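/* For example, given the mapping in type_to_class,
   __builtin_classify_type (3.14) expands to the constant
   real_type_class, and an argument of type int * yields
   pointer_type_class. */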
1800
1801 /* This helper macro, meant to be used in mathfn_built_in below,
1802 determines which among a set of three builtin math functions is
1803 appropriate for a given type mode. The `F' and `L' cases are
1804 automatically generated from the `double' case. */
1805 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1806 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1807 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1808 fcodel = BUILT_IN_MATHFN##L ; break;
1809 /* Similar to above, but appends _R after any F/L suffix. */
1810 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1811 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1812 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1813 fcodel = BUILT_IN_MATHFN##L_R ; break;
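/* For illustration (derived mechanically from the macro above),
   CASE_MATHFN (BUILT_IN_SQRT) expands to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so a single macro use covers the double, float and long double
   variants of a function. */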
1814
1815 /* Return the mathematical function equivalent to FN but operating directly
1816 on TYPE, if available. If IMPLICIT_P is true use the implicit builtin
1817 declaration, otherwise use the explicit declaration. If we can't do the
1818 conversion, return zero. */
1819
1820 static tree
1821 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1822 {
1823 enum built_in_function fcode, fcodef, fcodel, fcode2;
1824
1825 switch (fn)
1826 {
1827 CASE_MATHFN (BUILT_IN_ACOS)
1828 CASE_MATHFN (BUILT_IN_ACOSH)
1829 CASE_MATHFN (BUILT_IN_ASIN)
1830 CASE_MATHFN (BUILT_IN_ASINH)
1831 CASE_MATHFN (BUILT_IN_ATAN)
1832 CASE_MATHFN (BUILT_IN_ATAN2)
1833 CASE_MATHFN (BUILT_IN_ATANH)
1834 CASE_MATHFN (BUILT_IN_CBRT)
1835 CASE_MATHFN (BUILT_IN_CEIL)
1836 CASE_MATHFN (BUILT_IN_CEXPI)
1837 CASE_MATHFN (BUILT_IN_COPYSIGN)
1838 CASE_MATHFN (BUILT_IN_COS)
1839 CASE_MATHFN (BUILT_IN_COSH)
1840 CASE_MATHFN (BUILT_IN_DREM)
1841 CASE_MATHFN (BUILT_IN_ERF)
1842 CASE_MATHFN (BUILT_IN_ERFC)
1843 CASE_MATHFN (BUILT_IN_EXP)
1844 CASE_MATHFN (BUILT_IN_EXP10)
1845 CASE_MATHFN (BUILT_IN_EXP2)
1846 CASE_MATHFN (BUILT_IN_EXPM1)
1847 CASE_MATHFN (BUILT_IN_FABS)
1848 CASE_MATHFN (BUILT_IN_FDIM)
1849 CASE_MATHFN (BUILT_IN_FLOOR)
1850 CASE_MATHFN (BUILT_IN_FMA)
1851 CASE_MATHFN (BUILT_IN_FMAX)
1852 CASE_MATHFN (BUILT_IN_FMIN)
1853 CASE_MATHFN (BUILT_IN_FMOD)
1854 CASE_MATHFN (BUILT_IN_FREXP)
1855 CASE_MATHFN (BUILT_IN_GAMMA)
1856 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1857 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1858 CASE_MATHFN (BUILT_IN_HYPOT)
1859 CASE_MATHFN (BUILT_IN_ILOGB)
1860 CASE_MATHFN (BUILT_IN_ICEIL)
1861 CASE_MATHFN (BUILT_IN_IFLOOR)
1862 CASE_MATHFN (BUILT_IN_INF)
1863 CASE_MATHFN (BUILT_IN_IRINT)
1864 CASE_MATHFN (BUILT_IN_IROUND)
1865 CASE_MATHFN (BUILT_IN_ISINF)
1866 CASE_MATHFN (BUILT_IN_J0)
1867 CASE_MATHFN (BUILT_IN_J1)
1868 CASE_MATHFN (BUILT_IN_JN)
1869 CASE_MATHFN (BUILT_IN_LCEIL)
1870 CASE_MATHFN (BUILT_IN_LDEXP)
1871 CASE_MATHFN (BUILT_IN_LFLOOR)
1872 CASE_MATHFN (BUILT_IN_LGAMMA)
1873 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1874 CASE_MATHFN (BUILT_IN_LLCEIL)
1875 CASE_MATHFN (BUILT_IN_LLFLOOR)
1876 CASE_MATHFN (BUILT_IN_LLRINT)
1877 CASE_MATHFN (BUILT_IN_LLROUND)
1878 CASE_MATHFN (BUILT_IN_LOG)
1879 CASE_MATHFN (BUILT_IN_LOG10)
1880 CASE_MATHFN (BUILT_IN_LOG1P)
1881 CASE_MATHFN (BUILT_IN_LOG2)
1882 CASE_MATHFN (BUILT_IN_LOGB)
1883 CASE_MATHFN (BUILT_IN_LRINT)
1884 CASE_MATHFN (BUILT_IN_LROUND)
1885 CASE_MATHFN (BUILT_IN_MODF)
1886 CASE_MATHFN (BUILT_IN_NAN)
1887 CASE_MATHFN (BUILT_IN_NANS)
1888 CASE_MATHFN (BUILT_IN_NEARBYINT)
1889 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1890 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1891 CASE_MATHFN (BUILT_IN_POW)
1892 CASE_MATHFN (BUILT_IN_POWI)
1893 CASE_MATHFN (BUILT_IN_POW10)
1894 CASE_MATHFN (BUILT_IN_REMAINDER)
1895 CASE_MATHFN (BUILT_IN_REMQUO)
1896 CASE_MATHFN (BUILT_IN_RINT)
1897 CASE_MATHFN (BUILT_IN_ROUND)
1898 CASE_MATHFN (BUILT_IN_SCALB)
1899 CASE_MATHFN (BUILT_IN_SCALBLN)
1900 CASE_MATHFN (BUILT_IN_SCALBN)
1901 CASE_MATHFN (BUILT_IN_SIGNBIT)
1902 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1903 CASE_MATHFN (BUILT_IN_SIN)
1904 CASE_MATHFN (BUILT_IN_SINCOS)
1905 CASE_MATHFN (BUILT_IN_SINH)
1906 CASE_MATHFN (BUILT_IN_SQRT)
1907 CASE_MATHFN (BUILT_IN_TAN)
1908 CASE_MATHFN (BUILT_IN_TANH)
1909 CASE_MATHFN (BUILT_IN_TGAMMA)
1910 CASE_MATHFN (BUILT_IN_TRUNC)
1911 CASE_MATHFN (BUILT_IN_Y0)
1912 CASE_MATHFN (BUILT_IN_Y1)
1913 CASE_MATHFN (BUILT_IN_YN)
1914
1915 default:
1916 return NULL_TREE;
1917 }
1918
1919 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1920 fcode2 = fcode;
1921 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1922 fcode2 = fcodef;
1923 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1924 fcode2 = fcodel;
1925 else
1926 return NULL_TREE;
1927
1928 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1929 return NULL_TREE;
1930
1931 return builtin_decl_explicit (fcode2);
1932 }
1933
1934 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1935
1936 tree
1937 mathfn_built_in (tree type, enum built_in_function fn)
1938 {
1939 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1940 }
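/* Example: mathfn_built_in (float_type_node, BUILT_IN_SIN) selects the
   BUILT_IN_SIN group above, picks fcodef = BUILT_IN_SINF for the float
   type, and returns the sinf decl, provided the implicit builtin is
   available for the target. */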
1941
1942 /* If errno must be maintained, expand the RTL to check if the result,
1943 TARGET, of a built-in function call, EXP, is NaN, and if so set
1944 errno to EDOM. */
1945
1946 static void
1947 expand_errno_check (tree exp, rtx target)
1948 {
1949 rtx_code_label *lab = gen_label_rtx ();
1950
1951 /* Test the result; if it is NaN, set errno=EDOM because the argument
1952 was not in the domain (only a NaN compares unequal to itself). */
1953 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1954 NULL_RTX, NULL, lab,
1955 /* The jump is very likely. */
1956 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1957
1958 #ifdef TARGET_EDOM
1959 /* If this built-in doesn't throw an exception, set errno directly. */
1960 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1961 {
1962 #ifdef GEN_ERRNO_RTX
1963 rtx errno_rtx = GEN_ERRNO_RTX;
1964 #else
1965 rtx errno_rtx
1966 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1967 #endif
1968 emit_move_insn (errno_rtx,
1969 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1970 emit_label (lab);
1971 return;
1972 }
1973 #endif
1974
1975 /* Make sure the library call isn't expanded as a tail call. */
1976 CALL_EXPR_TAILCALL (exp) = 0;
1977
1978 /* We can't set errno=EDOM directly; let the library call do it.
1979 Pop the arguments right away in case the call gets deleted. */
1980 NO_DEFER_POP;
1981 expand_call (exp, target, 0);
1982 OK_DEFER_POP;
1983 emit_label (lab);
1984 }
1985
1986 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1987 Return NULL_RTX if a normal call should be emitted rather than expanding
1988 the function in-line. EXP is the expression that is a call to the builtin
1989 function; if convenient, the result should be placed in TARGET.
1990 SUBTARGET may be used as the target for computing one of EXP's operands. */
1991
1992 static rtx
1993 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1994 {
1995 optab builtin_optab;
1996 rtx op0;
1997 rtx_insn *insns;
1998 tree fndecl = get_callee_fndecl (exp);
1999 machine_mode mode;
2000 bool errno_set = false;
2001 bool try_widening = false;
2002 tree arg;
2003
2004 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2005 return NULL_RTX;
2006
2007 arg = CALL_EXPR_ARG (exp, 0);
2008
2009 switch (DECL_FUNCTION_CODE (fndecl))
2010 {
2011 CASE_FLT_FN (BUILT_IN_SQRT):
2012 errno_set = ! tree_expr_nonnegative_p (arg);
2013 try_widening = true;
2014 builtin_optab = sqrt_optab;
2015 break;
2016 CASE_FLT_FN (BUILT_IN_EXP):
2017 errno_set = true; builtin_optab = exp_optab; break;
2018 CASE_FLT_FN (BUILT_IN_EXP10):
2019 CASE_FLT_FN (BUILT_IN_POW10):
2020 errno_set = true; builtin_optab = exp10_optab; break;
2021 CASE_FLT_FN (BUILT_IN_EXP2):
2022 errno_set = true; builtin_optab = exp2_optab; break;
2023 CASE_FLT_FN (BUILT_IN_EXPM1):
2024 errno_set = true; builtin_optab = expm1_optab; break;
2025 CASE_FLT_FN (BUILT_IN_LOGB):
2026 errno_set = true; builtin_optab = logb_optab; break;
2027 CASE_FLT_FN (BUILT_IN_LOG):
2028 errno_set = true; builtin_optab = log_optab; break;
2029 CASE_FLT_FN (BUILT_IN_LOG10):
2030 errno_set = true; builtin_optab = log10_optab; break;
2031 CASE_FLT_FN (BUILT_IN_LOG2):
2032 errno_set = true; builtin_optab = log2_optab; break;
2033 CASE_FLT_FN (BUILT_IN_LOG1P):
2034 errno_set = true; builtin_optab = log1p_optab; break;
2035 CASE_FLT_FN (BUILT_IN_ASIN):
2036 builtin_optab = asin_optab; break;
2037 CASE_FLT_FN (BUILT_IN_ACOS):
2038 builtin_optab = acos_optab; break;
2039 CASE_FLT_FN (BUILT_IN_TAN):
2040 builtin_optab = tan_optab; break;
2041 CASE_FLT_FN (BUILT_IN_ATAN):
2042 builtin_optab = atan_optab; break;
2043 CASE_FLT_FN (BUILT_IN_FLOOR):
2044 builtin_optab = floor_optab; break;
2045 CASE_FLT_FN (BUILT_IN_CEIL):
2046 builtin_optab = ceil_optab; break;
2047 CASE_FLT_FN (BUILT_IN_TRUNC):
2048 builtin_optab = btrunc_optab; break;
2049 CASE_FLT_FN (BUILT_IN_ROUND):
2050 builtin_optab = round_optab; break;
2051 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2052 builtin_optab = nearbyint_optab;
2053 if (flag_trapping_math)
2054 break;
2055 /* Otherwise fall through and expand as rint. */
2056 CASE_FLT_FN (BUILT_IN_RINT):
2057 builtin_optab = rint_optab; break;
2058 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2059 builtin_optab = significand_optab; break;
2060 default:
2061 gcc_unreachable ();
2062 }
2063
2064 /* Make a suitable register to place result in. */
2065 mode = TYPE_MODE (TREE_TYPE (exp));
2066
2067 if (! flag_errno_math || ! HONOR_NANS (mode))
2068 errno_set = false;
2069
2070 /* Before working hard, check whether the instruction is available, but try
2071 to widen the mode for specific operations. */
2072 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2073 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2074 && (!errno_set || !optimize_insn_for_size_p ()))
2075 {
2076 rtx result = gen_reg_rtx (mode);
2077
2078 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2079 need to expand the argument again. This way, we will not perform
2080 side-effects more than once. */
2081 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2082
2083 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2084
2085 start_sequence ();
2086
2087 /* Compute into RESULT.
2088 Set RESULT to wherever the result comes back. */
2089 result = expand_unop (mode, builtin_optab, op0, result, 0);
2090
2091 if (result != 0)
2092 {
2093 if (errno_set)
2094 expand_errno_check (exp, result);
2095
2096 /* Output the entire sequence. */
2097 insns = get_insns ();
2098 end_sequence ();
2099 emit_insn (insns);
2100 return result;
2101 }
2102
2103 /* If we were unable to expand via the builtin, stop the sequence
2104 (without outputting the insns) and call the library function
2105 with the stabilized argument list. */
2106 end_sequence ();
2107 }
2108
2109 return expand_call (exp, target, target == const0_rtx);
2110 }
2111
2112 /* Expand a call to the builtin binary math functions (pow and atan2).
2113 Return NULL_RTX if a normal call should be emitted rather than expanding the
2114 function in-line. EXP is the expression that is a call to the builtin
2115 function; if convenient, the result should be placed in TARGET.
2116 SUBTARGET may be used as the target for computing one of EXP's
2117 operands. */
2118
2119 static rtx
2120 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2121 {
2122 optab builtin_optab;
2123 rtx op0, op1, result;
2124 rtx_insn *insns;
2125 int op1_type = REAL_TYPE;
2126 tree fndecl = get_callee_fndecl (exp);
2127 tree arg0, arg1;
2128 machine_mode mode;
2129 bool errno_set = true;
2130
2131 switch (DECL_FUNCTION_CODE (fndecl))
2132 {
2133 CASE_FLT_FN (BUILT_IN_SCALBN):
2134 CASE_FLT_FN (BUILT_IN_SCALBLN):
2135 CASE_FLT_FN (BUILT_IN_LDEXP):
2136 op1_type = INTEGER_TYPE;
2137 default:
2138 break;
2139 }
2140
2141 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2142 return NULL_RTX;
2143
2144 arg0 = CALL_EXPR_ARG (exp, 0);
2145 arg1 = CALL_EXPR_ARG (exp, 1);
2146
2147 switch (DECL_FUNCTION_CODE (fndecl))
2148 {
2149 CASE_FLT_FN (BUILT_IN_POW):
2150 builtin_optab = pow_optab; break;
2151 CASE_FLT_FN (BUILT_IN_ATAN2):
2152 builtin_optab = atan2_optab; break;
2153 CASE_FLT_FN (BUILT_IN_SCALB):
2154 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2155 return 0;
2156 builtin_optab = scalb_optab; break;
2157 CASE_FLT_FN (BUILT_IN_SCALBN):
2158 CASE_FLT_FN (BUILT_IN_SCALBLN):
2159 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2160 return 0;
2161 /* Fall through... */
2162 CASE_FLT_FN (BUILT_IN_LDEXP):
2163 builtin_optab = ldexp_optab; break;
2164 CASE_FLT_FN (BUILT_IN_FMOD):
2165 builtin_optab = fmod_optab; break;
2166 CASE_FLT_FN (BUILT_IN_REMAINDER):
2167 CASE_FLT_FN (BUILT_IN_DREM):
2168 builtin_optab = remainder_optab; break;
2169 default:
2170 gcc_unreachable ();
2171 }
2172
2173 /* Make a suitable register to place result in. */
2174 mode = TYPE_MODE (TREE_TYPE (exp));
2175
2176 /* Before working hard, check whether the instruction is available. */
2177 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2178 return NULL_RTX;
2179
2180 result = gen_reg_rtx (mode);
2181
2182 if (! flag_errno_math || ! HONOR_NANS (mode))
2183 errno_set = false;
2184
2185 if (errno_set && optimize_insn_for_size_p ())
2186 return 0;
2187
2188 /* Always stabilize the argument list. */
2189 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2190 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2191
2192 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2193 op1 = expand_normal (arg1);
2194
2195 start_sequence ();
2196
2197 /* Compute into RESULT.
2198 Set RESULT to wherever the result comes back. */
2199 result = expand_binop (mode, builtin_optab, op0, op1,
2200 result, 0, OPTAB_DIRECT);
2201
2202 /* If we were unable to expand via the builtin, stop the sequence
2203 (without outputting the insns) and call the library function
2204 with the stabilized argument list. */
2205 if (result == 0)
2206 {
2207 end_sequence ();
2208 return expand_call (exp, target, target == const0_rtx);
2209 }
2210
2211 if (errno_set)
2212 expand_errno_check (exp, result);
2213
2214 /* Output the entire sequence. */
2215 insns = get_insns ();
2216 end_sequence ();
2217 emit_insn (insns);
2218
2219 return result;
2220 }
2221
2222 /* Expand a call to the builtin ternary math functions (fma).
2223 Return NULL_RTX if a normal call should be emitted rather than expanding the
2224 function in-line. EXP is the expression that is a call to the builtin
2225 function; if convenient, the result should be placed in TARGET.
2226 SUBTARGET may be used as the target for computing one of EXP's
2227 operands. */
2228
2229 static rtx
2230 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2231 {
2232 optab builtin_optab;
2233 rtx op0, op1, op2, result;
2234 rtx_insn *insns;
2235 tree fndecl = get_callee_fndecl (exp);
2236 tree arg0, arg1, arg2;
2237 machine_mode mode;
2238
2239 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2240 return NULL_RTX;
2241
2242 arg0 = CALL_EXPR_ARG (exp, 0);
2243 arg1 = CALL_EXPR_ARG (exp, 1);
2244 arg2 = CALL_EXPR_ARG (exp, 2);
2245
2246 switch (DECL_FUNCTION_CODE (fndecl))
2247 {
2248 CASE_FLT_FN (BUILT_IN_FMA):
2249 builtin_optab = fma_optab; break;
2250 default:
2251 gcc_unreachable ();
2252 }
2253
2254 /* Make a suitable register to place result in. */
2255 mode = TYPE_MODE (TREE_TYPE (exp));
2256
2257 /* Before working hard, check whether the instruction is available. */
2258 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2259 return NULL_RTX;
2260
2261 result = gen_reg_rtx (mode);
2262
2263 /* Always stabilize the argument list. */
2264 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2265 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2266 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2267
2268 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2269 op1 = expand_normal (arg1);
2270 op2 = expand_normal (arg2);
2271
2272 start_sequence ();
2273
2274 /* Compute into RESULT.
2275 Set RESULT to wherever the result comes back. */
2276 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2277 result, 0);
2278
2279 /* If we were unable to expand via the builtin, stop the sequence
2280 (without outputting the insns) and call the library function
2281 with the stabilized argument list. */
2282 if (result == 0)
2283 {
2284 end_sequence ();
2285 return expand_call (exp, target, target == const0_rtx);
2286 }
2287
2288 /* Output the entire sequence. */
2289 insns = get_insns ();
2290 end_sequence ();
2291 emit_insn (insns);
2292
2293 return result;
2294 }
2295
2296 /* Expand a call to the builtin sin and cos math functions.
2297 Return NULL_RTX if a normal call should be emitted rather than expanding the
2298 function in-line. EXP is the expression that is a call to the builtin
2299 function; if convenient, the result should be placed in TARGET.
2300 SUBTARGET may be used as the target for computing one of EXP's
2301 operands. */
2302
2303 static rtx
2304 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2305 {
2306 optab builtin_optab;
2307 rtx op0;
2308 rtx_insn *insns;
2309 tree fndecl = get_callee_fndecl (exp);
2310 machine_mode mode;
2311 tree arg;
2312
2313 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2314 return NULL_RTX;
2315
2316 arg = CALL_EXPR_ARG (exp, 0);
2317
2318 switch (DECL_FUNCTION_CODE (fndecl))
2319 {
2320 CASE_FLT_FN (BUILT_IN_SIN):
2321 CASE_FLT_FN (BUILT_IN_COS):
2322 builtin_optab = sincos_optab; break;
2323 default:
2324 gcc_unreachable ();
2325 }
2326
2327 /* Make a suitable register to place result in. */
2328 mode = TYPE_MODE (TREE_TYPE (exp));
2329
2330 /* Check if the sincos insn is available; otherwise fall back
2331 to the sin or cos insn. */
2332 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2333 switch (DECL_FUNCTION_CODE (fndecl))
2334 {
2335 CASE_FLT_FN (BUILT_IN_SIN):
2336 builtin_optab = sin_optab; break;
2337 CASE_FLT_FN (BUILT_IN_COS):
2338 builtin_optab = cos_optab; break;
2339 default:
2340 gcc_unreachable ();
2341 }
2342
2343 /* Before working hard, check whether the instruction is available. */
2344 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2345 {
2346 rtx result = gen_reg_rtx (mode);
2347
2348 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2349 need to expand the argument again. This way, we will not perform
2350 side-effects more than once. */
2351 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2352
2353 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2354
2355 start_sequence ();
2356
2357 /* Compute into RESULT.
2358 Set RESULT to wherever the result comes back. */
2359 if (builtin_optab == sincos_optab)
2360 {
2361 int ok;
2362
2363 switch (DECL_FUNCTION_CODE (fndecl))
2364 {
2365 CASE_FLT_FN (BUILT_IN_SIN):
2366 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2367 break;
2368 CASE_FLT_FN (BUILT_IN_COS):
2369 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2370 break;
2371 default:
2372 gcc_unreachable ();
2373 }
2374 gcc_assert (ok);
2375 }
2376 else
2377 result = expand_unop (mode, builtin_optab, op0, result, 0);
2378
2379 if (result != 0)
2380 {
2381 /* Output the entire sequence. */
2382 insns = get_insns ();
2383 end_sequence ();
2384 emit_insn (insns);
2385 return result;
2386 }
2387
2388 /* If we were unable to expand via the builtin, stop the sequence
2389 (without outputting the insns) and call the library function
2390 with the stabilized argument list. */
2391 end_sequence ();
2392 }
2393
2394 return expand_call (exp, target, target == const0_rtx);
2395 }
2396
2397 /* Given an interclass math builtin decl FNDECL and its argument ARG
2398 return an RTL instruction code that implements the functionality.
2399 If that isn't possible or available return CODE_FOR_nothing. */
2400
2401 static enum insn_code
2402 interclass_mathfn_icode (tree arg, tree fndecl)
2403 {
2404 bool errno_set = false;
2405 optab builtin_optab = unknown_optab;
2406 machine_mode mode;
2407
2408 switch (DECL_FUNCTION_CODE (fndecl))
2409 {
2410 CASE_FLT_FN (BUILT_IN_ILOGB):
2411 errno_set = true; builtin_optab = ilogb_optab; break;
2412 CASE_FLT_FN (BUILT_IN_ISINF):
2413 builtin_optab = isinf_optab; break;
2414 case BUILT_IN_ISNORMAL:
2415 case BUILT_IN_ISFINITE:
2416 CASE_FLT_FN (BUILT_IN_FINITE):
2417 case BUILT_IN_FINITED32:
2418 case BUILT_IN_FINITED64:
2419 case BUILT_IN_FINITED128:
2420 case BUILT_IN_ISINFD32:
2421 case BUILT_IN_ISINFD64:
2422 case BUILT_IN_ISINFD128:
2423 /* These builtins have no optabs (yet). */
2424 break;
2425 default:
2426 gcc_unreachable ();
2427 }
2428
2429 /* There's no easy way to detect the case we need to set EDOM. */
2430 if (flag_errno_math && errno_set)
2431 return CODE_FOR_nothing;
2432
2433 /* Optab mode depends on the mode of the input argument. */
2434 mode = TYPE_MODE (TREE_TYPE (arg));
2435
2436 if (builtin_optab)
2437 return optab_handler (builtin_optab, mode);
2438 return CODE_FOR_nothing;
2439 }
2440
2441 /* Expand a call to one of the builtin math functions that operate on
2442 a floating point argument and produce an integer result (ilogb, isinf,
2443 isnan, etc.).
2444 Return 0 if a normal call should be emitted rather than expanding the
2445 function in-line. EXP is the expression that is a call to the builtin
2446 function; if convenient, the result should be placed in TARGET. */
2447
2448 static rtx
2449 expand_builtin_interclass_mathfn (tree exp, rtx target)
2450 {
2451 enum insn_code icode = CODE_FOR_nothing;
2452 rtx op0;
2453 tree fndecl = get_callee_fndecl (exp);
2454 machine_mode mode;
2455 tree arg;
2456
2457 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2458 return NULL_RTX;
2459
2460 arg = CALL_EXPR_ARG (exp, 0);
2461 icode = interclass_mathfn_icode (arg, fndecl);
2462 mode = TYPE_MODE (TREE_TYPE (arg));
2463
2464 if (icode != CODE_FOR_nothing)
2465 {
2466 struct expand_operand ops[1];
2467 rtx_insn *last = get_last_insn ();
2468 tree orig_arg = arg;
2469
2470 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2471 need to expand the argument again. This way, we will not perform
2472 side-effects more than once. */
2473 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2474
2475 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2476
2477 if (mode != GET_MODE (op0))
2478 op0 = convert_to_mode (mode, op0, 0);
2479
2480 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2481 if (maybe_legitimize_operands (icode, 0, 1, ops)
2482 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2483 return ops[0].value;
2484
2485 delete_insns_since (last);
2486 CALL_EXPR_ARG (exp, 0) = orig_arg;
2487 }
2488
2489 return NULL_RTX;
2490 }
2491
2492 /* Expand a call to the builtin sincos math function.
2493 Return NULL_RTX if a normal call should be emitted rather than expanding the
2494 function in-line. EXP is the expression that is a call to the builtin
2495 function. */
2496
2497 static rtx
2498 expand_builtin_sincos (tree exp)
2499 {
2500 rtx op0, op1, op2, target1, target2;
2501 machine_mode mode;
2502 tree arg, sinp, cosp;
2503 int result;
2504 location_t loc = EXPR_LOCATION (exp);
2505 tree alias_type, alias_off;
2506
2507 if (!validate_arglist (exp, REAL_TYPE,
2508 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2509 return NULL_RTX;
2510
2511 arg = CALL_EXPR_ARG (exp, 0);
2512 sinp = CALL_EXPR_ARG (exp, 1);
2513 cosp = CALL_EXPR_ARG (exp, 2);
2514
2515 /* Make a suitable register to place result in. */
2516 mode = TYPE_MODE (TREE_TYPE (arg));
2517
2518 /* Check if sincos insn is available, otherwise emit the call. */
2519 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2520 return NULL_RTX;
2521
2522 target1 = gen_reg_rtx (mode);
2523 target2 = gen_reg_rtx (mode);
2524
2525 op0 = expand_normal (arg);
2526 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2527 alias_off = build_int_cst (alias_type, 0);
2528 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2529 sinp, alias_off));
2530 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2531 cosp, alias_off));
2532
2533 /* Compute the sin into target1 and the cos into target2;
2534 expand_twoval_unop sets both results at once. */
2535 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2536 gcc_assert (result);
2537
2538 /* Move target1 and target2 to the memory locations indicated
2539 by op1 and op2. */
2540 emit_move_insn (op1, target1);
2541 emit_move_insn (op2, target2);
2542
2543 return const0_rtx;
2544 }
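/* As a sketch of the effect: a call sincos (x, &s, &c) expands to one
   sincos insn computing both values, followed by stores through the
   SINP and COSP pointers, instead of separate sin and cos libcalls. */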
2545
2546 /* Expand a call to the internal cexpi builtin via the sincos math function.
2547 EXP is the expression that is a call to the builtin function; if convenient,
2548 the result should be placed in TARGET. */
2549
2550 static rtx
2551 expand_builtin_cexpi (tree exp, rtx target)
2552 {
2553 tree fndecl = get_callee_fndecl (exp);
2554 tree arg, type;
2555 machine_mode mode;
2556 rtx op0, op1, op2;
2557 location_t loc = EXPR_LOCATION (exp);
2558
2559 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2560 return NULL_RTX;
2561
2562 arg = CALL_EXPR_ARG (exp, 0);
2563 type = TREE_TYPE (arg);
2564 mode = TYPE_MODE (TREE_TYPE (arg));
2565
2566 /* Try expanding via a sincos optab, falling back to emitting a libcall
2567 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2568 is only generated from sincos or cexp, or when either is available. */
2569 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2570 {
2571 op1 = gen_reg_rtx (mode);
2572 op2 = gen_reg_rtx (mode);
2573
2574 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2575
2576 /* Compute into op1 and op2. */
2577 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2578 }
2579 else if (targetm.libc_has_function (function_sincos))
2580 {
2581 tree call, fn = NULL_TREE;
2582 tree top1, top2;
2583 rtx op1a, op2a;
2584
2585 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2586 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2587 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2588 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2589 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2590 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2591 else
2592 gcc_unreachable ();
2593
2594 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2595 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2596 op1a = copy_addr_to_reg (XEXP (op1, 0));
2597 op2a = copy_addr_to_reg (XEXP (op2, 0));
2598 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2599 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2600
2601 /* Make sure not to fold the sincos call again. */
2602 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2603 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2604 call, 3, arg, top1, top2));
2605 }
2606 else
2607 {
2608 tree call, fn = NULL_TREE, narg;
2609 tree ctype = build_complex_type (type);
2610
2611 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2612 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2613 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2614 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2615 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2616 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2617 else
2618 gcc_unreachable ();
2619
2620 /* If we don't have a decl for cexp, create one. This is the
2621 friendliest fallback if the user calls __builtin_cexpi
2622 without full C99 function support in the target. */
2623 if (fn == NULL_TREE)
2624 {
2625 tree fntype;
2626 const char *name = NULL;
2627
2628 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2629 name = "cexpf";
2630 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2631 name = "cexp";
2632 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2633 name = "cexpl";
2634
2635 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2636 fn = build_fn_decl (name, fntype);
2637 }
2638
2639 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2640 build_real (type, dconst0), arg);
2641
2642 /* Make sure not to fold the cexp call again. */
2643 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2644 return expand_expr (build_call_nary (ctype, call, 1, narg),
2645 target, VOIDmode, EXPAND_NORMAL);
2646 }
2647
2648 /* Now build the proper return type. */
2649 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2650 make_tree (TREE_TYPE (arg), op2),
2651 make_tree (TREE_TYPE (arg), op1)),
2652 target, VOIDmode, EXPAND_NORMAL);
2653 }
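/* A note on the COMPLEX_EXPR built above: cexpi (x) is e^(i*x), i.e.
   cos (x) + i*sin (x), so OP2 (the cos output of the sincos expansion)
   supplies the real part and OP1 (the sin output) the imaginary part. */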
2654
2655 /* Conveniently construct a function call expression. FNDECL names the
2656 function to be called, N is the number of arguments, and the "..."
2657 parameters are the argument expressions. Unlike build_call_expr
2658 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2659
2660 static tree
2661 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2662 {
2663 va_list ap;
2664 tree fntype = TREE_TYPE (fndecl);
2665 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2666
2667 va_start (ap, n);
2668 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2669 va_end (ap);
2670 SET_EXPR_LOCATION (fn, loc);
2671 return fn;
2672 }
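/* Typical use, as later in this file when building a fallback call:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);

   which always yields a CALL_EXPR suitable for expand_normal. */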
2673
2674 /* Expand a call to one of the builtin rounding functions gcc defines
2675 as an extension (lfloor and lceil). As these are gcc extensions we
2676 do not need to worry about setting errno to EDOM.
2677 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2678 EXP is the expression that is a call to the builtin function;
2679 if convenient, the result should be placed in TARGET. */
2680
2681 static rtx
2682 expand_builtin_int_roundingfn (tree exp, rtx target)
2683 {
2684 convert_optab builtin_optab;
2685 rtx op0, tmp;
2686 rtx_insn *insns;
2687 tree fndecl = get_callee_fndecl (exp);
2688 enum built_in_function fallback_fn;
2689 tree fallback_fndecl;
2690 machine_mode mode;
2691 tree arg;
2692
2693 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2694 gcc_unreachable ();
2695
2696 arg = CALL_EXPR_ARG (exp, 0);
2697
2698 switch (DECL_FUNCTION_CODE (fndecl))
2699 {
2700 CASE_FLT_FN (BUILT_IN_ICEIL):
2701 CASE_FLT_FN (BUILT_IN_LCEIL):
2702 CASE_FLT_FN (BUILT_IN_LLCEIL):
2703 builtin_optab = lceil_optab;
2704 fallback_fn = BUILT_IN_CEIL;
2705 break;
2706
2707 CASE_FLT_FN (BUILT_IN_IFLOOR):
2708 CASE_FLT_FN (BUILT_IN_LFLOOR):
2709 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2710 builtin_optab = lfloor_optab;
2711 fallback_fn = BUILT_IN_FLOOR;
2712 break;
2713
2714 default:
2715 gcc_unreachable ();
2716 }
2717
2718 /* Make a suitable register to place result in. */
2719 mode = TYPE_MODE (TREE_TYPE (exp));
2720
2721 target = gen_reg_rtx (mode);
2722
2723 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2724 need to expand the argument again. This way, we will not perform
2725 side-effects more than once. */
2726 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2727
2728 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2729
2730 start_sequence ();
2731
2732 /* Compute into TARGET. */
2733 if (expand_sfix_optab (target, op0, builtin_optab))
2734 {
2735 /* Output the entire sequence. */
2736 insns = get_insns ();
2737 end_sequence ();
2738 emit_insn (insns);
2739 return target;
2740 }
2741
2742 /* If we were unable to expand via the builtin, stop the sequence
2743 (without outputting the insns). */
2744 end_sequence ();
2745
2746 /* Fall back to the floating point rounding optab. */
2747 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2748
2749 /* For non-C99 targets we may end up without a fallback fndecl here
2750 if the user called __builtin_lfloor directly. In this case emit
2751 a call to the floor/ceil variants nevertheless. This should result
2752 in the best user experience for targets without full C99 support. */
2753 if (fallback_fndecl == NULL_TREE)
2754 {
2755 tree fntype;
2756 const char *name = NULL;
2757
2758 switch (DECL_FUNCTION_CODE (fndecl))
2759 {
2760 case BUILT_IN_ICEIL:
2761 case BUILT_IN_LCEIL:
2762 case BUILT_IN_LLCEIL:
2763 name = "ceil";
2764 break;
2765 case BUILT_IN_ICEILF:
2766 case BUILT_IN_LCEILF:
2767 case BUILT_IN_LLCEILF:
2768 name = "ceilf";
2769 break;
2770 case BUILT_IN_ICEILL:
2771 case BUILT_IN_LCEILL:
2772 case BUILT_IN_LLCEILL:
2773 name = "ceill";
2774 break;
2775 case BUILT_IN_IFLOOR:
2776 case BUILT_IN_LFLOOR:
2777 case BUILT_IN_LLFLOOR:
2778 name = "floor";
2779 break;
2780 case BUILT_IN_IFLOORF:
2781 case BUILT_IN_LFLOORF:
2782 case BUILT_IN_LLFLOORF:
2783 name = "floorf";
2784 break;
2785 case BUILT_IN_IFLOORL:
2786 case BUILT_IN_LFLOORL:
2787 case BUILT_IN_LLFLOORL:
2788 name = "floorl";
2789 break;
2790 default:
2791 gcc_unreachable ();
2792 }
2793
2794 fntype = build_function_type_list (TREE_TYPE (arg),
2795 TREE_TYPE (arg), NULL_TREE);
2796 fallback_fndecl = build_fn_decl (name, fntype);
2797 }
2798
2799 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2800
2801 tmp = expand_normal (exp);
2802 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2803
2804 /* Truncate the result of the floating point optab to an integer
2805 via expand_fix (). */
2806 target = gen_reg_rtx (mode);
2807 expand_fix (target, tmp, 0);
2808
2809 return target;
2810 }
2811
2812 /* Expand a call to one of the builtin math functions doing integer
2813 conversion (lrint).
2814 Return 0 if a normal call should be emitted rather than expanding the
2815 function in-line. EXP is the expression that is a call to the builtin
2816 function; if convenient, the result should be placed in TARGET. */
2817
2818 static rtx
2819 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2820 {
2821 convert_optab builtin_optab;
2822 rtx op0;
2823 rtx_insn *insns;
2824 tree fndecl = get_callee_fndecl (exp);
2825 tree arg;
2826 machine_mode mode;
2827 enum built_in_function fallback_fn = BUILT_IN_NONE;
2828
2829 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2830 gcc_unreachable ();
2831
2832 arg = CALL_EXPR_ARG (exp, 0);
2833
2834 switch (DECL_FUNCTION_CODE (fndecl))
2835 {
2836 CASE_FLT_FN (BUILT_IN_IRINT):
2837 fallback_fn = BUILT_IN_LRINT;
2838 /* FALLTHRU */
2839 CASE_FLT_FN (BUILT_IN_LRINT):
2840 CASE_FLT_FN (BUILT_IN_LLRINT):
2841 builtin_optab = lrint_optab;
2842 break;
2843
2844 CASE_FLT_FN (BUILT_IN_IROUND):
2845 fallback_fn = BUILT_IN_LROUND;
2846 /* FALLTHRU */
2847 CASE_FLT_FN (BUILT_IN_LROUND):
2848 CASE_FLT_FN (BUILT_IN_LLROUND):
2849 builtin_optab = lround_optab;
2850 break;
2851
2852 default:
2853 gcc_unreachable ();
2854 }
2855
2856 /* There's no easy way to detect the case we need to set EDOM. */
2857 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2858 return NULL_RTX;
2859
2860 /* Make a suitable register to place result in. */
2861 mode = TYPE_MODE (TREE_TYPE (exp));
2862
2863 /* If errno handling is not required, try to expand inline. */
2864 if (!flag_errno_math)
2865 {
2866 rtx result = gen_reg_rtx (mode);
2867
2868 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2869 need to expand the argument again. This way, we will not perform
2870 side-effects more than once. */
2871 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2872
2873 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2874
2875 start_sequence ();
2876
2877 if (expand_sfix_optab (result, op0, builtin_optab))
2878 {
2879 /* Output the entire sequence. */
2880 insns = get_insns ();
2881 end_sequence ();
2882 emit_insn (insns);
2883 return result;
2884 }
2885
2886 /* If we were unable to expand via the builtin, stop the sequence
2887 (without outputting the insns) and call the library function
2888 with the stabilized argument list. */
2889 end_sequence ();
2890 }
2891
2892 if (fallback_fn != BUILT_IN_NONE)
2893 {
2894 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2895 targets, (int) round (x) should never be transformed into
2896 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2897 a call to lround in the hope that the target provides at least some
2898 C99 functions. This should result in the best user experience for
2899 targets without full C99 support. */
2900 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2901 fallback_fn, 0);
2902
2903 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2904 fallback_fndecl, 1, arg);
2905
2906 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2907 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2908 return convert_to_mode (mode, target, 0);
2909 }
2910
2911 return expand_call (exp, target, target == const0_rtx);
2912 }
2913
2914 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2915 a normal call should be emitted rather than expanding the function
2916 in-line. EXP is the expression that is a call to the builtin
2917 function; if convenient, the result should be placed in TARGET. */
2918
2919 static rtx
2920 expand_builtin_powi (tree exp, rtx target)
2921 {
2922 tree arg0, arg1;
2923 rtx op0, op1;
2924 machine_mode mode;
2925 machine_mode mode2;
2926
2927 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2928 return NULL_RTX;
2929
2930 arg0 = CALL_EXPR_ARG (exp, 0);
2931 arg1 = CALL_EXPR_ARG (exp, 1);
2932 mode = TYPE_MODE (TREE_TYPE (exp));
2933
2934 /* Emit a libcall to libgcc. */
2935
2936 /* Mode of the 2nd argument must match that of an int. */
2937 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2938
2939 if (target == NULL_RTX)
2940 target = gen_reg_rtx (mode);
2941
2942 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2943 if (GET_MODE (op0) != mode)
2944 op0 = convert_to_mode (mode, op0, 0);
2945 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2946 if (GET_MODE (op1) != mode2)
2947 op1 = convert_to_mode (mode2, op1, 0);
2948
2949 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2950 target, LCT_CONST, mode, 2,
2951 op0, mode, op1, mode2);
2952
2953 return target;
2954 }
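/* Illustrative note: optab_libfunc (powi_optab, mode) names the libgcc
   helpers (__powisf2, __powidf2, ...), so __builtin_powi (x, 3) in
   double typically becomes a call to __powidf2 (x, 3) when earlier
   passes have not already expanded it into multiplications. */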
2955
2956 /* Expand expression EXP which is a call to the strlen builtin. Return
2957 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2958 try to get the result in TARGET, if convenient. */
2959
2960 static rtx
2961 expand_builtin_strlen (tree exp, rtx target,
2962 machine_mode target_mode)
2963 {
2964 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2965 return NULL_RTX;
2966 else
2967 {
2968 struct expand_operand ops[4];
2969 rtx pat;
2970 tree len;
2971 tree src = CALL_EXPR_ARG (exp, 0);
2972 rtx src_reg;
2973 rtx_insn *before_strlen;
2974 machine_mode insn_mode = target_mode;
2975 enum insn_code icode = CODE_FOR_nothing;
2976 unsigned int align;
2977
2978 /* If the length can be computed at compile-time, return it. */
2979 len = c_strlen (src, 0);
2980 if (len)
2981 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2982
2983 /* If the length can be computed at compile-time and is a constant
2984 integer, but there are side-effects in src, evaluate
2985 src for side-effects, then return len.
2986 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2987 can be optimized into: i++; x = 3; */
2988 len = c_strlen (src, 1);
2989 if (len && TREE_CODE (len) == INTEGER_CST)
2990 {
2991 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2992 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2993 }
2994
2995 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2996
2997 /* If SRC is not a pointer type, don't do this operation inline. */
2998 if (align == 0)
2999 return NULL_RTX;
3000
3001 /* Bail out if we can't compute strlen in the right mode. */
3002 while (insn_mode != VOIDmode)
3003 {
3004 icode = optab_handler (strlen_optab, insn_mode);
3005 if (icode != CODE_FOR_nothing)
3006 break;
3007
3008 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3009 }
3010 if (insn_mode == VOIDmode)
3011 return NULL_RTX;
3012
3013 /* Make a place to hold the source address. We will not expand
3014 the actual source until we are sure that the expansion will
3015 not fail -- there are trees that cannot be expanded twice. */
3016 src_reg = gen_reg_rtx (Pmode);
3017
3018 /* Mark the beginning of the strlen sequence so we can emit the
3019 source operand later. */
3020 before_strlen = get_last_insn ();
3021
3022 create_output_operand (&ops[0], target, insn_mode);
3023 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3024 create_integer_operand (&ops[2], 0);
3025 create_integer_operand (&ops[3], align);
3026 if (!maybe_expand_insn (icode, 4, ops))
3027 return NULL_RTX;
3028
3029 /* Now that we are assured of success, expand the source. */
3030 start_sequence ();
3031 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3032 if (pat != src_reg)
3033 {
3034 #ifdef POINTERS_EXTEND_UNSIGNED
3035 if (GET_MODE (pat) != Pmode)
3036 pat = convert_to_mode (Pmode, pat,
3037 POINTERS_EXTEND_UNSIGNED);
3038 #endif
3039 emit_move_insn (src_reg, pat);
3040 }
3041 pat = get_insns ();
3042 end_sequence ();
3043
3044 if (before_strlen)
3045 emit_insn_after (pat, before_strlen);
3046 else
3047 emit_insn_before (pat, get_insns ());
3048
3049 /* Return the value in the proper mode for this function. */
3050 if (GET_MODE (ops[0].value) == target_mode)
3051 target = ops[0].value;
3052 else if (target != 0)
3053 convert_move (target, ops[0].value, 0);
3054 else
3055 target = convert_to_mode (target_mode, ops[0].value, 0);
3056
3057 return target;
3058 }
3059 }
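/* Two quick examples of the paths above: strlen ("abcd") is computed
   at compile time by c_strlen and expands to the constant 4, while a
   non-constant argument is only expanded inline when the target has a
   strlen insn in TARGET_MODE or some wider mode. */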
3060
3061 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3062 bytes from the constant string DATA + OFFSET and return them as a
3063 target constant. */
3064
3065 static rtx
3066 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3067 machine_mode mode)
3068 {
3069 const char *str = (const char *) data;
3070
3071 gcc_assert (offset >= 0
3072 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3073 <= strlen (str) + 1));
3074
3075 return c_readstr (str + offset, mode);
3076 }
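/* For instance, with DATA pointing at "hello" and a 4-byte integer
   MODE, an OFFSET of 0 makes c_readstr pack the bytes "hell" into one
   integer constant of that mode, in target byte order. */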
3077
3078 /* LEN specifies the length of the block of a memcpy/memset operation.
3079 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3080 In some cases we can make a very likely guess about the maximum size;
3081 we then store it in PROBABLE_MAX_SIZE. */
3082
3083 static void
3084 determine_block_size (tree len, rtx len_rtx,
3085 unsigned HOST_WIDE_INT *min_size,
3086 unsigned HOST_WIDE_INT *max_size,
3087 unsigned HOST_WIDE_INT *probable_max_size)
3088 {
3089 if (CONST_INT_P (len_rtx))
3090 {
3091 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3092 return;
3093 }
3094 else
3095 {
3096 wide_int min, max;
3097 enum value_range_type range_type = VR_UNDEFINED;
3098
3099 /* Determine bounds from the type. */
3100 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3101 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3102 else
3103 *min_size = 0;
3104 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3105 *probable_max_size = *max_size
3106 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3107 else
3108 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3109
3110 if (TREE_CODE (len) == SSA_NAME)
3111 range_type = get_range_info (len, &min, &max);
3112 if (range_type == VR_RANGE)
3113 {
3114 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3115 *min_size = min.to_uhwi ();
3116 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3117 *probable_max_size = *max_size = max.to_uhwi ();
3118 }
3119 else if (range_type == VR_ANTI_RANGE)
3120 {
3121 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3122 if (min == 0)
3123 {
3124 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3125 *min_size = max.to_uhwi () + 1;
3126 }
3127 /* Code like
3128
3129 int n;
3130 if (n < 100)
3131 memcpy (a, b, n)
3132
3133 produces an anti-range allowing negative values of N. We can
3134 still use that information and guess that N is not negative.
3135 */
3136 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3137 *probable_max_size = min.to_uhwi () - 1;
3138 }
3139 }
3140 gcc_checking_assert (*max_size <=
3141 (unsigned HOST_WIDE_INT)
3142 GET_MODE_MASK (GET_MODE (len_rtx)));
3143 }
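/* Worked example: for

     if (n != 0)
       memcpy (a, b, n);

   the recorded range of N is typically the anti-range ~[0, 0], so the
   code above raises *MIN_SIZE to 1 while *MAX_SIZE keeps the bound
   implied by the type of N. */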
3144
3145 /* Helper function to do the actual work for expand_builtin_memcpy. */
3146
3147 static rtx
3148 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3149 {
3150 const char *src_str;
3151 unsigned int src_align = get_pointer_alignment (src);
3152 unsigned int dest_align = get_pointer_alignment (dest);
3153 rtx dest_mem, src_mem, dest_addr, len_rtx;
3154 HOST_WIDE_INT expected_size = -1;
3155 unsigned int expected_align = 0;
3156 unsigned HOST_WIDE_INT min_size;
3157 unsigned HOST_WIDE_INT max_size;
3158 unsigned HOST_WIDE_INT probable_max_size;
3159
3160 /* If DEST is not a pointer type, call the normal function. */
3161 if (dest_align == 0)
3162 return NULL_RTX;
3163
3164 /* If SRC is not a pointer type, don't do this
3165 operation in-line. */
3166 if (src_align == 0)
3167 return NULL_RTX;
3168
3169 if (currently_expanding_gimple_stmt)
3170 stringop_block_profile (currently_expanding_gimple_stmt,
3171 &expected_align, &expected_size);
3172
3173 if (expected_align < dest_align)
3174 expected_align = dest_align;
3175 dest_mem = get_memory_rtx (dest, len);
3176 set_mem_align (dest_mem, dest_align);
3177 len_rtx = expand_normal (len);
3178 determine_block_size (len, len_rtx, &min_size, &max_size,
3179 &probable_max_size);
3180 src_str = c_getstr (src);
3181
3182 /* If SRC is a string constant and block move would be done
3183 by pieces, we can avoid loading the string from memory
3184 and only store the computed constants. */
3185 if (src_str
3186 && CONST_INT_P (len_rtx)
3187 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3188 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3189 CONST_CAST (char *, src_str),
3190 dest_align, false))
3191 {
3192 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3193 builtin_memcpy_read_str,
3194 CONST_CAST (char *, src_str),
3195 dest_align, false, 0);
3196 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3197 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3198 return dest_mem;
3199 }
3200
3201 src_mem = get_memory_rtx (src, len);
3202 set_mem_align (src_mem, src_align);
3203
3204 /* Copy the block most expediently. */
3205 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3206 CALL_EXPR_TAILCALL (exp)
3207 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3208 expected_align, expected_size,
3209 min_size, max_size, probable_max_size);
3210
3211 if (dest_addr == 0)
3212 {
3213 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3214 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3215 }
3216
3217 return dest_addr;
3218 }
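/* Making the two paths above concrete: memcpy (buf, "hi", 3) with a
   suitably aligned BUF can be expanded by store_by_pieces into
   immediate stores of the string bytes, while a variable-length copy
   goes through emit_block_move_hints using the range information from
   determine_block_size. */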
3219
3220 /* Expand a call EXP to the memcpy builtin.
3221 Return NULL_RTX if we failed; the caller should emit a normal call,
3222 otherwise try to get the result in TARGET, if convenient (and in
3223 mode MODE if that's convenient). */
3224
3225 static rtx
3226 expand_builtin_memcpy (tree exp, rtx target)
3227 {
3228 if (!validate_arglist (exp,
3229 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3230 return NULL_RTX;
3231 else
3232 {
3233 tree dest = CALL_EXPR_ARG (exp, 0);
3234 tree src = CALL_EXPR_ARG (exp, 1);
3235 tree len = CALL_EXPR_ARG (exp, 2);
3236 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3237 }
3238 }
3239
3240 /* Expand an instrumented call EXP to the memcpy builtin.
3241 Return NULL_RTX if we failed; the caller should emit a normal call,
3242 otherwise try to get the result in TARGET, if convenient (and in
3243 mode MODE if that's convenient). */
3244
3245 static rtx
3246 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3247 {
3248 if (!validate_arglist (exp,
3249 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3250 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3251 INTEGER_TYPE, VOID_TYPE))
3252 return NULL_RTX;
3253 else
3254 {
3255 tree dest = CALL_EXPR_ARG (exp, 0);
3256 tree src = CALL_EXPR_ARG (exp, 2);
3257 tree len = CALL_EXPR_ARG (exp, 4);
3258 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3259
3260 /* Return src bounds with the result. */
3261 if (res)
3262 {
3263 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3264 expand_normal (CALL_EXPR_ARG (exp, 1)));
3265 res = chkp_join_splitted_slot (res, bnd);
3266 }
3267 return res;
3268 }
3269 }
3270
3271 /* Expand a call EXP to the mempcpy builtin.
3272 Return NULL_RTX if we failed; the caller should emit a normal call,
3273 otherwise try to get the result in TARGET, if convenient (and in
3274 mode MODE if that's convenient). If ENDP is 0 return the
3275 destination pointer, if ENDP is 1 return the end pointer ala
3276 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3277 stpcpy. */
3278
3279 static rtx
3280 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3281 {
3282 if (!validate_arglist (exp,
3283 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3284 return NULL_RTX;
3285 else
3286 {
3287 tree dest = CALL_EXPR_ARG (exp, 0);
3288 tree src = CALL_EXPR_ARG (exp, 1);
3289 tree len = CALL_EXPR_ARG (exp, 2);
3290 return expand_builtin_mempcpy_args (dest, src, len,
3291 target, mode, /*endp=*/ 1,
3292 exp);
3293 }
3294 }
3295
3296 /* Expand an instrumented call EXP to the mempcpy builtin.
3297 Return NULL_RTX if we failed; the caller should emit a normal call,
3298 otherwise try to get the result in TARGET, if convenient (and in
3299 mode MODE if that's convenient). */
3300
3301 static rtx
3302 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3303 {
3304 if (!validate_arglist (exp,
3305 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3306 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3307 INTEGER_TYPE, VOID_TYPE))
3308 return NULL_RTX;
3309 else
3310 {
3311 tree dest = CALL_EXPR_ARG (exp, 0);
3312 tree src = CALL_EXPR_ARG (exp, 2);
3313 tree len = CALL_EXPR_ARG (exp, 4);
3314 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3315 mode, 1, exp);
3316
3317 /* Return src bounds with the result. */
3318 if (res)
3319 {
3320 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3321 expand_normal (CALL_EXPR_ARG (exp, 1)));
3322 res = chkp_join_splitted_slot (res, bnd);
3323 }
3324 return res;
3325 }
3326 }
3327
3328 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3329 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3330 so that this can also be called without constructing an actual CALL_EXPR.
3331 The other arguments and return value are the same as for
3332 expand_builtin_mempcpy. */
3333
3334 static rtx
3335 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3336 rtx target, machine_mode mode, int endp,
3337 tree orig_exp)
3338 {
3339 tree fndecl = get_callee_fndecl (orig_exp);
3340
3341 /* If return value is ignored, transform mempcpy into memcpy. */
3342 if (target == const0_rtx
3343 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3344 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3345 {
3346 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3347 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3348 dest, src, len);
3349 return expand_expr (result, target, mode, EXPAND_NORMAL);
3350 }
3351 else if (target == const0_rtx
3352 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3353 {
3354 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3355 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3356 dest, src, len);
3357 return expand_expr (result, target, mode, EXPAND_NORMAL);
3358 }
3359 else
3360 {
3361 const char *src_str;
3362 unsigned int src_align = get_pointer_alignment (src);
3363 unsigned int dest_align = get_pointer_alignment (dest);
3364 rtx dest_mem, src_mem, len_rtx;
3365
3366 /* If either SRC or DEST is not a pointer type, don't do this
3367 operation in-line. */
3368 if (dest_align == 0 || src_align == 0)
3369 return NULL_RTX;
3370
3371 /* If LEN is not constant, call the normal function. */
3372 if (! tree_fits_uhwi_p (len))
3373 return NULL_RTX;
3374
3375 len_rtx = expand_normal (len);
3376 src_str = c_getstr (src);
3377
3378 /* If SRC is a string constant and block move would be done
3379 by pieces, we can avoid loading the string from memory
3380 and store only the computed constants. */
3381 if (src_str
3382 && CONST_INT_P (len_rtx)
3383 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3384 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3385 CONST_CAST (char *, src_str),
3386 dest_align, false))
3387 {
3388 dest_mem = get_memory_rtx (dest, len);
3389 set_mem_align (dest_mem, dest_align);
3390 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3391 builtin_memcpy_read_str,
3392 CONST_CAST (char *, src_str),
3393 dest_align, false, endp);
3394 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3395 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3396 return dest_mem;
3397 }
3398
3399 if (CONST_INT_P (len_rtx)
3400 && can_move_by_pieces (INTVAL (len_rtx),
3401 MIN (dest_align, src_align)))
3402 {
3403 dest_mem = get_memory_rtx (dest, len);
3404 set_mem_align (dest_mem, dest_align);
3405 src_mem = get_memory_rtx (src, len);
3406 set_mem_align (src_mem, src_align);
3407 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3408 MIN (dest_align, src_align), endp);
3409 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3410 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3411 return dest_mem;
3412 }
3413
3414 return NULL_RTX;
3415 }
3416 }
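
/* A hedged example of the store_by_pieces path above: for a call

     void *p = mempcpy (dst, "hi", 3);

   SRC is the string constant "hi" and LEN_RTX is the constant 3,
   which satisfies 3 <= strlen ("hi") + 1, so the bytes 'h', 'i'
   and '\0' are stored as immediates without ever loading the string
   from memory, and the value returned is dst + 3 (ENDP == 1).  */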
3417
3418 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3419 if we failed; the caller should emit a normal call. Otherwise try to
3420 get the result in TARGET, if convenient. If ENDP is 0 return the
3421 destination pointer, if ENDP is 1 return the end pointer ala
3422 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3423 stpcpy. */
3424
3425 static rtx
3426 expand_movstr (tree dest, tree src, rtx target, int endp)
3427 {
3428 struct expand_operand ops[3];
3429 rtx dest_mem;
3430 rtx src_mem;
3431
3432 if (!targetm.have_movstr ())
3433 return NULL_RTX;
3434
3435 dest_mem = get_memory_rtx (dest, NULL);
3436 src_mem = get_memory_rtx (src, NULL);
3437 if (!endp)
3438 {
3439 target = force_reg (Pmode, XEXP (dest_mem, 0));
3440 dest_mem = replace_equiv_address (dest_mem, target);
3441 }
3442
3443 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3444 create_fixed_operand (&ops[1], dest_mem);
3445 create_fixed_operand (&ops[2], src_mem);
3446 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3447 return NULL_RTX;
3448
3449 if (endp && target != const0_rtx)
3450 {
3451 target = ops[0].value;
3452 /* movstr is supposed to set end to the address of the NUL
3453 terminator. If the caller requested a mempcpy-like return value,
3454 adjust it. */
3455 if (endp == 1)
3456 {
3457 rtx tem = plus_constant (GET_MODE (target),
3458 gen_lowpart (GET_MODE (target), target), 1);
3459 emit_move_insn (target, force_operand (tem, NULL_RTX));
3460 }
3461 }
3462 return target;
3463 }
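
/* Illustrative restatement of the ENDP == 1 fixup above: a movstr
   pattern leaves its output pointing at the NUL terminator, i.e.
   dst + strlen (src).  A mempcpy-style caller wants one past that,
   dst + strlen (src) + 1, hence the plus_constant (..., 1); an
   stpcpy-style caller (ENDP == 2) can use the result unchanged.  */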
3464
3465 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3466 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3467 try to get the result in TARGET, if convenient. */
3469
3470 static rtx
3471 expand_builtin_strcpy (tree exp, rtx target)
3472 {
3473 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3474 {
3475 tree dest = CALL_EXPR_ARG (exp, 0);
3476 tree src = CALL_EXPR_ARG (exp, 1);
3477 return expand_builtin_strcpy_args (dest, src, target);
3478 }
3479 return NULL_RTX;
3480 }
3481
3482 /* Helper function to do the actual work for expand_builtin_strcpy. The
3483 arguments to the builtin_strcpy call DEST and SRC are broken out
3484 so that this can also be called without constructing an actual CALL_EXPR.
3485 The other arguments and return value are the same as for
3486 expand_builtin_strcpy. */
3487
3488 static rtx
3489 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3490 {
3491 return expand_movstr (dest, src, target, /*endp=*/0);
3492 }
3493
3494 /* Expand a call EXP to the stpcpy builtin.
3495 Return NULL_RTX if we failed; the caller should emit a normal call.
3496 Otherwise try to get the result in TARGET, if convenient (and in
3497 mode MODE if that's convenient). */
3498
3499 static rtx
3500 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3501 {
3502 tree dst, src;
3503 location_t loc = EXPR_LOCATION (exp);
3504
3505 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3506 return NULL_RTX;
3507
3508 dst = CALL_EXPR_ARG (exp, 0);
3509 src = CALL_EXPR_ARG (exp, 1);
3510
3511 /* If return value is ignored, transform stpcpy into strcpy. */
3512 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3513 {
3514 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3515 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3516 return expand_expr (result, target, mode, EXPAND_NORMAL);
3517 }
3518 else
3519 {
3520 tree len, lenp1;
3521 rtx ret;
3522
3523 /* Ensure we get an actual string whose length can be evaluated at
3524 compile-time, not an expression containing a string. This is
3525 because the latter will potentially produce pessimized code
3526 when used to compute the return value. */
3527 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3528 return expand_movstr (dst, src, target, /*endp=*/2);
3529
3530 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3531 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3532 target, mode, /*endp=*/2,
3533 exp);
3534
3535 if (ret)
3536 return ret;
3537
3538 if (TREE_CODE (len) == INTEGER_CST)
3539 {
3540 rtx len_rtx = expand_normal (len);
3541
3542 if (CONST_INT_P (len_rtx))
3543 {
3544 ret = expand_builtin_strcpy_args (dst, src, target);
3545
3546 if (ret)
3547 {
3548 if (! target)
3549 {
3550 if (mode != VOIDmode)
3551 target = gen_reg_rtx (mode);
3552 else
3553 target = gen_reg_rtx (GET_MODE (ret));
3554 }
3555 if (GET_MODE (target) != GET_MODE (ret))
3556 ret = gen_lowpart (GET_MODE (target), ret);
3557
3558 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3559 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3560 gcc_assert (ret);
3561
3562 return target;
3563 }
3564 }
3565 }
3566
3567 return expand_movstr (dst, src, target, /*endp=*/2);
3568 }
3569 }
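
/* A hedged sketch of the transformation above at the source level:

     char *q = stpcpy (dst, "abc");

   has a known source length, so LEN == 3 and LENP1 == 4, and the
   call becomes mempcpy (dst, "abc", 4) expanded with ENDP == 2,
   yielding dst + 4 - 1 == dst + 3, the address of the copied NUL,
   which is exactly what stpcpy must return.  */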
3570
3571 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3572 bytes from constant string DATA + OFFSET and return it as target
3573 constant. */
3574
3575 rtx
3576 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3577 machine_mode mode)
3578 {
3579 const char *str = (const char *) data;
3580
3581 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3582 return const0_rtx;
3583
3584 return c_readstr (str + offset, mode);
3585 }
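
/* For example (illustration only): with DATA == "ab", an OFFSET of
   0 or 1 reads from the string itself, while any OFFSET greater
   than strlen ("ab") == 2 returns const0_rtx, so bytes beyond the
   terminating NUL come out as zeros -- exactly the padding that
   strncpy requires of this callback.  */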
3586
3587 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3588 NULL_RTX if we failed; the caller should emit a normal call. */
3589
3590 static rtx
3591 expand_builtin_strncpy (tree exp, rtx target)
3592 {
3593 location_t loc = EXPR_LOCATION (exp);
3594
3595 if (validate_arglist (exp,
3596 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3597 {
3598 tree dest = CALL_EXPR_ARG (exp, 0);
3599 tree src = CALL_EXPR_ARG (exp, 1);
3600 tree len = CALL_EXPR_ARG (exp, 2);
3601 tree slen = c_strlen (src, 1);
3602
3603 /* We must be passed a constant len and src parameter. */
3604 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3605 return NULL_RTX;
3606
3607 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3608
3609 /* We're required to pad with trailing zeros if the requested
3610 len is greater than strlen(s2)+1. In that case try to
3611 use store_by_pieces; if that fails, punt. */
3612 if (tree_int_cst_lt (slen, len))
3613 {
3614 unsigned int dest_align = get_pointer_alignment (dest);
3615 const char *p = c_getstr (src);
3616 rtx dest_mem;
3617
3618 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3619 || !can_store_by_pieces (tree_to_uhwi (len),
3620 builtin_strncpy_read_str,
3621 CONST_CAST (char *, p),
3622 dest_align, false))
3623 return NULL_RTX;
3624
3625 dest_mem = get_memory_rtx (dest, len);
3626 store_by_pieces (dest_mem, tree_to_uhwi (len),
3627 builtin_strncpy_read_str,
3628 CONST_CAST (char *, p), dest_align, false, 0);
3629 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3630 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3631 return dest_mem;
3632 }
3633 }
3634 return NULL_RTX;
3635 }
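
/* A hedged example of the padding path above:

     char buf[8];
     strncpy (buf, "hi", sizeof buf);

   has SLEN + 1 == 3 less than LEN == 8, so store_by_pieces emits
   stores for 'h', 'i' and six zero bytes (builtin_strncpy_read_str
   supplying const0_rtx past the string), with no loop or libcall.  */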
3636
3637 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3638 bytes from constant string DATA + OFFSET and return it as target
3639 constant. */
3640
3641 rtx
3642 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3643 machine_mode mode)
3644 {
3645 const char *c = (const char *) data;
3646 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3647
3648 memset (p, *c, GET_MODE_SIZE (mode));
3649
3650 return c_readstr (p, mode);
3651 }
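
/* For instance (illustration only): with *DATA == 0x5a and MODE ==
   SImode this callback materializes a 4-byte constant whose memory
   image is 0x5a 0x5a 0x5a 0x5a.  OFFSET is ignored because every
   chunk of a memset has the same contents.  */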
3652
3653 /* Callback routine for store_by_pieces. Return the RTL of a register
3654 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3655 char value given in the RTL register data. For example, if mode is
3656 4 bytes wide, return the RTL for 0x01010101*data. */
3657
3658 static rtx
3659 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3660 machine_mode mode)
3661 {
3662 rtx target, coeff;
3663 size_t size;
3664 char *p;
3665
3666 size = GET_MODE_SIZE (mode);
3667 if (size == 1)
3668 return (rtx) data;
3669
3670 p = XALLOCAVEC (char, size);
3671 memset (p, 1, size);
3672 coeff = c_readstr (p, mode);
3673
3674 target = convert_to_mode (mode, (rtx) data, 1);
3675 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3676 return force_reg (mode, target);
3677 }
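
/* A hedged sketch of the arithmetic above: a fill byte C known only
   at run time is replicated by multiplication,

     target = (mode) C * 0x0101...01;

   e.g. in SImode the coefficient is 0x01010101, so C == 0x7f yields
   0x7f7f7f7f.  The coefficient itself is obtained portably by
   running a mode-sized buffer of 0x01 bytes through c_readstr.  */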
3678
3679 /* Expand expression EXP, which is a call to the memset builtin. Return
3680 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3681 try to get the result in TARGET, if convenient (and in mode MODE if that's
3682 convenient). */
3683
3684 static rtx
3685 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3686 {
3687 if (!validate_arglist (exp,
3688 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3689 return NULL_RTX;
3690 else
3691 {
3692 tree dest = CALL_EXPR_ARG (exp, 0);
3693 tree val = CALL_EXPR_ARG (exp, 1);
3694 tree len = CALL_EXPR_ARG (exp, 2);
3695 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3696 }
3697 }
3698
3699 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3700 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3701 try to get the result in TARGET, if convenient (and in mode MODE if that's
3702 convenient). */
3703
3704 static rtx
3705 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3706 {
3707 if (!validate_arglist (exp,
3708 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3709 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3710 return NULL_RTX;
3711 else
3712 {
3713 tree dest = CALL_EXPR_ARG (exp, 0);
3714 tree val = CALL_EXPR_ARG (exp, 2);
3715 tree len = CALL_EXPR_ARG (exp, 3);
3716 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3717
3718 /* Return DEST bounds with the result. */
3719 if (res)
3720 {
3721 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3722 expand_normal (CALL_EXPR_ARG (exp, 1)));
3723 res = chkp_join_splitted_slot (res, bnd);
3724 }
3725 return res;
3726 }
3727 }
3728
3729 /* Helper function to do the actual work for expand_builtin_memset. The
3730 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3731 so that this can also be called without constructing an actual CALL_EXPR.
3732 The other arguments and return value are the same as for
3733 expand_builtin_memset. */
3734
3735 static rtx
3736 expand_builtin_memset_args (tree dest, tree val, tree len,
3737 rtx target, machine_mode mode, tree orig_exp)
3738 {
3739 tree fndecl, fn;
3740 enum built_in_function fcode;
3741 machine_mode val_mode;
3742 char c;
3743 unsigned int dest_align;
3744 rtx dest_mem, dest_addr, len_rtx;
3745 HOST_WIDE_INT expected_size = -1;
3746 unsigned int expected_align = 0;
3747 unsigned HOST_WIDE_INT min_size;
3748 unsigned HOST_WIDE_INT max_size;
3749 unsigned HOST_WIDE_INT probable_max_size;
3750
3751 dest_align = get_pointer_alignment (dest);
3752
3753 /* If DEST is not a pointer type, don't do this operation in-line. */
3754 if (dest_align == 0)
3755 return NULL_RTX;
3756
3757 if (currently_expanding_gimple_stmt)
3758 stringop_block_profile (currently_expanding_gimple_stmt,
3759 &expected_align, &expected_size);
3760
3761 if (expected_align < dest_align)
3762 expected_align = dest_align;
3763
3764 /* If the LEN parameter is zero, return DEST. */
3765 if (integer_zerop (len))
3766 {
3767 /* Evaluate and ignore VAL in case it has side-effects. */
3768 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3769 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3770 }
3771
3772 /* Stabilize the arguments in case we fail. */
3773 dest = builtin_save_expr (dest);
3774 val = builtin_save_expr (val);
3775 len = builtin_save_expr (len);
3776
3777 len_rtx = expand_normal (len);
3778 determine_block_size (len, len_rtx, &min_size, &max_size,
3779 &probable_max_size);
3780 dest_mem = get_memory_rtx (dest, len);
3781 val_mode = TYPE_MODE (unsigned_char_type_node);
3782
3783 if (TREE_CODE (val) != INTEGER_CST)
3784 {
3785 rtx val_rtx;
3786
3787 val_rtx = expand_normal (val);
3788 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3789
3790 /* Assume that we can memset by pieces if we can store
3791 the coefficients by pieces (in the required modes).
3792 We can't pass builtin_memset_gen_str as that emits RTL. */
3793 c = 1;
3794 if (tree_fits_uhwi_p (len)
3795 && can_store_by_pieces (tree_to_uhwi (len),
3796 builtin_memset_read_str, &c, dest_align,
3797 true))
3798 {
3799 val_rtx = force_reg (val_mode, val_rtx);
3800 store_by_pieces (dest_mem, tree_to_uhwi (len),
3801 builtin_memset_gen_str, val_rtx, dest_align,
3802 true, 0);
3803 }
3804 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3805 dest_align, expected_align,
3806 expected_size, min_size, max_size,
3807 probable_max_size))
3808 goto do_libcall;
3809
3810 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3811 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3812 return dest_mem;
3813 }
3814
3815 if (target_char_cast (val, &c))
3816 goto do_libcall;
3817
3818 if (c)
3819 {
3820 if (tree_fits_uhwi_p (len)
3821 && can_store_by_pieces (tree_to_uhwi (len),
3822 builtin_memset_read_str, &c, dest_align,
3823 true))
3824 store_by_pieces (dest_mem, tree_to_uhwi (len),
3825 builtin_memset_read_str, &c, dest_align, true, 0);
3826 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3827 gen_int_mode (c, val_mode),
3828 dest_align, expected_align,
3829 expected_size, min_size, max_size,
3830 probable_max_size))
3831 goto do_libcall;
3832
3833 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3834 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3835 return dest_mem;
3836 }
3837
3838 set_mem_align (dest_mem, dest_align);
3839 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3840 CALL_EXPR_TAILCALL (orig_exp)
3841 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3842 expected_align, expected_size,
3843 min_size, max_size,
3844 probable_max_size);
3845
3846 if (dest_addr == 0)
3847 {
3848 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3849 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3850 }
3851
3852 return dest_addr;
3853
3854 do_libcall:
3855 fndecl = get_callee_fndecl (orig_exp);
3856 fcode = DECL_FUNCTION_CODE (fndecl);
3857 if (fcode == BUILT_IN_MEMSET
3858 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3859 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3860 dest, val, len);
3861 else if (fcode == BUILT_IN_BZERO)
3862 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3863 dest, len);
3864 else
3865 gcc_unreachable ();
3866 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3867 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3868 return expand_call (fn, target, target == const0_rtx);
3869 }
3870
3871 /* Expand expression EXP, which is a call to the bzero builtin. Return
3872 NULL_RTX if we failed; the caller should emit a normal call. */
3873
3874 static rtx
3875 expand_builtin_bzero (tree exp)
3876 {
3877 tree dest, size;
3878 location_t loc = EXPR_LOCATION (exp);
3879
3880 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3881 return NULL_RTX;
3882
3883 dest = CALL_EXPR_ARG (exp, 0);
3884 size = CALL_EXPR_ARG (exp, 1);
3885
3886 /* New argument list transforming bzero(ptr x, int y) to
3887 memset(ptr x, int 0, size_t y). This is done this way
3888 so that if it isn't expanded inline, we fall back to
3889 calling bzero instead of memset. */
3890
3891 return expand_builtin_memset_args (dest, integer_zero_node,
3892 fold_convert_loc (loc,
3893 size_type_node, size),
3894 const0_rtx, VOIDmode, exp);
3895 }
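
/* Source-level illustration (hedged): bzero (p, n) is expanded just
   as memset (p, 0, (size_t) n) would be, except that ORIG_EXP stays
   the bzero call; if inline expansion fails, the do_libcall path in
   expand_builtin_memset_args therefore re-emits a call to bzero
   rather than to memset.  */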
3896
3897 /* Try to expand cmpstr operation ICODE with the given operands.
3898 Return the result rtx on success, otherwise return null. */
3899
3900 static rtx
3901 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3902 HOST_WIDE_INT align)
3903 {
3904 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3905
3906 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3907 target = NULL_RTX;
3908
3909 struct expand_operand ops[4];
3910 create_output_operand (&ops[0], target, insn_mode);
3911 create_fixed_operand (&ops[1], arg1_rtx);
3912 create_fixed_operand (&ops[2], arg2_rtx);
3913 create_integer_operand (&ops[3], align);
3914 if (maybe_expand_insn (icode, 4, ops))
3915 return ops[0].value;
3916 return NULL_RTX;
3917 }
3918
3919 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3920 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3921 otherwise return null. */
3922
3923 static rtx
3924 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3925 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3926 HOST_WIDE_INT align)
3927 {
3928 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3929
3930 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3931 target = NULL_RTX;
3932
3933 struct expand_operand ops[5];
3934 create_output_operand (&ops[0], target, insn_mode);
3935 create_fixed_operand (&ops[1], arg1_rtx);
3936 create_fixed_operand (&ops[2], arg2_rtx);
3937 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3938 TYPE_UNSIGNED (arg3_type));
3939 create_integer_operand (&ops[4], align);
3940 if (maybe_expand_insn (icode, 5, ops))
3941 return ops[0].value;
3942 return NULL_RTX;
3943 }
3944
3945 /* Expand expression EXP, which is a call to the memcmp built-in function.
3946 Return NULL_RTX if we failed and the caller should emit a normal call,
3947 otherwise try to get the result in TARGET, if convenient. */
3948
3949 static rtx
3950 expand_builtin_memcmp (tree exp, rtx target)
3951 {
3952 if (!validate_arglist (exp,
3953 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3954 return NULL_RTX;
3955
3956 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3957 implementing memcmp because it will stop if it encounters two
3958 zero bytes. */
3959 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3960 if (icode == CODE_FOR_nothing)
3961 return NULL_RTX;
3962
3963 tree arg1 = CALL_EXPR_ARG (exp, 0);
3964 tree arg2 = CALL_EXPR_ARG (exp, 1);
3965 tree len = CALL_EXPR_ARG (exp, 2);
3966
3967 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3968 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3969
3970 /* If we don't have POINTER_TYPE, call the function. */
3971 if (arg1_align == 0 || arg2_align == 0)
3972 return NULL_RTX;
3973
3974 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3975 location_t loc = EXPR_LOCATION (exp);
3976 rtx arg1_rtx = get_memory_rtx (arg1, len);
3977 rtx arg2_rtx = get_memory_rtx (arg2, len);
3978 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3979
3980 /* Set MEM_SIZE as appropriate. */
3981 if (CONST_INT_P (arg3_rtx))
3982 {
3983 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3984 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3985 }
3986
3987 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3988 TREE_TYPE (len), arg3_rtx,
3989 MIN (arg1_align, arg2_align));
3990 if (result)
3991 {
3992 /* Return the value in the proper mode for this function. */
3993 if (GET_MODE (result) == mode)
3994 return result;
3995
3996 if (target != 0)
3997 {
3998 convert_move (target, result, 0);
3999 return target;
4000 }
4001
4002 return convert_to_mode (mode, result, 0);
4003 }
4004
4005 result = target;
4006 if (! (result != 0
4007 && REG_P (result) && GET_MODE (result) == mode
4008 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4009 result = gen_reg_rtx (mode);
4010
4011 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4012 TYPE_MODE (integer_type_node), 3,
4013 XEXP (arg1_rtx, 0), Pmode,
4014 XEXP (arg2_rtx, 0), Pmode,
4015 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4016 TYPE_UNSIGNED (sizetype)),
4017 TYPE_MODE (sizetype));
4018 return result;
4019 }
4020
4021 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4022 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4023 try to get the result in TARGET, if convenient. */
4024
4025 static rtx
4026 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4027 {
4028 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4029 return NULL_RTX;
4030
4031 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4032 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4033 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4034 {
4035 rtx arg1_rtx, arg2_rtx;
4036 tree fndecl, fn;
4037 tree arg1 = CALL_EXPR_ARG (exp, 0);
4038 tree arg2 = CALL_EXPR_ARG (exp, 1);
4039 rtx result = NULL_RTX;
4040
4041 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4042 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4043
4044 /* If we don't have POINTER_TYPE, call the function. */
4045 if (arg1_align == 0 || arg2_align == 0)
4046 return NULL_RTX;
4047
4048 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4049 arg1 = builtin_save_expr (arg1);
4050 arg2 = builtin_save_expr (arg2);
4051
4052 arg1_rtx = get_memory_rtx (arg1, NULL);
4053 arg2_rtx = get_memory_rtx (arg2, NULL);
4054
4055 /* Try to call cmpstrsi. */
4056 if (cmpstr_icode != CODE_FOR_nothing)
4057 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4058 MIN (arg1_align, arg2_align));
4059
4060 /* Try to determine at least one length and call cmpstrnsi. */
4061 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4062 {
4063 tree len;
4064 rtx arg3_rtx;
4065
4066 tree len1 = c_strlen (arg1, 1);
4067 tree len2 = c_strlen (arg2, 1);
4068
4069 if (len1)
4070 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4071 if (len2)
4072 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4073
4074 /* If we don't have a constant length for the first, use the length
4075 of the second, if we know it. We don't require a constant for
4076 this case; some cost analysis could be done if both are available
4077 but neither is constant. For now, assume they're equally cheap,
4078 unless one has side effects. If both strings have constant lengths,
4079 use the smaller. */
4080
4081 if (!len1)
4082 len = len2;
4083 else if (!len2)
4084 len = len1;
4085 else if (TREE_SIDE_EFFECTS (len1))
4086 len = len2;
4087 else if (TREE_SIDE_EFFECTS (len2))
4088 len = len1;
4089 else if (TREE_CODE (len1) != INTEGER_CST)
4090 len = len2;
4091 else if (TREE_CODE (len2) != INTEGER_CST)
4092 len = len1;
4093 else if (tree_int_cst_lt (len1, len2))
4094 len = len1;
4095 else
4096 len = len2;
4097
4098 /* Only optimize if we found a length and it is free of side effects. */
4099 if (len && !TREE_SIDE_EFFECTS (len))
4100 {
4101 arg3_rtx = expand_normal (len);
4102 result = expand_cmpstrn_or_cmpmem
4103 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4104 arg3_rtx, MIN (arg1_align, arg2_align));
4105 }
4106 }
4107
4108 if (result)
4109 {
4110 /* Return the value in the proper mode for this function. */
4111 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4112 if (GET_MODE (result) == mode)
4113 return result;
4114 if (target == 0)
4115 return convert_to_mode (mode, result, 0);
4116 convert_move (target, result, 0);
4117 return target;
4118 }
4119
4120 /* Expand the library call ourselves using a stabilized argument
4121 list to avoid re-evaluating the function's arguments twice. */
4122 fndecl = get_callee_fndecl (exp);
4123 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4124 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4125 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4126 return expand_call (fn, target, target == const0_rtx);
4127 }
4128 return NULL_RTX;
4129 }
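
/* A hedged example of the length selection above: for

     strcmp (s, "abc")

   only LEN2 is known, giving LEN == strlen ("abc") + 1 == 4, so the
   cmpstrnsi fallback compares at most 4 bytes; the NUL inside the
   constant string bounds the comparison even though the length of s
   is unknown.  */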
4130
4131 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4132 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4133 try to get the result in TARGET, if convenient. */
4134
4135 static rtx
4136 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4137 ATTRIBUTE_UNUSED machine_mode mode)
4138 {
4139 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4140
4141 if (!validate_arglist (exp,
4142 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4143 return NULL_RTX;
4144
4145 /* If c_strlen can determine an expression for one of the string
4146 lengths, and it doesn't have side effects, then emit cmpstrnsi
4147 using length MIN(strlen(string)+1, arg3). */
4148 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4149 if (cmpstrn_icode != CODE_FOR_nothing)
4150 {
4151 tree len, len1, len2;
4152 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4153 rtx result;
4154 tree fndecl, fn;
4155 tree arg1 = CALL_EXPR_ARG (exp, 0);
4156 tree arg2 = CALL_EXPR_ARG (exp, 1);
4157 tree arg3 = CALL_EXPR_ARG (exp, 2);
4158
4159 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4160 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4161
4162 len1 = c_strlen (arg1, 1);
4163 len2 = c_strlen (arg2, 1);
4164
4165 if (len1)
4166 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4167 if (len2)
4168 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4169
4170 /* If we don't have a constant length for the first, use the length
4171 of the second, if we know it. We don't require a constant for
4172 this case; some cost analysis could be done if both are available
4173 but neither is constant. For now, assume they're equally cheap,
4174 unless one has side effects. If both strings have constant lengths,
4175 use the smaller. */
4176
4177 if (!len1)
4178 len = len2;
4179 else if (!len2)
4180 len = len1;
4181 else if (TREE_SIDE_EFFECTS (len1))
4182 len = len2;
4183 else if (TREE_SIDE_EFFECTS (len2))
4184 len = len1;
4185 else if (TREE_CODE (len1) != INTEGER_CST)
4186 len = len2;
4187 else if (TREE_CODE (len2) != INTEGER_CST)
4188 len = len1;
4189 else if (tree_int_cst_lt (len1, len2))
4190 len = len1;
4191 else
4192 len = len2;
4193
4194 /* If we could not determine a length, or it has side effects, punt. */
4195 if (!len || TREE_SIDE_EFFECTS (len))
4196 return NULL_RTX;
4197
4198 /* The actual new length parameter is MIN(len,arg3). */
4199 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4200 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4201
4202 /* If we don't have POINTER_TYPE, call the function. */
4203 if (arg1_align == 0 || arg2_align == 0)
4204 return NULL_RTX;
4205
4206 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4207 arg1 = builtin_save_expr (arg1);
4208 arg2 = builtin_save_expr (arg2);
4209 len = builtin_save_expr (len);
4210
4211 arg1_rtx = get_memory_rtx (arg1, len);
4212 arg2_rtx = get_memory_rtx (arg2, len);
4213 arg3_rtx = expand_normal (len);
4214 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4215 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4216 MIN (arg1_align, arg2_align));
4217 if (result)
4218 {
4219 /* Return the value in the proper mode for this function. */
4220 mode = TYPE_MODE (TREE_TYPE (exp));
4221 if (GET_MODE (result) == mode)
4222 return result;
4223 if (target == 0)
4224 return convert_to_mode (mode, result, 0);
4225 convert_move (target, result, 0);
4226 return target;
4227 }
4228
4229 /* Expand the library call ourselves using a stabilized argument
4230 list to avoid re-evaluating the function's arguments twice. */
4231 fndecl = get_callee_fndecl (exp);
4232 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4233 arg1, arg2, len);
4234 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4235 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4236 return expand_call (fn, target, target == const0_rtx);
4237 }
4238 return NULL_RTX;
4239 }
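
/* Worked illustration (hedged) of the MIN computation above: for

     strncmp (s, "abc", 100)

   LEN2 == strlen ("abc") + 1 == 4 and ARG3 == 100, so the emitted
   cmpstrnsi uses length MIN (4, 100) == 4; comparing past the
   constant string's NUL could never change the result.  */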
4240
4241 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4242 if that's convenient. */
4243
4244 rtx
4245 expand_builtin_saveregs (void)
4246 {
4247 rtx val;
4248 rtx_insn *seq;
4249
4250 /* Don't do __builtin_saveregs more than once in a function.
4251 Save the result of the first call and reuse it. */
4252 if (saveregs_value != 0)
4253 return saveregs_value;
4254
4255 /* When this function is called, it means that registers must be
4256 saved on entry to this function. So we migrate the call to the
4257 first insn of this function. */
4258
4259 start_sequence ();
4260
4261 /* Do whatever the machine needs done in this case. */
4262 val = targetm.calls.expand_builtin_saveregs ();
4263
4264 seq = get_insns ();
4265 end_sequence ();
4266
4267 saveregs_value = val;
4268
4269 /* Put the insns after the NOTE that starts the function. If this
4270 is inside a start_sequence, make the outer-level insn chain current, so
4271 the code is placed at the start of the function. */
4272 push_topmost_sequence ();
4273 emit_insn_after (seq, entry_of_function ());
4274 pop_topmost_sequence ();
4275
4276 return val;
4277 }
4278
4279 /* Expand a call to __builtin_next_arg. */
4280
4281 static rtx
4282 expand_builtin_next_arg (void)
4283 {
4284 /* Checking arguments is already done in fold_builtin_next_arg
4285 that must be called before this function. */
4286 return expand_binop (ptr_mode, add_optab,
4287 crtl->args.internal_arg_pointer,
4288 crtl->args.arg_offset_rtx,
4289 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4290 }
4291
4292 /* Make it easier for the backends by protecting the valist argument
4293 from multiple evaluations. */
4294
4295 static tree
4296 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4297 {
4298 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4299
4300 /* The current way of determining the type of valist is completely
4301 bogus. We should have the information on the va builtin instead. */
4302 if (!vatype)
4303 vatype = targetm.fn_abi_va_list (cfun->decl);
4304
4305 if (TREE_CODE (vatype) == ARRAY_TYPE)
4306 {
4307 if (TREE_SIDE_EFFECTS (valist))
4308 valist = save_expr (valist);
4309
4310 /* For this case, the backends will be expecting a pointer to
4311 vatype, but it's possible we've actually been given an array
4312 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4313 So fix it. */
4314 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4315 {
4316 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4317 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4318 }
4319 }
4320 else
4321 {
4322 tree pt = build_pointer_type (vatype);
4323
4324 if (! needs_lvalue)
4325 {
4326 if (! TREE_SIDE_EFFECTS (valist))
4327 return valist;
4328
4329 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4330 TREE_SIDE_EFFECTS (valist) = 1;
4331 }
4332
4333 if (TREE_SIDE_EFFECTS (valist))
4334 valist = save_expr (valist);
4335 valist = fold_build2_loc (loc, MEM_REF,
4336 vatype, valist, build_int_cst (pt, 0));
4337 }
4338
4339 return valist;
4340 }
4341
4342 /* The "standard" definition of va_list is void*. */
4343
4344 tree
4345 std_build_builtin_va_list (void)
4346 {
4347 return ptr_type_node;
4348 }
4349
4350 /* The "standard" abi va_list is va_list_type_node. */
4351
4352 tree
4353 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4354 {
4355 return va_list_type_node;
4356 }
4357
4358 /* The "standard" type of va_list is va_list_type_node. */
4359
4360 tree
4361 std_canonical_va_list_type (tree type)
4362 {
4363 tree wtype, htype;
4364
4365 if (INDIRECT_REF_P (type))
4366 type = TREE_TYPE (type);
4367 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4368 type = TREE_TYPE (type);
4369 wtype = va_list_type_node;
4370 htype = type;
4371 /* Treat structure va_list types. */
4372 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4373 htype = TREE_TYPE (htype);
4374 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4375 {
4376 /* If va_list is an array type, the argument may have decayed
4377 to a pointer type, e.g. by being passed to another function.
4378 In that case, unwrap both types so that we can compare the
4379 underlying records. */
4380 if (TREE_CODE (htype) == ARRAY_TYPE
4381 || POINTER_TYPE_P (htype))
4382 {
4383 wtype = TREE_TYPE (wtype);
4384 htype = TREE_TYPE (htype);
4385 }
4386 }
4387 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4388 return va_list_type_node;
4389
4390 return NULL_TREE;
4391 }
4392
4393 /* The "standard" implementation of va_start: just assign `nextarg' to
4394 the variable. */
4395
4396 void
4397 std_expand_builtin_va_start (tree valist, rtx nextarg)
4398 {
4399 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4400 convert_move (va_r, nextarg, 0);
4401
4402 /* We do not have any valid bounds for the pointer, so
4403 just store zero bounds for it. */
4404 if (chkp_function_instrumented_p (current_function_decl))
4405 chkp_expand_bounds_reset_for_mem (valist,
4406 make_tree (TREE_TYPE (valist),
4407 nextarg));
4408 }
4409
4410 /* Expand EXP, a call to __builtin_va_start. */
4411
4412 static rtx
4413 expand_builtin_va_start (tree exp)
4414 {
4415 rtx nextarg;
4416 tree valist;
4417 location_t loc = EXPR_LOCATION (exp);
4418
4419 if (call_expr_nargs (exp) < 2)
4420 {
4421 error_at (loc, "too few arguments to function %<va_start%>");
4422 return const0_rtx;
4423 }
4424
4425 if (fold_builtin_next_arg (exp, true))
4426 return const0_rtx;
4427
4428 nextarg = expand_builtin_next_arg ();
4429 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4430
4431 if (targetm.expand_builtin_va_start)
4432 targetm.expand_builtin_va_start (valist, nextarg);
4433 else
4434 std_expand_builtin_va_start (valist, nextarg);
4435
4436 return const0_rtx;
4437 }
4438
4439 /* Expand EXP, a call to __builtin_va_end. */
4440
4441 static rtx
4442 expand_builtin_va_end (tree exp)
4443 {
4444 tree valist = CALL_EXPR_ARG (exp, 0);
4445
4446 /* Evaluate for side effects, if needed. I hate macros that don't
4447 do that. */
4448 if (TREE_SIDE_EFFECTS (valist))
4449 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4450
4451 return const0_rtx;
4452 }
4453
4454 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4455 builtin rather than just as an assignment in stdarg.h because of the
4456 nastiness of array-type va_list types. */
4457
4458 static rtx
4459 expand_builtin_va_copy (tree exp)
4460 {
4461 tree dst, src, t;
4462 location_t loc = EXPR_LOCATION (exp);
4463
4464 dst = CALL_EXPR_ARG (exp, 0);
4465 src = CALL_EXPR_ARG (exp, 1);
4466
4467 dst = stabilize_va_list_loc (loc, dst, 1);
4468 src = stabilize_va_list_loc (loc, src, 0);
4469
4470 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4471
4472 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4473 {
4474 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4475 TREE_SIDE_EFFECTS (t) = 1;
4476 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4477 }
4478 else
4479 {
4480 rtx dstb, srcb, size;
4481
4482 /* Evaluate to pointers. */
4483 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4484 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4485 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4486 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4487
4488 dstb = convert_memory_address (Pmode, dstb);
4489 srcb = convert_memory_address (Pmode, srcb);
4490
4491 /* "Dereference" to BLKmode memories. */
4492 dstb = gen_rtx_MEM (BLKmode, dstb);
4493 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4494 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4495 srcb = gen_rtx_MEM (BLKmode, srcb);
4496 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4497 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4498
4499 /* Copy. */
4500 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4501 }
4502
4503 return const0_rtx;
4504 }
4505
4506 /* Expand a call to one of the builtin functions __builtin_frame_address or
4507 __builtin_return_address. */
4508
4509 static rtx
4510 expand_builtin_frame_address (tree fndecl, tree exp)
4511 {
4512 /* The argument must be a nonnegative integer constant.
4513 It counts the number of frames to scan up the stack.
4514 The value is either the frame pointer value or the return
4515 address saved in that frame. */
4516 if (call_expr_nargs (exp) == 0)
4517 /* Warning about missing arg was already issued. */
4518 return const0_rtx;
4519 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4520 {
4521 error ("invalid argument to %qD", fndecl);
4522 return const0_rtx;
4523 }
4524 else
4525 {
4526 /* Number of frames to scan up the stack. */
4527 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4528
4529 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4530
4531 /* Some ports cannot access arbitrary stack frames. */
4532 if (tem == NULL)
4533 {
4534 warning (0, "unsupported argument to %qD", fndecl);
4535 return const0_rtx;
4536 }
4537
4538 if (count)
4539 {
4540 /* Warn since no effort is made to ensure that any frame
4541 beyond the current one exists or can be safely reached. */
4542 warning (OPT_Wframe_address, "calling %qD with "
4543 "a nonzero argument is unsafe", fndecl);
4544 }
4545
4546 /* For __builtin_frame_address, return what we've got. */
4547 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4548 return tem;
4549
4550 if (!REG_P (tem)
4551 && ! CONSTANT_P (tem))
4552 tem = copy_addr_to_reg (tem);
4553 return tem;
4554 }
4555 }
4556
4557 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4558 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4559 is the same as for allocate_dynamic_stack_space. */
4560
4561 static rtx
4562 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4563 {
4564 rtx op0;
4565 rtx result;
4566 bool valid_arglist;
4567 unsigned int align;
4568 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4569 == BUILT_IN_ALLOCA_WITH_ALIGN);
4570
4571 valid_arglist
4572 = (alloca_with_align
4573 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4574 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4575
4576 if (!valid_arglist)
4577 return NULL_RTX;
4578
4579 /* Compute the argument. */
4580 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4581
4582 /* Compute the alignment. */
4583 align = (alloca_with_align
4584 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4585 : BIGGEST_ALIGNMENT);
4586
4587 /* Allocate the desired space. */
4588 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4589 result = convert_memory_address (ptr_mode, result);
4590
4591 return result;
4592 }
4593
4594 /* Expand a call to bswap builtin in EXP.
4595 Return NULL_RTX if a normal call should be emitted rather than expanding the
4596 function in-line. If convenient, the result should be placed in TARGET.
4597 SUBTARGET may be used as the target for computing one of EXP's operands. */
4598
4599 static rtx
4600 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4601 rtx subtarget)
4602 {
4603 tree arg;
4604 rtx op0;
4605
4606 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4607 return NULL_RTX;
4608
4609 arg = CALL_EXPR_ARG (exp, 0);
4610 op0 = expand_expr (arg,
4611 subtarget && GET_MODE (subtarget) == target_mode
4612 ? subtarget : NULL_RTX,
4613 target_mode, EXPAND_NORMAL);
4614 if (GET_MODE (op0) != target_mode)
4615 op0 = convert_to_mode (target_mode, op0, 1);
4616
4617 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4618
4619 gcc_assert (target);
4620
4621 return convert_to_mode (target_mode, target, 1);
4622 }
4623
4624 /* Expand a call to a unary builtin in EXP.
4625 Return NULL_RTX if a normal call should be emitted rather than expanding the
4626 function in-line. If convenient, the result should be placed in TARGET.
4627 SUBTARGET may be used as the target for computing one of EXP's operands. */
4628
4629 static rtx
4630 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4631 rtx subtarget, optab op_optab)
4632 {
4633 rtx op0;
4634
4635 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4636 return NULL_RTX;
4637
4638 /* Compute the argument. */
4639 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4640 (subtarget
4641 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4642 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4643 VOIDmode, EXPAND_NORMAL);
4644 /* Compute op, into TARGET if possible.
4645 Set TARGET to wherever the result comes back. */
4646 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4647 op_optab, op0, target, op_optab != clrsb_optab);
4648 gcc_assert (target);
4649
4650 return convert_to_mode (target_mode, target, 0);
4651 }
4652
4653 /* Expand a call to __builtin_expect. We just return our argument
4654 as the builtin_expect semantics should already have been applied by
4655 the tree branch prediction pass. */
4656
4657 static rtx
4658 expand_builtin_expect (tree exp, rtx target)
4659 {
4660 tree arg;
4661
4662 if (call_expr_nargs (exp) < 2)
4663 return const0_rtx;
4664 arg = CALL_EXPR_ARG (exp, 0);
4665
4666 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4667 /* When guessing was done, the hints should be already stripped away. */
4668 gcc_assert (!flag_guess_branch_prob
4669 || optimize == 0 || seen_error ());
4670 return target;
4671 }
4672
4673 /* Expand a call to __builtin_assume_aligned. We just return our first
4674 argument, as the builtin_assume_aligned semantics should already have
4675 been applied by CCP. */
4676
4677 static rtx
4678 expand_builtin_assume_aligned (tree exp, rtx target)
4679 {
4680 if (call_expr_nargs (exp) < 2)
4681 return const0_rtx;
4682 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4683 EXPAND_NORMAL);
4684 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4685 && (call_expr_nargs (exp) < 3
4686 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4687 return target;
4688 }
4689
4690 void
4691 expand_builtin_trap (void)
4692 {
4693 if (targetm.have_trap ())
4694 {
4695 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4696 /* For trap insns when not accumulating outgoing args force
4697 REG_ARGS_SIZE note to prevent crossjumping of calls with
4698 different args sizes. */
4699 if (!ACCUMULATE_OUTGOING_ARGS)
4700 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4701 }
4702 else
4703 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4704 emit_barrier ();
4705 }
4706
4707 /* Expand a call to __builtin_unreachable. We do nothing except emit
4708 a barrier saying that control flow will not pass here.
4709
4710 It is the responsibility of the program being compiled to ensure
4711 that control flow never reaches __builtin_unreachable. */
4712 static void
4713 expand_builtin_unreachable (void)
4714 {
4715 emit_barrier ();
4716 }
4717
4718 /* Expand EXP, a call to fabs, fabsf or fabsl.
4719 Return NULL_RTX if a normal call should be emitted rather than expanding
4720 the function inline. If convenient, the result should be placed
4721 in TARGET. SUBTARGET may be used as the target for computing
4722 the operand. */
4723
4724 static rtx
4725 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4726 {
4727 machine_mode mode;
4728 tree arg;
4729 rtx op0;
4730
4731 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4732 return NULL_RTX;
4733
4734 arg = CALL_EXPR_ARG (exp, 0);
4735 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4736 mode = TYPE_MODE (TREE_TYPE (arg));
4737 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4738 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4739 }
4740
4741 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4742 Return NULL_RTX if a normal call should be emitted rather than expanding the
4743 function inline. If convenient, the result should be placed in TARGET.
4744 SUBTARGET may be used as the target for computing the operand. */
4745
4746 static rtx
4747 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4748 {
4749 rtx op0, op1;
4750 tree arg;
4751
4752 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4753 return NULL_RTX;
4754
4755 arg = CALL_EXPR_ARG (exp, 0);
4756 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4757
4758 arg = CALL_EXPR_ARG (exp, 1);
4759 op1 = expand_normal (arg);
4760
4761 return expand_copysign (op0, op1, target);
4762 }
4763
4764 /* Expand a call to __builtin___clear_cache. */
4765
4766 static rtx
4767 expand_builtin___clear_cache (tree exp)
4768 {
4769 if (!targetm.code_for_clear_cache)
4770 {
4771 #ifdef CLEAR_INSN_CACHE
4772 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4773 does something. Just do the default expansion to a call to
4774 __clear_cache(). */
4775 return NULL_RTX;
4776 #else
4777 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4778 does nothing. There is no need to call it. Do nothing. */
4779 return const0_rtx;
4780 #endif /* CLEAR_INSN_CACHE */
4781 }
4782
4783 /* We have a "clear_cache" insn, and it will handle everything. */
4784 tree begin, end;
4785 rtx begin_rtx, end_rtx;
4786
4787 /* We must not expand to a library call. If we did, any
4788 fallback library function in libgcc that might contain a call to
4789 __builtin___clear_cache() would recurse infinitely. */
4790 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4791 {
4792 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4793 return const0_rtx;
4794 }
4795
4796 if (targetm.have_clear_cache ())
4797 {
4798 struct expand_operand ops[2];
4799
4800 begin = CALL_EXPR_ARG (exp, 0);
4801 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4802
4803 end = CALL_EXPR_ARG (exp, 1);
4804 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4805
4806 create_address_operand (&ops[0], begin_rtx);
4807 create_address_operand (&ops[1], end_rtx);
4808 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4809 return const0_rtx;
4810 }
4811 return const0_rtx;
4812 }
4813
4814 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4815
4816 static rtx
4817 round_trampoline_addr (rtx tramp)
4818 {
4819 rtx temp, addend, mask;
4820
4821 /* If we don't need too much alignment, we'll have been guaranteed
4822 proper alignment by get_trampoline_type. */
4823 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4824 return tramp;
4825
4826 /* Round address up to desired boundary. */
4827 temp = gen_reg_rtx (Pmode);
4828 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4829 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4830
4831 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4832 temp, 0, OPTAB_LIB_WIDEN);
4833 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4834 temp, 0, OPTAB_LIB_WIDEN);
4835
4836 return tramp;
4837 }
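
/* Numeric illustration of the rounding above: with a
   TRAMPOLINE_ALIGNMENT of 128 bits, ADDEND == 15 and MASK == -16,
   so an address of 0x1003 becomes

     (0x1003 + 15) & -16 == 0x1010,

   the next 16-byte boundary, while already-aligned addresses are
   left unchanged by the same computation.  */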
4838
4839 static rtx
4840 expand_builtin_init_trampoline (tree exp, bool onstack)
4841 {
4842 tree t_tramp, t_func, t_chain;
4843 rtx m_tramp, r_tramp, r_chain, tmp;
4844
4845 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4846 POINTER_TYPE, VOID_TYPE))
4847 return NULL_RTX;
4848
4849 t_tramp = CALL_EXPR_ARG (exp, 0);
4850 t_func = CALL_EXPR_ARG (exp, 1);
4851 t_chain = CALL_EXPR_ARG (exp, 2);
4852
4853 r_tramp = expand_normal (t_tramp);
4854 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4855 MEM_NOTRAP_P (m_tramp) = 1;
4856
4857 /* If ONSTACK, the TRAMP argument should be the address of a field
4858 within the local function's FRAME decl. Either way, let's see if
4859 we can fill in the MEM_ATTRs for this memory. */
4860 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4861 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4862
4863 /* Creator of a heap trampoline is responsible for making sure the
4864 address is aligned to at least STACK_BOUNDARY. Normally malloc
4865 will ensure this anyhow. */
4866 tmp = round_trampoline_addr (r_tramp);
4867 if (tmp != r_tramp)
4868 {
4869 m_tramp = change_address (m_tramp, BLKmode, tmp);
4870 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4871 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4872 }
4873
4874 /* The FUNC argument should be the address of the nested function.
4875 Extract the actual function decl to pass to the hook. */
4876 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4877 t_func = TREE_OPERAND (t_func, 0);
4878 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4879
4880 r_chain = expand_normal (t_chain);
4881
4882 /* Generate insns to initialize the trampoline. */
4883 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4884
4885 if (onstack)
4886 {
4887 trampolines_created = 1;
4888
4889 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4890 "trampoline generated for nested function %qD", t_func);
4891 }
4892
4893 return const0_rtx;
4894 }
4895
4896 static rtx
4897 expand_builtin_adjust_trampoline (tree exp)
4898 {
4899 rtx tramp;
4900
4901 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4902 return NULL_RTX;
4903
4904 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4905 tramp = round_trampoline_addr (tramp);
4906 if (targetm.calls.trampoline_adjust_address)
4907 tramp = targetm.calls.trampoline_adjust_address (tramp);
4908
4909 return tramp;
4910 }
4911
4912 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4913 function. The function first checks whether the back end provides
4914 an insn to implement signbit for the respective mode. If not, it
4915 checks whether the floating point format of the value is such that
4916 the sign bit can be extracted. If that is not the case, error out.
4917 EXP is the expression that is a call to the builtin function; if
4918 convenient, the result should be placed in TARGET. */
4919 static rtx
4920 expand_builtin_signbit (tree exp, rtx target)
4921 {
4922 const struct real_format *fmt;
4923 machine_mode fmode, imode, rmode;
4924 tree arg;
4925 int word, bitpos;
4926 enum insn_code icode;
4927 rtx temp;
4928 location_t loc = EXPR_LOCATION (exp);
4929
4930 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4931 return NULL_RTX;
4932
4933 arg = CALL_EXPR_ARG (exp, 0);
4934 fmode = TYPE_MODE (TREE_TYPE (arg));
4935 rmode = TYPE_MODE (TREE_TYPE (exp));
4936 fmt = REAL_MODE_FORMAT (fmode);
4937
4938 arg = builtin_save_expr (arg);
4939
4940 /* Expand the argument yielding a RTX expression. */
4941 temp = expand_normal (arg);
4942
4943 /* Check if the back end provides an insn that handles signbit for the
4944 argument's mode. */
4945 icode = optab_handler (signbit_optab, fmode);
4946 if (icode != CODE_FOR_nothing)
4947 {
4948 rtx_insn *last = get_last_insn ();
4949 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4950 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4951 return target;
4952 delete_insns_since (last);
4953 }
4954
4955 /* For floating point formats without a sign bit, implement signbit
4956 as "ARG < 0.0". */
4957 bitpos = fmt->signbit_ro;
4958 if (bitpos < 0)
4959 {
4960 /* But we can't do this if the format supports signed zero. */
4961 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4962
4963 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4964 build_real (TREE_TYPE (arg), dconst0));
4965 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4966 }
4967
4968 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4969 {
4970 imode = int_mode_for_mode (fmode);
4971 gcc_assert (imode != BLKmode);
4972 temp = gen_lowpart (imode, temp);
4973 }
4974 else
4975 {
4976 imode = word_mode;
4977 /* Handle targets with different FP word orders. */
4978 if (FLOAT_WORDS_BIG_ENDIAN)
4979 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4980 else
4981 word = bitpos / BITS_PER_WORD;
4982 temp = operand_subword_force (temp, word, fmode);
4983 bitpos = bitpos % BITS_PER_WORD;
4984 }
4985
4986 /* Force the intermediate word_mode (or narrower) result into a
4987 register. This avoids attempting to create paradoxical SUBREGs
4988 of floating point modes below. */
4989 temp = force_reg (imode, temp);
4990
4991 /* If the bitpos is within the "result mode" lowpart, the operation
4992 can be implemented with a single bitwise AND. Otherwise, we need
4993 a right shift and an AND. */
4994
4995 if (bitpos < GET_MODE_BITSIZE (rmode))
4996 {
4997 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4998
4999 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5000 temp = gen_lowpart (rmode, temp);
5001 temp = expand_binop (rmode, and_optab, temp,
5002 immed_wide_int_const (mask, rmode),
5003 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5004 }
5005 else
5006 {
5007 /* Perform a logical right shift to place the signbit in the least
5008 significant bit, then truncate the result to the desired mode
5009 and mask just this bit. */
5010 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5011 temp = gen_lowpart (rmode, temp);
5012 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5013 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5014 }
5015
5016 return temp;
5017 }
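
/* A hedged example of the generic path above: for IEEE single
   precision, signbit_ro == 31.  If RMODE is SImode (32 bits), the
   bit position lies within the lowpart, so the result is computed
   with a single AND against wi::set_bit_in_zero (31, 32), i.e.

     temp & 0x80000000;

   had the bit fallen outside the lowpart, the shift branch would
   compute (temp >> bitpos) & 1 instead.  */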
5018
5019 /* Expand fork or exec calls. TARGET is the desired target of the
5020 call. EXP is the call. FN is the identifier of the actual
5021 function. IGNORE is nonzero if the value is to be ignored. */
5023
5024 static rtx
5025 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5026 {
5027 tree id, decl;
5028 tree call;
5029
5030 /* If we are not profiling, just call the function. */
5031 if (!profile_arc_flag)
5032 return NULL_RTX;
5033
5034 /* Otherwise call the wrapper. This should be equivalent for the rest of
5035 the compiler, so the code does not diverge, and the wrapper may run the
5036 code necessary for keeping the profiling sane. */
5037
5038 switch (DECL_FUNCTION_CODE (fn))
5039 {
5040 case BUILT_IN_FORK:
5041 id = get_identifier ("__gcov_fork");
5042 break;
5043
5044 case BUILT_IN_EXECL:
5045 id = get_identifier ("__gcov_execl");
5046 break;
5047
5048 case BUILT_IN_EXECV:
5049 id = get_identifier ("__gcov_execv");
5050 break;
5051
5052 case BUILT_IN_EXECLP:
5053 id = get_identifier ("__gcov_execlp");
5054 break;
5055
5056 case BUILT_IN_EXECLE:
5057 id = get_identifier ("__gcov_execle");
5058 break;
5059
5060 case BUILT_IN_EXECVP:
5061 id = get_identifier ("__gcov_execvp");
5062 break;
5063
5064 case BUILT_IN_EXECVE:
5065 id = get_identifier ("__gcov_execve");
5066 break;
5067
5068 default:
5069 gcc_unreachable ();
5070 }
5071
5072 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5073 FUNCTION_DECL, id, TREE_TYPE (fn));
5074 DECL_EXTERNAL (decl) = 1;
5075 TREE_PUBLIC (decl) = 1;
5076 DECL_ARTIFICIAL (decl) = 1;
5077 TREE_NOTHROW (decl) = 1;
5078 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5079 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5080 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5081 return expand_call (call, target, ignore);
5082 }
5083
5084
5085 \f
5086 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5087 the pointer in these functions is void*, the tree optimizers may remove
5088 casts. The mode computed in expand_builtin isn't reliable either, due
5089 to __sync_bool_compare_and_swap.
5090
5091 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5092 group of builtins. This gives us log2 of the mode size. */
5093
5094 static inline machine_mode
5095 get_builtin_sync_mode (int fcode_diff)
5096 {
5097 /* The size is not negotiable, so ask not to get BLKmode in return
5098 if the target indicates that a smaller size would be better. */
5099 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5100 }
5101
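/* Editor's illustration (an added sketch, not part of GCC): the
   FOO_{1,2,4,8,16} builtin groups are numbered consecutively, so
   FCODE_DIFF doubles as log2 of the operand size in bytes.  For
   __sync_fetch_and_add_4, fcode_diff is 2, giving a mode of
   BITS_PER_UNIT << 2 == 32 bits -- SImode on a typical target:

       machine_mode m = get_builtin_sync_mode (2);  // 8 << 2 = 32-bit mode
*/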
5102 /* Expand the memory expression LOC and return the appropriate memory operand
5103 for the builtin_sync operations. */
5104
5105 static rtx
5106 get_builtin_sync_mem (tree loc, machine_mode mode)
5107 {
5108 rtx addr, mem;
5109
5110 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5111 addr = convert_memory_address (Pmode, addr);
5112
5113 /* Note that we explicitly do not want any alias information for this
5114 memory, so that we kill all other live memories. Otherwise we don't
5115 satisfy the full barrier semantics of the intrinsic. */
5116 mem = validize_mem (gen_rtx_MEM (mode, addr));
5117
5118 /* The alignment needs to be at least that of the mode. */
5119 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5120 get_pointer_alignment (loc)));
5121 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5122 MEM_VOLATILE_P (mem) = 1;
5123
5124 return mem;
5125 }
5126
5127 /* Make sure an argument is in the right mode.
5128 EXP is the tree argument.
5129 MODE is the mode it should be in. */
5130
5131 static rtx
5132 expand_expr_force_mode (tree exp, machine_mode mode)
5133 {
5134 rtx val;
5135 machine_mode old_mode;
5136
5137 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5138 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5139 of CONST_INTs, where we know the old_mode only from the call argument. */
5140
5141 old_mode = GET_MODE (val);
5142 if (old_mode == VOIDmode)
5143 old_mode = TYPE_MODE (TREE_TYPE (exp));
5144 val = convert_modes (mode, old_mode, val, 1);
5145 return val;
5146 }
5147
5148
5149 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5150 EXP is the CALL_EXPR. CODE is the rtx code
5151 that corresponds to the arithmetic or logical operation from the name;
5152 an exception here is that NOT actually means NAND. TARGET is an optional
5153 place for us to store the results; AFTER is true if this is the
5154 fetch_and_xxx form. */
5155
5156 static rtx
5157 expand_builtin_sync_operation (machine_mode mode, tree exp,
5158 enum rtx_code code, bool after,
5159 rtx target)
5160 {
5161 rtx val, mem;
5162 location_t loc = EXPR_LOCATION (exp);
5163
5164 if (code == NOT && warn_sync_nand)
5165 {
5166 tree fndecl = get_callee_fndecl (exp);
5167 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5168
5169 static bool warned_f_a_n, warned_n_a_f;
5170
5171 switch (fcode)
5172 {
5173 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5174 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5175 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5176 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5177 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5178 if (warned_f_a_n)
5179 break;
5180
5181 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5182 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5183 warned_f_a_n = true;
5184 break;
5185
5186 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5187 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5188 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5189 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5190 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5191 if (warned_n_a_f)
5192 break;
5193
5194 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5195 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5196 warned_n_a_f = true;
5197 break;
5198
5199 default:
5200 gcc_unreachable ();
5201 }
5202 }
5203
5204 /* Expand the operands. */
5205 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5206 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5207
5208 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5209 after);
5210 }
5211
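/* Editor's illustration (an added sketch, not part of GCC): at the
   source level the fetch_and_xxx and xxx_and_fetch forms differ only
   in which value they return, and since GCC 4.4 the NAND variants
   compute ~(a & b):

       int counter = 0;
       int old_val = __sync_fetch_and_add (&counter, 1);  // old_val == 0
       int new_val = __sync_add_and_fetch (&counter, 1);  // new_val == 2
       // Stores ~(counter & 3) and returns the previous value (2).
       old_val = __sync_fetch_and_nand (&counter, 3);
*/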
5212 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5213 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5214 true if this is the boolean form. TARGET is a place for us to store the
5215 results; this is NOT optional if IS_BOOL is true. */
5216
5217 static rtx
5218 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5219 bool is_bool, rtx target)
5220 {
5221 rtx old_val, new_val, mem;
5222 rtx *pbool, *poval;
5223
5224 /* Expand the operands. */
5225 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5226 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5227 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5228
5229 pbool = poval = NULL;
5230 if (target != const0_rtx)
5231 {
5232 if (is_bool)
5233 pbool = &target;
5234 else
5235 poval = &target;
5236 }
5237 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5238 false, MEMMODEL_SYNC_SEQ_CST,
5239 MEMMODEL_SYNC_SEQ_CST))
5240 return NULL_RTX;
5241
5242 return target;
5243 }
5244
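/* Editor's illustration (an added sketch, not part of GCC): the two
   source-level forms expanded here differ only in what they return:

       int v = 0;
       // Returns the value that was in v before the operation (0 here).
       int old = __sync_val_compare_and_swap (&v, 0, 42);
       // Returns nonzero iff the swap actually happened.
       _Bool ok = __sync_bool_compare_and_swap (&v, 42, 7);
*/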
5245 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5246 general form is actually an atomic exchange, and some targets only
5247 support a reduced form with the second argument being a constant 1.
5248 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5249 the results. */
5250
5251 static rtx
5252 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5253 rtx target)
5254 {
5255 rtx val, mem;
5256
5257 /* Expand the operands. */
5258 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5259 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5260
5261 return expand_sync_lock_test_and_set (target, mem, val);
5262 }
5263
5264 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5265
5266 static void
5267 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5268 {
5269 rtx mem;
5270
5271 /* Expand the operands. */
5272 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5273
5274 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5275 }
5276
5277 /* Given an integer representing an ``enum memmodel'', verify its
5278 correctness and return the memory model enum. */
5279
5280 static enum memmodel
5281 get_memmodel (tree exp)
5282 {
5283 rtx op;
5284 unsigned HOST_WIDE_INT val;
5285
5286 /* If the parameter is not a constant, it's a run time value so we'll just
5287 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5288 if (TREE_CODE (exp) != INTEGER_CST)
5289 return MEMMODEL_SEQ_CST;
5290
5291 op = expand_normal (exp);
5292
5293 val = INTVAL (op);
5294 if (targetm.memmodel_check)
5295 val = targetm.memmodel_check (val);
5296 else if (val & ~MEMMODEL_MASK)
5297 {
5298 warning (OPT_Winvalid_memory_model,
5299 "unknown architecture specifier in memory model to builtin");
5300 return MEMMODEL_SEQ_CST;
5301 }
5302
5303 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5304 if (memmodel_base (val) >= MEMMODEL_LAST)
5305 {
5306 warning (OPT_Winvalid_memory_model,
5307 "invalid memory model argument to builtin");
5308 return MEMMODEL_SEQ_CST;
5309 }
5310
5311 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5312 be conservative and promote consume to acquire. */
5313 if (val == MEMMODEL_CONSUME)
5314 val = MEMMODEL_ACQUIRE;
5315
5316 return (enum memmodel) val;
5317 }
5318
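/* Editor's illustration (an added sketch, not part of GCC): a
   non-constant model is silently treated as sequential consistency,
   and a constant with unknown bits set warns first.  Both loads below
   are therefore expanded with MEMMODEL_SEQ_CST:

       int x = 0, m = __ATOMIC_RELAXED;
       __atomic_load_n (&x, m);       // run-time value: SEQ_CST, no warning
       __atomic_load_n (&x, 0xbeef);  // warns with -Winvalid-memory-model
*/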
5319 /* Expand the __atomic_exchange intrinsic:
5320 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5321 EXP is the CALL_EXPR.
5322 TARGET is an optional place for us to store the results. */
5323
5324 static rtx
5325 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5326 {
5327 rtx val, mem;
5328 enum memmodel model;
5329
5330 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5331
5332 if (!flag_inline_atomics)
5333 return NULL_RTX;
5334
5335 /* Expand the operands. */
5336 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5337 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5338
5339 return expand_atomic_exchange (target, mem, val, model);
5340 }
5341
5342 /* Expand the __atomic_compare_exchange intrinsic:
5343 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5344 TYPE desired, BOOL weak,
5345 enum memmodel success,
5346 enum memmodel failure)
5347 EXP is the CALL_EXPR.
5348 TARGET is an optional place for us to store the results. */
5349
5350 static rtx
5351 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5352 rtx target)
5353 {
5354 rtx expect, desired, mem, oldval;
5355 rtx_code_label *label;
5356 enum memmodel success, failure;
5357 tree weak;
5358 bool is_weak;
5359
5360 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5361 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5362
5363 if (failure > success)
5364 {
5365 warning (OPT_Winvalid_memory_model,
5366 "failure memory model cannot be stronger than success memory "
5367 "model for %<__atomic_compare_exchange%>");
5368 success = MEMMODEL_SEQ_CST;
5369 }
5370
5371 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5372 {
5373 warning (OPT_Winvalid_memory_model,
5374 "invalid failure memory model for "
5375 "%<__atomic_compare_exchange%>");
5376 failure = MEMMODEL_SEQ_CST;
5377 success = MEMMODEL_SEQ_CST;
5378 }
5379
5380
5381 if (!flag_inline_atomics)
5382 return NULL_RTX;
5383
5384 /* Expand the operands. */
5385 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5386
5387 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5388 expect = convert_memory_address (Pmode, expect);
5389 expect = gen_rtx_MEM (mode, expect);
5390 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5391
5392 weak = CALL_EXPR_ARG (exp, 3);
5393 is_weak = false;
5394 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5395 is_weak = true;
5396
5397 if (target == const0_rtx)
5398 target = NULL;
5399
5400 /* Lest the rtl backend create a race condition with an improper store
5401 to memory, always create a new pseudo for OLDVAL. */
5402 oldval = NULL;
5403
5404 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5405 is_weak, success, failure))
5406 return NULL_RTX;
5407
5408 /* Conditionally store back to EXPECT, lest we create a race condition
5409 with an improper store to memory. */
5410 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5411 the normal case where EXPECT is totally private, i.e. a register. At
5412 which point the store can be unconditional. */
5413 label = gen_label_rtx ();
5414 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5415 GET_MODE (target), 1, label);
5416 emit_move_insn (expect, oldval);
5417 emit_label (label);
5418
5419 return target;
5420 }
5421
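/* Editor's illustration (an added sketch, not part of GCC): the
   conditional store back to EXPECT above is what lets the usual
   source-level retry loop avoid reloading the location by hand:

       int v = 0;
       int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
       // Fourth argument nonzero selects the weak form, which may fail
       // spuriously, hence the loop.
       while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                            1, __ATOMIC_SEQ_CST,
                                            __ATOMIC_RELAXED))
         ;  // on failure, expected has been refreshed from v
*/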
5422 /* Expand the __atomic_load intrinsic:
5423 TYPE __atomic_load (TYPE *object, enum memmodel)
5424 EXP is the CALL_EXPR.
5425 TARGET is an optional place for us to store the results. */
5426
5427 static rtx
5428 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5429 {
5430 rtx mem;
5431 enum memmodel model;
5432
5433 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5434 if (is_mm_release (model) || is_mm_acq_rel (model))
5435 {
5436 warning (OPT_Winvalid_memory_model,
5437 "invalid memory model for %<__atomic_load%>");
5438 model = MEMMODEL_SEQ_CST;
5439 }
5440
5441 if (!flag_inline_atomics)
5442 return NULL_RTX;
5443
5444 /* Expand the operand. */
5445 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5446
5447 return expand_atomic_load (target, mem, model);
5448 }
5449
5450
5451 /* Expand the __atomic_store intrinsic:
5452 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5453 EXP is the CALL_EXPR.
5454 TARGET is an optional place for us to store the results. */
5455
5456 static rtx
5457 expand_builtin_atomic_store (machine_mode mode, tree exp)
5458 {
5459 rtx mem, val;
5460 enum memmodel model;
5461
5462 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5463 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5464 || is_mm_release (model)))
5465 {
5466 warning (OPT_Winvalid_memory_model,
5467 "invalid memory model for %<__atomic_store%>");
5468 model = MEMMODEL_SEQ_CST;
5469 }
5470
5471 if (!flag_inline_atomics)
5472 return NULL_RTX;
5473
5474 /* Expand the operands. */
5475 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5476 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5477
5478 return expand_atomic_store (mem, val, model, false);
5479 }
5480
5481 /* Expand the __atomic_fetch_XXX intrinsic:
5482 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5483 EXP is the CALL_EXPR.
5484 TARGET is an optional place for us to store the results.
5485 CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5486 FETCH_AFTER is true if returning the result of the operation.
5487 FETCH_AFTER is false if returning the value before the operation.
5488 IGNORE is true if the result is not used.
5489 EXT_CALL is the correct builtin for an external call if this cannot be
5490 resolved to an instruction sequence. */
5491
5492 static rtx
5493 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5494 enum rtx_code code, bool fetch_after,
5495 bool ignore, enum built_in_function ext_call)
5496 {
5497 rtx val, mem, ret;
5498 enum memmodel model;
5499 tree fndecl;
5500 tree addr;
5501
5502 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5503
5504 /* Expand the operands. */
5505 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5506 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5507
5508 /* Only try generating instructions if inlining is turned on. */
5509 if (flag_inline_atomics)
5510 {
5511 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5512 if (ret)
5513 return ret;
5514 }
5515
5516 /* If there is no alternate library routine to fall back on, return. */
5517 if (ext_call == BUILT_IN_NONE)
5518 return NULL_RTX;
5519
5520 /* Change the call to the specified function. */
5521 fndecl = get_callee_fndecl (exp);
5522 addr = CALL_EXPR_FN (exp);
5523 STRIP_NOPS (addr);
5524
5525 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5526 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5527
5528 /* Expand the call here so we can emit trailing code. */
5529 ret = expand_call (exp, target, ignore);
5530
5531 /* Replace the original function just in case it matters. */
5532 TREE_OPERAND (addr, 0) = fndecl;
5533
5534 /* Then issue the arithmetic correction to return the right result. */
5535 if (!ignore)
5536 {
5537 if (code == NOT)
5538 {
5539 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5540 OPTAB_LIB_WIDEN);
5541 ret = expand_simple_unop (mode, NOT, ret, target, true);
5542 }
5543 else
5544 ret = expand_simple_binop (mode, code, ret, val, target, true,
5545 OPTAB_LIB_WIDEN);
5546 }
5547 return ret;
5548 }
5549
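/* Editor's illustration (an added sketch, not part of GCC): the
   trailing correction above converts the fetched (old) value returned
   by the library routine into the op-and-fetch result the caller asked
   for.  Conceptually, for __atomic_add_fetch falling back to the
   __atomic_fetch_add library call:

       result = __atomic_fetch_add (p, val, model) + val;  // PLUS case
       // and for the NAND case (CODE == NOT):
       result = ~(fetched & val);
*/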
5550 /* Expand an atomic clear operation.
5551 void __atomic_clear (BOOL *obj, enum memmodel)
5552 EXP is the call expression. */
5553
5554 static rtx
5555 expand_builtin_atomic_clear (tree exp)
5556 {
5557 machine_mode mode;
5558 rtx mem, ret;
5559 enum memmodel model;
5560
5561 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5562 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5563 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5564
5565 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5566 {
5567 warning (OPT_Winvalid_memory_model,
5568 "invalid memory model for %<__atomic_clear%>");
5569 model = MEMMODEL_SEQ_CST;
5570 }
5571
5572 /* Try issuing an __atomic_store, allowing fallback to a
5573 __sync_lock_release pattern; if both fail, emit a plain store. The only
5574 way this can fail is if the bool type is larger than a word size.
5575 Unlikely, but handle it anyway for completeness. Assume a single-threaded
5576 model in that case, since there is no atomic support and no barriers are required. */
5577 ret = expand_atomic_store (mem, const0_rtx, model, true);
5578 if (!ret)
5579 emit_move_insn (mem, const0_rtx);
5580 return const0_rtx;
5581 }
5582
5583 /* Expand an atomic test_and_set operation.
5584 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5585 EXP is the call expression. */
5586
5587 static rtx
5588 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5589 {
5590 rtx mem;
5591 enum memmodel model;
5592 machine_mode mode;
5593
5594 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5595 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5596 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5597
5598 return expand_atomic_test_and_set (target, mem, model);
5599 }
5600
5601
5602 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5603 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5604
5605 static tree
5606 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5607 {
5608 int size;
5609 machine_mode mode;
5610 unsigned int mode_align, type_align;
5611
5612 if (TREE_CODE (arg0) != INTEGER_CST)
5613 return NULL_TREE;
5614
5615 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5616 mode = mode_for_size (size, MODE_INT, 0);
5617 mode_align = GET_MODE_ALIGNMENT (mode);
5618
5619 if (TREE_CODE (arg1) == INTEGER_CST)
5620 {
5621 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5622
5623 /* Either this argument is null, or it's a fake pointer encoding
5624 the alignment of the object. */
5625 val = val & -val;
5626 val *= BITS_PER_UNIT;
5627
5628 if (val == 0 || mode_align < val)
5629 type_align = mode_align;
5630 else
5631 type_align = val;
5632 }
5633 else
5634 {
5635 tree ttype = TREE_TYPE (arg1);
5636
5637 /* This function is usually invoked and folded immediately by the front
5638 end before anything else has a chance to look at it. The pointer
5639 parameter at this point is usually cast to a void *, so check for that
5640 and look past the cast. */
5641 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5642 && VOID_TYPE_P (TREE_TYPE (ttype)))
5643 arg1 = TREE_OPERAND (arg1, 0);
5644
5645 ttype = TREE_TYPE (arg1);
5646 gcc_assert (POINTER_TYPE_P (ttype));
5647
5648 /* Get the underlying type of the object. */
5649 ttype = TREE_TYPE (ttype);
5650 type_align = TYPE_ALIGN (ttype);
5651 }
5652
5653 /* If the object has smaller alignment, the lock free routines cannot
5654 be used. */
5655 if (type_align < mode_align)
5656 return boolean_false_node;
5657
5658 /* Check if a compare_and_swap pattern exists for the mode which represents
5659 the required size. The pattern is not allowed to fail, so the existence
5660 of the pattern indicates support is present. */
5661 if (can_compare_and_swap_p (mode, true))
5662 return boolean_true_node;
5663 else
5664 return boolean_false_node;
5665 }
5666
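/* Editor's illustration (an added sketch, not part of GCC): when the
   object pointer is null, the answer depends only on the typical
   alignment for the given size, so on a target with a 4-byte
   compare-and-swap pattern this folds to 1 at compile time:

       _Bool a = __atomic_always_lock_free (sizeof (int), 0);
*/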
5667 /* Return true if the parameters to call EXP represent an object which will
5668 always generate lock free instructions. The first argument represents the
5669 size of the object, and the second parameter is a pointer to the object
5670 itself. If NULL is passed for the object, then the result is based on
5671 typical alignment for an object of the specified size. Otherwise return
5672 false. */
5673
5674 static rtx
5675 expand_builtin_atomic_always_lock_free (tree exp)
5676 {
5677 tree size;
5678 tree arg0 = CALL_EXPR_ARG (exp, 0);
5679 tree arg1 = CALL_EXPR_ARG (exp, 1);
5680
5681 if (TREE_CODE (arg0) != INTEGER_CST)
5682 {
5683 error ("non-constant argument 1 to __atomic_always_lock_free");
5684 return const0_rtx;
5685 }
5686
5687 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5688 if (size == boolean_true_node)
5689 return const1_rtx;
5690 return const0_rtx;
5691 }
5692
5693 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5694 is lock free on this architecture. */
5695
5696 static tree
5697 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5698 {
5699 if (!flag_inline_atomics)
5700 return NULL_TREE;
5701
5702 /* If it isn't always lock free, don't generate a result. */
5703 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5704 return boolean_true_node;
5705
5706 return NULL_TREE;
5707 }
5708
5709 /* Return true if the parameters to call EXP represent an object which will
5710 always generate lock free instructions. The first argument represents the
5711 size of the object, and the second parameter is a pointer to the object
5712 itself. If NULL is passed for the object, then the result is based on
5713 typical alignment for an object of the specified size. Otherwise return
5714 NULL. */
5715
5716 static rtx
5717 expand_builtin_atomic_is_lock_free (tree exp)
5718 {
5719 tree size;
5720 tree arg0 = CALL_EXPR_ARG (exp, 0);
5721 tree arg1 = CALL_EXPR_ARG (exp, 1);
5722
5723 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5724 {
5725 error ("non-integer argument 1 to __atomic_is_lock_free");
5726 return NULL_RTX;
5727 }
5728
5729 if (!flag_inline_atomics)
5730 return NULL_RTX;
5731
5732 /* If the value is known at compile time, return the RTX for it. */
5733 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5734 if (size == boolean_true_node)
5735 return const1_rtx;
5736
5737 return NULL_RTX;
5738 }
5739
5740 /* Expand the __atomic_thread_fence intrinsic:
5741 void __atomic_thread_fence (enum memmodel)
5742 EXP is the CALL_EXPR. */
5743
5744 static void
5745 expand_builtin_atomic_thread_fence (tree exp)
5746 {
5747 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5748 expand_mem_thread_fence (model);
5749 }
5750
5751 /* Expand the __atomic_signal_fence intrinsic:
5752 void __atomic_signal_fence (enum memmodel)
5753 EXP is the CALL_EXPR. */
5754
5755 static void
5756 expand_builtin_atomic_signal_fence (tree exp)
5757 {
5758 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5759 expand_mem_signal_fence (model);
5760 }
5761
5762 /* Expand the __sync_synchronize intrinsic. */
5763
5764 static void
5765 expand_builtin_sync_synchronize (void)
5766 {
5767 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5768 }
5769
5770 static rtx
5771 expand_builtin_thread_pointer (tree exp, rtx target)
5772 {
5773 enum insn_code icode;
5774 if (!validate_arglist (exp, VOID_TYPE))
5775 return const0_rtx;
5776 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5777 if (icode != CODE_FOR_nothing)
5778 {
5779 struct expand_operand op;
5780 /* If the target is not suitable then create a new one. */
5781 if (target == NULL_RTX
5782 || !REG_P (target)
5783 || GET_MODE (target) != Pmode)
5784 target = gen_reg_rtx (Pmode);
5785 create_output_operand (&op, target, Pmode);
5786 expand_insn (icode, 1, &op);
5787 return target;
5788 }
5789 error ("__builtin_thread_pointer is not supported on this target");
5790 return const0_rtx;
5791 }
5792
5793 static void
5794 expand_builtin_set_thread_pointer (tree exp)
5795 {
5796 enum insn_code icode;
5797 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5798 return;
5799 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5800 if (icode != CODE_FOR_nothing)
5801 {
5802 struct expand_operand op;
5803 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5804 Pmode, EXPAND_NORMAL);
5805 create_input_operand (&op, val, Pmode);
5806 expand_insn (icode, 1, &op);
5807 return;
5808 }
5809 error ("__builtin_set_thread_pointer is not supported on this target");
5810 }
5811
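/* Editor's illustration (an added sketch, not part of GCC): on targets
   with a dedicated thread-pointer register (e.g. TPIDR_EL0 on AArch64)
   these expand to a plain register read or write; on targets without
   the optab they are rejected with the errors above:

       void *tp = __builtin_thread_pointer ();
       __builtin_set_thread_pointer (tp);
*/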
5812 \f
5813 /* Emit code to restore a previously saved value of the stack pointer. */
5814
5815 static void
5816 expand_stack_restore (tree var)
5817 {
5818 rtx_insn *prev;
5819 rtx sa = expand_normal (var);
5820
5821 sa = convert_memory_address (Pmode, sa);
5822
5823 prev = get_last_insn ();
5824 emit_stack_restore (SAVE_BLOCK, sa);
5825
5826 record_new_stack_level ();
5827
5828 fixup_args_size_notes (prev, get_last_insn (), 0);
5829 }
5830
5831 /* Emit code to save the current value of the stack pointer. */
5832
5833 static rtx
5834 expand_stack_save (void)
5835 {
5836 rtx ret = NULL_RTX;
5837
5838 emit_stack_save (SAVE_BLOCK, &ret);
5839 return ret;
5840 }
5841
5842
5843 /* Expand an expression EXP that calls a built-in function,
5844 with result going to TARGET if that's convenient
5845 (and in mode MODE if that's convenient).
5846 SUBTARGET may be used as the target for computing one of EXP's operands.
5847 IGNORE is nonzero if the value is to be ignored. */
5848
5849 rtx
5850 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5851 int ignore)
5852 {
5853 tree fndecl = get_callee_fndecl (exp);
5854 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5855 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5856 int flags;
5857
5858 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5859 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5860
5861 /* When ASan is enabled, we don't want to expand some memory/string
5862 builtins and rely on libsanitizer's hooks. This allows us to avoid
5863 redundant checks and be sure that a possible overflow will be detected
5864 by ASan. */
5865
5866 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5867 return expand_call (exp, target, ignore);
5868
5869 /* When not optimizing, generate calls to library functions for a certain
5870 set of builtins. */
5871 if (!optimize
5872 && !called_as_built_in (fndecl)
5873 && fcode != BUILT_IN_FORK
5874 && fcode != BUILT_IN_EXECL
5875 && fcode != BUILT_IN_EXECV
5876 && fcode != BUILT_IN_EXECLP
5877 && fcode != BUILT_IN_EXECLE
5878 && fcode != BUILT_IN_EXECVP
5879 && fcode != BUILT_IN_EXECVE
5880 && fcode != BUILT_IN_ALLOCA
5881 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5882 && fcode != BUILT_IN_FREE
5883 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5884 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5885 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5886 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5887 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5888 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5889 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5890 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5891 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5892 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5893 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5894 && fcode != BUILT_IN_CHKP_BNDRET)
5895 return expand_call (exp, target, ignore);
5896
5897 /* The built-in function expanders test for target == const0_rtx
5898 to determine whether the function's result will be ignored. */
5899 if (ignore)
5900 target = const0_rtx;
5901
5902 /* If the result of a pure or const built-in function is ignored, and
5903 none of its arguments are volatile, we can avoid expanding the
5904 built-in call and just evaluate the arguments for side-effects. */
5905 if (target == const0_rtx
5906 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5907 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5908 {
5909 bool volatilep = false;
5910 tree arg;
5911 call_expr_arg_iterator iter;
5912
5913 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5914 if (TREE_THIS_VOLATILE (arg))
5915 {
5916 volatilep = true;
5917 break;
5918 }
5919
5920 if (! volatilep)
5921 {
5922 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5923 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5924 return const0_rtx;
5925 }
5926 }
5927
5928 /* expand_builtin_with_bounds is supposed to be used for
5929 instrumented builtin calls. */
5930 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5931
5932 switch (fcode)
5933 {
5934 CASE_FLT_FN (BUILT_IN_FABS):
5935 case BUILT_IN_FABSD32:
5936 case BUILT_IN_FABSD64:
5937 case BUILT_IN_FABSD128:
5938 target = expand_builtin_fabs (exp, target, subtarget);
5939 if (target)
5940 return target;
5941 break;
5942
5943 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5944 target = expand_builtin_copysign (exp, target, subtarget);
5945 if (target)
5946 return target;
5947 break;
5948
5949 /* Just do a normal library call if we were unable to fold
5950 the values. */
5951 CASE_FLT_FN (BUILT_IN_CABS):
5952 break;
5953
5954 CASE_FLT_FN (BUILT_IN_EXP):
5955 CASE_FLT_FN (BUILT_IN_EXP10):
5956 CASE_FLT_FN (BUILT_IN_POW10):
5957 CASE_FLT_FN (BUILT_IN_EXP2):
5958 CASE_FLT_FN (BUILT_IN_EXPM1):
5959 CASE_FLT_FN (BUILT_IN_LOGB):
5960 CASE_FLT_FN (BUILT_IN_LOG):
5961 CASE_FLT_FN (BUILT_IN_LOG10):
5962 CASE_FLT_FN (BUILT_IN_LOG2):
5963 CASE_FLT_FN (BUILT_IN_LOG1P):
5964 CASE_FLT_FN (BUILT_IN_TAN):
5965 CASE_FLT_FN (BUILT_IN_ASIN):
5966 CASE_FLT_FN (BUILT_IN_ACOS):
5967 CASE_FLT_FN (BUILT_IN_ATAN):
5968 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5969 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5970 because of possible accuracy problems. */
5971 if (! flag_unsafe_math_optimizations)
5972 break;
5973 CASE_FLT_FN (BUILT_IN_SQRT):
5974 CASE_FLT_FN (BUILT_IN_FLOOR):
5975 CASE_FLT_FN (BUILT_IN_CEIL):
5976 CASE_FLT_FN (BUILT_IN_TRUNC):
5977 CASE_FLT_FN (BUILT_IN_ROUND):
5978 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5979 CASE_FLT_FN (BUILT_IN_RINT):
5980 target = expand_builtin_mathfn (exp, target, subtarget);
5981 if (target)
5982 return target;
5983 break;
5984
5985 CASE_FLT_FN (BUILT_IN_FMA):
5986 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5987 if (target)
5988 return target;
5989 break;
5990
5991 CASE_FLT_FN (BUILT_IN_ILOGB):
5992 if (! flag_unsafe_math_optimizations)
5993 break;
5994 CASE_FLT_FN (BUILT_IN_ISINF):
5995 CASE_FLT_FN (BUILT_IN_FINITE):
5996 case BUILT_IN_ISFINITE:
5997 case BUILT_IN_ISNORMAL:
5998 target = expand_builtin_interclass_mathfn (exp, target);
5999 if (target)
6000 return target;
6001 break;
6002
6003 CASE_FLT_FN (BUILT_IN_ICEIL):
6004 CASE_FLT_FN (BUILT_IN_LCEIL):
6005 CASE_FLT_FN (BUILT_IN_LLCEIL):
6006 CASE_FLT_FN (BUILT_IN_LFLOOR):
6007 CASE_FLT_FN (BUILT_IN_IFLOOR):
6008 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6009 target = expand_builtin_int_roundingfn (exp, target);
6010 if (target)
6011 return target;
6012 break;
6013
6014 CASE_FLT_FN (BUILT_IN_IRINT):
6015 CASE_FLT_FN (BUILT_IN_LRINT):
6016 CASE_FLT_FN (BUILT_IN_LLRINT):
6017 CASE_FLT_FN (BUILT_IN_IROUND):
6018 CASE_FLT_FN (BUILT_IN_LROUND):
6019 CASE_FLT_FN (BUILT_IN_LLROUND):
6020 target = expand_builtin_int_roundingfn_2 (exp, target);
6021 if (target)
6022 return target;
6023 break;
6024
6025 CASE_FLT_FN (BUILT_IN_POWI):
6026 target = expand_builtin_powi (exp, target);
6027 if (target)
6028 return target;
6029 break;
6030
6031 CASE_FLT_FN (BUILT_IN_ATAN2):
6032 CASE_FLT_FN (BUILT_IN_LDEXP):
6033 CASE_FLT_FN (BUILT_IN_SCALB):
6034 CASE_FLT_FN (BUILT_IN_SCALBN):
6035 CASE_FLT_FN (BUILT_IN_SCALBLN):
6036 if (! flag_unsafe_math_optimizations)
6037 break;
6038
6039 CASE_FLT_FN (BUILT_IN_FMOD):
6040 CASE_FLT_FN (BUILT_IN_REMAINDER):
6041 CASE_FLT_FN (BUILT_IN_DREM):
6042 CASE_FLT_FN (BUILT_IN_POW):
6043 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6044 if (target)
6045 return target;
6046 break;
6047
6048 CASE_FLT_FN (BUILT_IN_CEXPI):
6049 target = expand_builtin_cexpi (exp, target);
6050 gcc_assert (target);
6051 return target;
6052
6053 CASE_FLT_FN (BUILT_IN_SIN):
6054 CASE_FLT_FN (BUILT_IN_COS):
6055 if (! flag_unsafe_math_optimizations)
6056 break;
6057 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6058 if (target)
6059 return target;
6060 break;
6061
6062 CASE_FLT_FN (BUILT_IN_SINCOS):
6063 if (! flag_unsafe_math_optimizations)
6064 break;
6065 target = expand_builtin_sincos (exp);
6066 if (target)
6067 return target;
6068 break;
6069
6070 case BUILT_IN_APPLY_ARGS:
6071 return expand_builtin_apply_args ();
6072
6073 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6074 FUNCTION with a copy of the parameters described by
6075 ARGUMENTS, and ARGSIZE. It returns a block of memory
6076 allocated on the stack into which is stored all the registers
6077 that might possibly be used for returning the result of a
6078 function. ARGUMENTS is the value returned by
6079 __builtin_apply_args. ARGSIZE is the number of bytes of
6080 arguments that must be copied. ??? How should this value be
6081 computed? We'll also need a safe worst case value for varargs
6082 functions. */
6083 case BUILT_IN_APPLY:
6084 if (!validate_arglist (exp, POINTER_TYPE,
6085 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6086 && !validate_arglist (exp, REFERENCE_TYPE,
6087 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6088 return const0_rtx;
6089 else
6090 {
6091 rtx ops[3];
6092
6093 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6094 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6095 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6096
6097 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6098 }
6099
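/* Editor's illustration (an added sketch, not part of GCC): the three
   builtins handled here and below are typically used together to
   forward a call whose arguments are not known statically; 64 is a
   guessed worst-case argument-block size (see the ??? note above):

       void target_fn (int, double);
       void forwarder (int i, double d)
       {
         void *args = __builtin_apply_args ();
         void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
         __builtin_return (ret);
       }
*/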
6100 /* __builtin_return (RESULT) causes the function to return the
6101 value described by RESULT. RESULT is the address of the block of
6102 memory returned by __builtin_apply. */
6103 case BUILT_IN_RETURN:
6104 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6105 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6106 return const0_rtx;
6107
6108 case BUILT_IN_SAVEREGS:
6109 return expand_builtin_saveregs ();
6110
6111 case BUILT_IN_VA_ARG_PACK:
6112 /* All valid uses of __builtin_va_arg_pack () are removed during
6113 inlining. */
6114 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6115 return const0_rtx;
6116
6117 case BUILT_IN_VA_ARG_PACK_LEN:
6118 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6119 inlining. */
6120 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6121 return const0_rtx;
6122
6123 /* Return the address of the first anonymous stack arg. */
6124 case BUILT_IN_NEXT_ARG:
6125 if (fold_builtin_next_arg (exp, false))
6126 return const0_rtx;
6127 return expand_builtin_next_arg ();
6128
6129 case BUILT_IN_CLEAR_CACHE:
6130 target = expand_builtin___clear_cache (exp);
6131 if (target)
6132 return target;
6133 break;
6134
6135 case BUILT_IN_CLASSIFY_TYPE:
6136 return expand_builtin_classify_type (exp);
6137
6138 case BUILT_IN_CONSTANT_P:
6139 return const0_rtx;
6140
6141 case BUILT_IN_FRAME_ADDRESS:
6142 case BUILT_IN_RETURN_ADDRESS:
6143 return expand_builtin_frame_address (fndecl, exp);
6144
6145 /* Return the address of the area where the structure is returned,
6146 or 0 otherwise. */
6147 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6148 if (call_expr_nargs (exp) != 0
6149 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6150 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6151 return const0_rtx;
6152 else
6153 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6154
6155 case BUILT_IN_ALLOCA:
6156 case BUILT_IN_ALLOCA_WITH_ALIGN:
6157 /* If the allocation stems from the declaration of a variable-sized
6158 object, it cannot accumulate. */
6159 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6160 if (target)
6161 return target;
6162 break;
6163
6164 case BUILT_IN_STACK_SAVE:
6165 return expand_stack_save ();
6166
6167 case BUILT_IN_STACK_RESTORE:
6168 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6169 return const0_rtx;
6170
6171 case BUILT_IN_BSWAP16:
6172 case BUILT_IN_BSWAP32:
6173 case BUILT_IN_BSWAP64:
6174 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6175 if (target)
6176 return target;
6177 break;
6178
6179 CASE_INT_FN (BUILT_IN_FFS):
6180 target = expand_builtin_unop (target_mode, exp, target,
6181 subtarget, ffs_optab);
6182 if (target)
6183 return target;
6184 break;
6185
6186 CASE_INT_FN (BUILT_IN_CLZ):
6187 target = expand_builtin_unop (target_mode, exp, target,
6188 subtarget, clz_optab);
6189 if (target)
6190 return target;
6191 break;
6192
6193 CASE_INT_FN (BUILT_IN_CTZ):
6194 target = expand_builtin_unop (target_mode, exp, target,
6195 subtarget, ctz_optab);
6196 if (target)
6197 return target;
6198 break;
6199
6200 CASE_INT_FN (BUILT_IN_CLRSB):
6201 target = expand_builtin_unop (target_mode, exp, target,
6202 subtarget, clrsb_optab);
6203 if (target)
6204 return target;
6205 break;
6206
6207 CASE_INT_FN (BUILT_IN_POPCOUNT):
6208 target = expand_builtin_unop (target_mode, exp, target,
6209 subtarget, popcount_optab);
6210 if (target)
6211 return target;
6212 break;
6213
6214 CASE_INT_FN (BUILT_IN_PARITY):
6215 target = expand_builtin_unop (target_mode, exp, target,
6216 subtarget, parity_optab);
6217 if (target)
6218 return target;
6219 break;
6220
6221 case BUILT_IN_STRLEN:
6222 target = expand_builtin_strlen (exp, target, target_mode);
6223 if (target)
6224 return target;
6225 break;
6226
6227 case BUILT_IN_STRCPY:
6228 target = expand_builtin_strcpy (exp, target);
6229 if (target)
6230 return target;
6231 break;
6232
6233 case BUILT_IN_STRNCPY:
6234 target = expand_builtin_strncpy (exp, target);
6235 if (target)
6236 return target;
6237 break;
6238
6239 case BUILT_IN_STPCPY:
6240 target = expand_builtin_stpcpy (exp, target, mode);
6241 if (target)
6242 return target;
6243 break;
6244
6245 case BUILT_IN_MEMCPY:
6246 target = expand_builtin_memcpy (exp, target);
6247 if (target)
6248 return target;
6249 break;
6250
6251 case BUILT_IN_MEMPCPY:
6252 target = expand_builtin_mempcpy (exp, target, mode);
6253 if (target)
6254 return target;
6255 break;
6256
6257 case BUILT_IN_MEMSET:
6258 target = expand_builtin_memset (exp, target, mode);
6259 if (target)
6260 return target;
6261 break;
6262
6263 case BUILT_IN_BZERO:
6264 target = expand_builtin_bzero (exp);
6265 if (target)
6266 return target;
6267 break;
6268
6269 case BUILT_IN_STRCMP:
6270 target = expand_builtin_strcmp (exp, target);
6271 if (target)
6272 return target;
6273 break;
6274
6275 case BUILT_IN_STRNCMP:
6276 target = expand_builtin_strncmp (exp, target, mode);
6277 if (target)
6278 return target;
6279 break;
6280
6281 case BUILT_IN_BCMP:
6282 case BUILT_IN_MEMCMP:
6283 target = expand_builtin_memcmp (exp, target);
6284 if (target)
6285 return target;
6286 break;
6287
6288 case BUILT_IN_SETJMP:
6289 /* This should have been lowered to the builtins below. */
6290 gcc_unreachable ();
6291
6292 case BUILT_IN_SETJMP_SETUP:
6293 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6294 and the receiver label. */
6295 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6296 {
6297 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6298 VOIDmode, EXPAND_NORMAL);
6299 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6300 rtx_insn *label_r = label_rtx (label);
6301
6302 /* This is copied from the handling of non-local gotos. */
6303 expand_builtin_setjmp_setup (buf_addr, label_r);
6304 nonlocal_goto_handler_labels
6305 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6306 nonlocal_goto_handler_labels);
6307 /* ??? Do not let expand_label treat us as such since we would
6308 not want to be both on the list of non-local labels and on
6309 the list of forced labels. */
6310 FORCED_LABEL (label) = 0;
6311 return const0_rtx;
6312 }
6313 break;
6314
6315 case BUILT_IN_SETJMP_RECEIVER:
6316 /* __builtin_setjmp_receiver is passed the receiver label. */
6317 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6318 {
6319 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6320 rtx_insn *label_r = label_rtx (label);
6321
6322 expand_builtin_setjmp_receiver (label_r);
6323 return const0_rtx;
6324 }
6325 break;
6326
6327 /* __builtin_longjmp is passed a pointer to an array of five words.
6328 It's similar to the C library longjmp function but works with
6329 __builtin_setjmp above. */
6330 case BUILT_IN_LONGJMP:
6331 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6332 {
6333 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6334 VOIDmode, EXPAND_NORMAL);
6335 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6336
6337 if (value != const1_rtx)
6338 {
6339 error ("%<__builtin_longjmp%> second argument must be 1");
6340 return const0_rtx;
6341 }
6342
6343 expand_builtin_longjmp (buf_addr, value);
6344 return const0_rtx;
6345 }
6346 break;
6347
6348 case BUILT_IN_NONLOCAL_GOTO:
6349 target = expand_builtin_nonlocal_goto (exp);
6350 if (target)
6351 return target;
6352 break;
6353
6354 /* This updates the setjmp buffer that is its argument with the value
6355 of the current stack pointer. */
6356 case BUILT_IN_UPDATE_SETJMP_BUF:
6357 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6358 {
6359 rtx buf_addr
6360 = expand_normal (CALL_EXPR_ARG (exp, 0));
6361
6362 expand_builtin_update_setjmp_buf (buf_addr);
6363 return const0_rtx;
6364 }
6365 break;
6366
6367 case BUILT_IN_TRAP:
6368 expand_builtin_trap ();
6369 return const0_rtx;
6370
6371 case BUILT_IN_UNREACHABLE:
6372 expand_builtin_unreachable ();
6373 return const0_rtx;
6374
6375 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6376 case BUILT_IN_SIGNBITD32:
6377 case BUILT_IN_SIGNBITD64:
6378 case BUILT_IN_SIGNBITD128:
6379 target = expand_builtin_signbit (exp, target);
6380 if (target)
6381 return target;
6382 break;
6383
6384 /* Various hooks for the DWARF 2 __throw routine. */
6385 case BUILT_IN_UNWIND_INIT:
6386 expand_builtin_unwind_init ();
6387 return const0_rtx;
6388 case BUILT_IN_DWARF_CFA:
6389 return virtual_cfa_rtx;
6390 #ifdef DWARF2_UNWIND_INFO
6391 case BUILT_IN_DWARF_SP_COLUMN:
6392 return expand_builtin_dwarf_sp_column ();
6393 case BUILT_IN_INIT_DWARF_REG_SIZES:
6394 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6395 return const0_rtx;
6396 #endif
6397 case BUILT_IN_FROB_RETURN_ADDR:
6398 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6399 case BUILT_IN_EXTRACT_RETURN_ADDR:
6400 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6401 case BUILT_IN_EH_RETURN:
6402 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6403 CALL_EXPR_ARG (exp, 1));
6404 return const0_rtx;
6405 case BUILT_IN_EH_RETURN_DATA_REGNO:
6406 return expand_builtin_eh_return_data_regno (exp);
6407 case BUILT_IN_EXTEND_POINTER:
6408 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6409 case BUILT_IN_EH_POINTER:
6410 return expand_builtin_eh_pointer (exp);
6411 case BUILT_IN_EH_FILTER:
6412 return expand_builtin_eh_filter (exp);
6413 case BUILT_IN_EH_COPY_VALUES:
6414 return expand_builtin_eh_copy_values (exp);
6415
6416 case BUILT_IN_VA_START:
6417 return expand_builtin_va_start (exp);
6418 case BUILT_IN_VA_END:
6419 return expand_builtin_va_end (exp);
6420 case BUILT_IN_VA_COPY:
6421 return expand_builtin_va_copy (exp);
6422 case BUILT_IN_EXPECT:
6423 return expand_builtin_expect (exp, target);
6424 case BUILT_IN_ASSUME_ALIGNED:
6425 return expand_builtin_assume_aligned (exp, target);
6426 case BUILT_IN_PREFETCH:
6427 expand_builtin_prefetch (exp);
6428 return const0_rtx;
6429
6430 case BUILT_IN_INIT_TRAMPOLINE:
6431 return expand_builtin_init_trampoline (exp, true);
6432 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6433 return expand_builtin_init_trampoline (exp, false);
6434 case BUILT_IN_ADJUST_TRAMPOLINE:
6435 return expand_builtin_adjust_trampoline (exp);
6436
6437 case BUILT_IN_FORK:
6438 case BUILT_IN_EXECL:
6439 case BUILT_IN_EXECV:
6440 case BUILT_IN_EXECLP:
6441 case BUILT_IN_EXECLE:
6442 case BUILT_IN_EXECVP:
6443 case BUILT_IN_EXECVE:
6444 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6445 if (target)
6446 return target;
6447 break;
6448
6449 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6450 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6451 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6452 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6453 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6454 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6455 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6456 if (target)
6457 return target;
6458 break;
6459
6460 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6461 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6462 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6463 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6464 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6465 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6466 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6467 if (target)
6468 return target;
6469 break;
6470
6471 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6472 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6473 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6474 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6475 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6476 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6477 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6478 if (target)
6479 return target;
6480 break;
6481
6482 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6483 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6484 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6485 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6486 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6487 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6488 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6489 if (target)
6490 return target;
6491 break;
6492
6493 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6494 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6495 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6496 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6497 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6498 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6499 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6500 if (target)
6501 return target;
6502 break;
6503
6504 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6505 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6506 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6507 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6508 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6509 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6510 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6511 if (target)
6512 return target;
6513 break;
6514
6515 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6516 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6517 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6518 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6519 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6520 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6521 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6522 if (target)
6523 return target;
6524 break;
6525
6526 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6527 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6528 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6529 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6530 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6531 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6532 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6533 if (target)
6534 return target;
6535 break;
6536
6537 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6538 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6539 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6540 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6541 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6542 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6543 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6544 if (target)
6545 return target;
6546 break;
6547
6548 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6549 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6550 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6551 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6552 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6554 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6555 if (target)
6556 return target;
6557 break;
6558
6559 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6560 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6561 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6562 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6563 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6565 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6566 if (target)
6567 return target;
6568 break;
6569
6570 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6571 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6572 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6573 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6574 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6576 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6582 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6583 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6584 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6585 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6586 if (mode == VOIDmode)
6587 mode = TYPE_MODE (boolean_type_node);
6588 if (!target || !register_operand (target, mode))
6589 target = gen_reg_rtx (mode);
6590
6591 mode = get_builtin_sync_mode
6592 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6593 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6594 if (target)
6595 return target;
6596 break;
6597
6598 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6599 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6600 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6601 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6602 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6603 mode = get_builtin_sync_mode
6604 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6605 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6606 if (target)
6607 return target;
6608 break;
6609
6610 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6611 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6612 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6613 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6614 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6615 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6616 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6617 if (target)
6618 return target;
6619 break;
6620
6621 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6622 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6623 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6624 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6625 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6626 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6627 expand_builtin_sync_lock_release (mode, exp);
6628 return const0_rtx;
6629
6630 case BUILT_IN_SYNC_SYNCHRONIZE:
6631 expand_builtin_sync_synchronize ();
6632 return const0_rtx;
6633
6634 case BUILT_IN_ATOMIC_EXCHANGE_1:
6635 case BUILT_IN_ATOMIC_EXCHANGE_2:
6636 case BUILT_IN_ATOMIC_EXCHANGE_4:
6637 case BUILT_IN_ATOMIC_EXCHANGE_8:
6638 case BUILT_IN_ATOMIC_EXCHANGE_16:
6639 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6640 target = expand_builtin_atomic_exchange (mode, exp, target);
6641 if (target)
6642 return target;
6643 break;
6644
6645 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6646 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6647 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6648 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6649 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6650 {
6651 unsigned int nargs, z;
6652 vec<tree, va_gc> *vec;
6653
6654 mode =
6655 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6656 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6657 if (target)
6658 return target;
6659
6660 /* If this is turned into an external library call, the weak parameter
6661 must be dropped to match the expected parameter list. */
6662 nargs = call_expr_nargs (exp);
6663 vec_alloc (vec, nargs - 1);
6664 for (z = 0; z < 3; z++)
6665 vec->quick_push (CALL_EXPR_ARG (exp, z));
6666 /* Skip the boolean weak parameter. */
6667 for (z = 4; z < 6; z++)
6668 vec->quick_push (CALL_EXPR_ARG (exp, z));
6669 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6670 break;
6671 }
6672
6673 case BUILT_IN_ATOMIC_LOAD_1:
6674 case BUILT_IN_ATOMIC_LOAD_2:
6675 case BUILT_IN_ATOMIC_LOAD_4:
6676 case BUILT_IN_ATOMIC_LOAD_8:
6677 case BUILT_IN_ATOMIC_LOAD_16:
6678 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6679 target = expand_builtin_atomic_load (mode, exp, target);
6680 if (target)
6681 return target;
6682 break;
6683
6684 case BUILT_IN_ATOMIC_STORE_1:
6685 case BUILT_IN_ATOMIC_STORE_2:
6686 case BUILT_IN_ATOMIC_STORE_4:
6687 case BUILT_IN_ATOMIC_STORE_8:
6688 case BUILT_IN_ATOMIC_STORE_16:
6689 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6690 target = expand_builtin_atomic_store (mode, exp);
6691 if (target)
6692 return const0_rtx;
6693 break;
6694
6695 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6696 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6697 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6698 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6699 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6700 {
6701 enum built_in_function lib;
6702 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6703 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6704 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6705 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6706 ignore, lib);
6707 if (target)
6708 return target;
6709 break;
6710 }
6711 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6712 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6713 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6714 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6715 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6716 {
6717 enum built_in_function lib;
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6719 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6720 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6721 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6722 ignore, lib);
6723 if (target)
6724 return target;
6725 break;
6726 }
6727 case BUILT_IN_ATOMIC_AND_FETCH_1:
6728 case BUILT_IN_ATOMIC_AND_FETCH_2:
6729 case BUILT_IN_ATOMIC_AND_FETCH_4:
6730 case BUILT_IN_ATOMIC_AND_FETCH_8:
6731 case BUILT_IN_ATOMIC_AND_FETCH_16:
6732 {
6733 enum built_in_function lib;
6734 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6735 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6736 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6737 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6738 ignore, lib);
6739 if (target)
6740 return target;
6741 break;
6742 }
6743 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6744 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6745 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6746 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6747 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6748 {
6749 enum built_in_function lib;
6750 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6751 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6752 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6753 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6754 ignore, lib);
6755 if (target)
6756 return target;
6757 break;
6758 }
6759 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6760 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6761 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6762 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6763 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6764 {
6765 enum built_in_function lib;
6766 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6767 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6768 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6769 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6770 ignore, lib);
6771 if (target)
6772 return target;
6773 break;
6774 }
6775 case BUILT_IN_ATOMIC_OR_FETCH_1:
6776 case BUILT_IN_ATOMIC_OR_FETCH_2:
6777 case BUILT_IN_ATOMIC_OR_FETCH_4:
6778 case BUILT_IN_ATOMIC_OR_FETCH_8:
6779 case BUILT_IN_ATOMIC_OR_FETCH_16:
6780 {
6781 enum built_in_function lib;
6782 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6783 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6784 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6785 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6786 ignore, lib);
6787 if (target)
6788 return target;
6789 break;
6790 }
6791 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6792 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6793 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6794 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6795 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6797 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6798 ignore, BUILT_IN_NONE);
6799 if (target)
6800 return target;
6801 break;
6802
6803 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6804 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6805 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6806 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6807 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6808 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6809 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6810 ignore, BUILT_IN_NONE);
6811 if (target)
6812 return target;
6813 break;
6814
6815 case BUILT_IN_ATOMIC_FETCH_AND_1:
6816 case BUILT_IN_ATOMIC_FETCH_AND_2:
6817 case BUILT_IN_ATOMIC_FETCH_AND_4:
6818 case BUILT_IN_ATOMIC_FETCH_AND_8:
6819 case BUILT_IN_ATOMIC_FETCH_AND_16:
6820 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6821 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6822 ignore, BUILT_IN_NONE);
6823 if (target)
6824 return target;
6825 break;
6826
6827 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6828 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6829 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6830 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6831 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6832 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6833 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6834 ignore, BUILT_IN_NONE);
6835 if (target)
6836 return target;
6837 break;
6838
6839 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6840 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6841 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6842 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6843 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6844 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6845 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6846 ignore, BUILT_IN_NONE);
6847 if (target)
6848 return target;
6849 break;
6850
6851 case BUILT_IN_ATOMIC_FETCH_OR_1:
6852 case BUILT_IN_ATOMIC_FETCH_OR_2:
6853 case BUILT_IN_ATOMIC_FETCH_OR_4:
6854 case BUILT_IN_ATOMIC_FETCH_OR_8:
6855 case BUILT_IN_ATOMIC_FETCH_OR_16:
6856 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6857 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6858 ignore, BUILT_IN_NONE);
6859 if (target)
6860 return target;
6861 break;
6862
6863 case BUILT_IN_ATOMIC_TEST_AND_SET:
6864 return expand_builtin_atomic_test_and_set (exp, target);
6865
6866 case BUILT_IN_ATOMIC_CLEAR:
6867 return expand_builtin_atomic_clear (exp);
6868
6869 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6870 return expand_builtin_atomic_always_lock_free (exp);
6871
6872 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6873 target = expand_builtin_atomic_is_lock_free (exp);
6874 if (target)
6875 return target;
6876 break;
6877
6878 case BUILT_IN_ATOMIC_THREAD_FENCE:
6879 expand_builtin_atomic_thread_fence (exp);
6880 return const0_rtx;
6881
6882 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6883 expand_builtin_atomic_signal_fence (exp);
6884 return const0_rtx;
6885
6886 case BUILT_IN_OBJECT_SIZE:
6887 return expand_builtin_object_size (exp);
6888
6889 case BUILT_IN_MEMCPY_CHK:
6890 case BUILT_IN_MEMPCPY_CHK:
6891 case BUILT_IN_MEMMOVE_CHK:
6892 case BUILT_IN_MEMSET_CHK:
6893 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6894 if (target)
6895 return target;
6896 break;
6897
6898 case BUILT_IN_STRCPY_CHK:
6899 case BUILT_IN_STPCPY_CHK:
6900 case BUILT_IN_STRNCPY_CHK:
6901 case BUILT_IN_STPNCPY_CHK:
6902 case BUILT_IN_STRCAT_CHK:
6903 case BUILT_IN_STRNCAT_CHK:
6904 case BUILT_IN_SNPRINTF_CHK:
6905 case BUILT_IN_VSNPRINTF_CHK:
6906 maybe_emit_chk_warning (exp, fcode);
6907 break;
6908
6909 case BUILT_IN_SPRINTF_CHK:
6910 case BUILT_IN_VSPRINTF_CHK:
6911 maybe_emit_sprintf_chk_warning (exp, fcode);
6912 break;
6913
6914 case BUILT_IN_FREE:
6915 if (warn_free_nonheap_object)
6916 maybe_emit_free_warning (exp);
6917 break;
6918
6919 case BUILT_IN_THREAD_POINTER:
6920 return expand_builtin_thread_pointer (exp, target);
6921
6922 case BUILT_IN_SET_THREAD_POINTER:
6923 expand_builtin_set_thread_pointer (exp);
6924 return const0_rtx;
6925
6926 case BUILT_IN_CILK_DETACH:
6927 expand_builtin_cilk_detach (exp);
6928 return const0_rtx;
6929
6930 case BUILT_IN_CILK_POP_FRAME:
6931 expand_builtin_cilk_pop_frame (exp);
6932 return const0_rtx;
6933
6934 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6935 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6936 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6937 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6938 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6939 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6940 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6941 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6942 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6943 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6944 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6945 /* We allow user CHKP builtins if Pointer Bounds
6946 Checker is off. */
6947 if (!chkp_function_instrumented_p (current_function_decl))
6948 {
6949 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6950 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6951 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6952 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6953 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6954 return expand_normal (CALL_EXPR_ARG (exp, 0));
6955 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6956 return expand_normal (size_zero_node);
6957 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6958 return expand_normal (size_int (-1));
6959 else
6960 return const0_rtx;
6961 }
6962 /* FALLTHROUGH */
6963
6964 case BUILT_IN_CHKP_BNDMK:
6965 case BUILT_IN_CHKP_BNDSTX:
6966 case BUILT_IN_CHKP_BNDCL:
6967 case BUILT_IN_CHKP_BNDCU:
6968 case BUILT_IN_CHKP_BNDLDX:
6969 case BUILT_IN_CHKP_BNDRET:
6970 case BUILT_IN_CHKP_INTERSECT:
6971 case BUILT_IN_CHKP_NARROW:
6972 case BUILT_IN_CHKP_EXTRACT_LOWER:
6973 case BUILT_IN_CHKP_EXTRACT_UPPER:
6974 /* A software implementation of Pointer Bounds Checker is not yet
6975 implemented; target support is required. */
6976 error ("Your target platform does not support -fcheck-pointer-bounds");
6977 break;
6978
6979 case BUILT_IN_ACC_ON_DEVICE:
6980 /* Do the library call if we failed to expand the builtin when
6981 folding. */
6982 break;
6983
6984 default: /* Just do a library call for an unknown builtin. */
6985 break;
6986 }
6987
6988 /* The switch statement above can drop through to cause the function
6989 to be called normally. */
6990 return expand_call (exp, target, ignore);
6991 }
6992
6993 /* Similar to expand_builtin but is used for instrumented calls. */
6994
6995 rtx
6996 expand_builtin_with_bounds (tree exp, rtx target,
6997 rtx subtarget ATTRIBUTE_UNUSED,
6998 machine_mode mode, int ignore)
6999 {
7000 tree fndecl = get_callee_fndecl (exp);
7001 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7002
7003 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7004
7005 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7006 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7007
7008 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7009 && fcode < END_CHKP_BUILTINS);
7010
7011 switch (fcode)
7012 {
7013 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7014 target = expand_builtin_memcpy_with_bounds (exp, target);
7015 if (target)
7016 return target;
7017 break;
7018
7019 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7020 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7021 if (target)
7022 return target;
7023 break;
7024
7025 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7026 target = expand_builtin_memset_with_bounds (exp, target, mode);
7027 if (target)
7028 return target;
7029 break;
7030
7031 default:
7032 break;
7033 }
7034
7035 /* The switch statement above can drop through to cause the function
7036 to be called normally. */
7037 return expand_call (exp, target, ignore);
7038 }
7039
7040 /* Determine whether a tree node represents a call to a built-in
7041 function. If the tree T is a call to a built-in function with
7042 the right number of arguments of the appropriate types, return
7043 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7044 Otherwise the return value is END_BUILTINS. */
7045
7046 enum built_in_function
7047 builtin_mathfn_code (const_tree t)
7048 {
7049 const_tree fndecl, arg, parmlist;
7050 const_tree argtype, parmtype;
7051 const_call_expr_arg_iterator iter;
7052
7053 if (TREE_CODE (t) != CALL_EXPR
7054 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7055 return END_BUILTINS;
7056
7057 fndecl = get_callee_fndecl (t);
7058 if (fndecl == NULL_TREE
7059 || TREE_CODE (fndecl) != FUNCTION_DECL
7060 || ! DECL_BUILT_IN (fndecl)
7061 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7062 return END_BUILTINS;
7063
7064 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7065 init_const_call_expr_arg_iterator (t, &iter);
7066 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7067 {
7068 /* If a function doesn't take a variable number of arguments,
7069 the last element in the list will have type `void'. */
7070 parmtype = TREE_VALUE (parmlist);
7071 if (VOID_TYPE_P (parmtype))
7072 {
7073 if (more_const_call_expr_args_p (&iter))
7074 return END_BUILTINS;
7075 return DECL_FUNCTION_CODE (fndecl);
7076 }
7077
7078 if (! more_const_call_expr_args_p (&iter))
7079 return END_BUILTINS;
7080
7081 arg = next_const_call_expr_arg (&iter);
7082 argtype = TREE_TYPE (arg);
7083
7084 if (SCALAR_FLOAT_TYPE_P (parmtype))
7085 {
7086 if (! SCALAR_FLOAT_TYPE_P (argtype))
7087 return END_BUILTINS;
7088 }
7089 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7090 {
7091 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7092 return END_BUILTINS;
7093 }
7094 else if (POINTER_TYPE_P (parmtype))
7095 {
7096 if (! POINTER_TYPE_P (argtype))
7097 return END_BUILTINS;
7098 }
7099 else if (INTEGRAL_TYPE_P (parmtype))
7100 {
7101 if (! INTEGRAL_TYPE_P (argtype))
7102 return END_BUILTINS;
7103 }
7104 else
7105 return END_BUILTINS;
7106 }
7107
7108 /* Variable-length argument list. */
7109 return DECL_FUNCTION_CODE (fndecl);
7110 }
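/* Editorial sketch (hypothetical caller, not part of this file): a pass
   wanting to recognize sqrt in all float kinds could use the matcher
   above as

       switch (builtin_mathfn_code (expr))
         {
         CASE_FLT_FN (BUILT_IN_SQRT):
           ... expr is a type-correct call to sqrtf/sqrt/sqrtl ...
           break;
         default:
           break;
         }

   builtin_mathfn_code returns END_BUILTINS for everything else, so the
   default arm is the "no match" path.  The parameter/argument type walk
   above rejects mis-prototyped user declarations before any caller
   trusts the call.  */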
7111
7112 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7113 evaluate to a constant. */
7114
7115 static tree
7116 fold_builtin_constant_p (tree arg)
7117 {
7118 /* We return 1 for a numeric type that's known to be a constant
7119 value at compile-time or for an aggregate type that's a
7120 literal constant. */
7121 STRIP_NOPS (arg);
7122
7123 /* If we know this is a constant, return the constant one. */
7124 if (CONSTANT_CLASS_P (arg)
7125 || (TREE_CODE (arg) == CONSTRUCTOR
7126 && TREE_CONSTANT (arg)))
7127 return integer_one_node;
7128 if (TREE_CODE (arg) == ADDR_EXPR)
7129 {
7130 tree op = TREE_OPERAND (arg, 0);
7131 if (TREE_CODE (op) == STRING_CST
7132 || (TREE_CODE (op) == ARRAY_REF
7133 && integer_zerop (TREE_OPERAND (op, 1))
7134 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7135 return integer_one_node;
7136 }
7137
7138 /* If this expression has side effects, show we don't know it to be a
7139 constant. Likewise if it's a pointer or aggregate type, since in
7140 that case we only want literals; those are only optimized
7141 when generating RTL, not later.
7142 And finally, if we are compiling an initializer, not code, we
7143 need to return a definite result now; there's not going to be any
7144 more optimization done. */
7145 if (TREE_SIDE_EFFECTS (arg)
7146 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7147 || POINTER_TYPE_P (TREE_TYPE (arg))
7148 || cfun == 0
7149 || folding_initializer
7150 || force_folding_builtin_constant_p)
7151 return integer_zero_node;
7152
7153 return NULL_TREE;
7154 }
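/* Editorial examples (standard semantics of the builtin; nothing
   file-specific):

       __builtin_constant_p (42)     folds to 1 immediately
       __builtin_constant_p ("abc")  folds to 1 (address of a literal)
       __builtin_constant_p (x++)    folds to 0 (side effects)
       __builtin_constant_p (n)      NULL_TREE here: the answer is
                                     deferred so later passes may still
                                     prove n constant

   When folding_initializer or force_folding_builtin_constant_p is set,
   the deferral above is cut short and 0 is returned instead.  */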
7155
7156 /* Create builtin_expect with PRED, EXPECTED and (if non-NULL) PREDICTOR
7157 as its arguments and return it as a truthvalue. */
7158
7159 static tree
7160 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7161 tree predictor)
7162 {
7163 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7164
7165 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7166 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7167 ret_type = TREE_TYPE (TREE_TYPE (fn));
7168 pred_type = TREE_VALUE (arg_types);
7169 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7170
7171 pred = fold_convert_loc (loc, pred_type, pred);
7172 expected = fold_convert_loc (loc, expected_type, expected);
7173 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7174 predictor);
7175
7176 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7177 build_int_cst (ret_type, 0));
7178 }
7179
7180 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7181 Return NULL_TREE if no simplification is possible. */
7182
7183 tree
7184 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7185 {
7186 tree inner, fndecl, inner_arg0;
7187 enum tree_code code;
7188
7189 /* Distribute the expected value over short-circuiting operators.
7190 See through the cast from truthvalue_type_node to long. */
7191 inner_arg0 = arg0;
7192 while (CONVERT_EXPR_P (inner_arg0)
7193 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7194 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7195 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7196
7197 /* If this is a builtin_expect within a builtin_expect, keep the
7198 inner one. See through a comparison against a constant. It
7199 might have been added to create a truthvalue. */
7200 inner = inner_arg0;
7201
7202 if (COMPARISON_CLASS_P (inner)
7203 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7204 inner = TREE_OPERAND (inner, 0);
7205
7206 if (TREE_CODE (inner) == CALL_EXPR
7207 && (fndecl = get_callee_fndecl (inner))
7208 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7209 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7210 return arg0;
7211
7212 inner = inner_arg0;
7213 code = TREE_CODE (inner);
7214 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7215 {
7216 tree op0 = TREE_OPERAND (inner, 0);
7217 tree op1 = TREE_OPERAND (inner, 1);
7218
7219 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7220 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7221 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7222
7223 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7224 }
7225
7226 /* If the argument isn't invariant then there's nothing else we can do. */
7227 if (!TREE_CONSTANT (inner_arg0))
7228 return NULL_TREE;
7229
7230 /* If we expect that a comparison against the argument will fold to
7231 a constant, return the constant. In practice, this means a true
7232 constant or the address of a non-weak symbol. */
7233 inner = inner_arg0;
7234 STRIP_NOPS (inner);
7235 if (TREE_CODE (inner) == ADDR_EXPR)
7236 {
7237 do
7238 {
7239 inner = TREE_OPERAND (inner, 0);
7240 }
7241 while (TREE_CODE (inner) == COMPONENT_REF
7242 || TREE_CODE (inner) == ARRAY_REF);
7243 if ((TREE_CODE (inner) == VAR_DECL
7244 || TREE_CODE (inner) == FUNCTION_DECL)
7245 && DECL_WEAK (inner))
7246 return NULL_TREE;
7247 }
7248
7249 /* Otherwise, ARG0 already has the proper type for the return value. */
7250 return arg0;
7251 }
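/* Editorial sketch (source-level view of the distribution above):

       __builtin_expect (a && b, 1)
   =>  __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so that each sub-predicate carries the branch hint after the
   TRUTH_ANDIF_EXPR is gimplified, instead of one hint on the combined
   result only.  */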
7252
7253 /* Fold a call to __builtin_classify_type with argument ARG. */
7254
7255 static tree
7256 fold_builtin_classify_type (tree arg)
7257 {
7258 if (arg == 0)
7259 return build_int_cst (integer_type_node, no_type_class);
7260
7261 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7262 }
7263
7264 /* Fold a call to __builtin_strlen with argument ARG. */
7265
7266 static tree
7267 fold_builtin_strlen (location_t loc, tree type, tree arg)
7268 {
7269 if (!validate_arg (arg, POINTER_TYPE))
7270 return NULL_TREE;
7271 else
7272 {
7273 tree len = c_strlen (arg, 0);
7274
7275 if (len)
7276 return fold_convert_loc (loc, type, len);
7277
7278 return NULL_TREE;
7279 }
7280 }
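/* Editorial example (follows from c_strlen on a constant string):

       __builtin_strlen ("hello")   folds to (size_t) 5

   converted to TYPE, the return type of strlen.  */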
7281
7282 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7283
7284 static tree
7285 fold_builtin_inf (location_t loc, tree type, int warn)
7286 {
7287 REAL_VALUE_TYPE real;
7288
7289 /* __builtin_inff is intended to be usable to define INFINITY on all
7290 targets. If an infinity is not available, INFINITY expands "to a
7291 positive constant of type float that overflows at translation
7292 time", footnote "In this case, using INFINITY will violate the
7293 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7294 Thus we pedwarn to ensure this constraint violation is
7295 diagnosed. */
7296 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7297 pedwarn (loc, 0, "target format does not support infinity");
7298
7299 real_inf (&real);
7300 return build_real (type, real);
7301 }
7302
7303 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7304
7305 static tree
7306 fold_builtin_nan (tree arg, tree type, int quiet)
7307 {
7308 REAL_VALUE_TYPE real;
7309 const char *str;
7310
7311 if (!validate_arg (arg, POINTER_TYPE))
7312 return NULL_TREE;
7313 str = c_getstr (arg);
7314 if (!str)
7315 return NULL_TREE;
7316
7317 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7318 return NULL_TREE;
7319
7320 return build_real (type, real);
7321 }
7322
7323 /* Return true if the floating point expression T has an integer value.
7324 We also allow +Inf, -Inf and NaN to be considered integer values. */
7325
7326 static bool
7327 integer_valued_real_p (tree t)
7328 {
7329 switch (TREE_CODE (t))
7330 {
7331 case FLOAT_EXPR:
7332 return true;
7333
7334 case ABS_EXPR:
7335 case SAVE_EXPR:
7336 return integer_valued_real_p (TREE_OPERAND (t, 0));
7337
7338 case COMPOUND_EXPR:
7339 case MODIFY_EXPR:
7340 case BIND_EXPR:
7341 return integer_valued_real_p (TREE_OPERAND (t, 1));
7342
7343 case PLUS_EXPR:
7344 case MINUS_EXPR:
7345 case MULT_EXPR:
7346 case MIN_EXPR:
7347 case MAX_EXPR:
7348 return integer_valued_real_p (TREE_OPERAND (t, 0))
7349 && integer_valued_real_p (TREE_OPERAND (t, 1));
7350
7351 case COND_EXPR:
7352 return integer_valued_real_p (TREE_OPERAND (t, 1))
7353 && integer_valued_real_p (TREE_OPERAND (t, 2));
7354
7355 case REAL_CST:
7356 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7357
7358 CASE_CONVERT:
7359 {
7360 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7361 if (TREE_CODE (type) == INTEGER_TYPE)
7362 return true;
7363 if (TREE_CODE (type) == REAL_TYPE)
7364 return integer_valued_real_p (TREE_OPERAND (t, 0));
7365 break;
7366 }
7367
7368 case CALL_EXPR:
7369 switch (builtin_mathfn_code (t))
7370 {
7371 CASE_FLT_FN (BUILT_IN_CEIL):
7372 CASE_FLT_FN (BUILT_IN_FLOOR):
7373 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7374 CASE_FLT_FN (BUILT_IN_RINT):
7375 CASE_FLT_FN (BUILT_IN_ROUND):
7376 CASE_FLT_FN (BUILT_IN_TRUNC):
7377 return true;
7378
7379 CASE_FLT_FN (BUILT_IN_FMIN):
7380 CASE_FLT_FN (BUILT_IN_FMAX):
7381 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7382 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7383
7384 default:
7385 break;
7386 }
7387 break;
7388
7389 default:
7390 break;
7391 }
7392 return false;
7393 }
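/* Editorial examples (follow directly from the cases above):

       (double) i              integer valued (conversion from an
                               INTEGER_TYPE operand)
       floor (x) + ceil (y)    integer valued (PLUS_EXPR of two
                               integer-valued rounding calls)
       x * 0.5                 not provably integer valued

   NaN and +/-Inf are deliberately counted as "integer" so callers such
   as fold_trunc_transparent_mathfn may drop redundant rounding.  */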
7394
7395 /* FNDECL is assumed to be a builtin where truncation can be propagated
7396 across (for instance floor((double)f) == (double)floorf (f)).
7397 Do the transformation for a call with argument ARG. */
7398
7399 static tree
7400 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7401 {
7402 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7403
7404 if (!validate_arg (arg, REAL_TYPE))
7405 return NULL_TREE;
7406
7407 /* Integer rounding functions are idempotent. */
7408 if (fcode == builtin_mathfn_code (arg))
7409 return arg;
7410
7411 /* If the argument is already integer valued, and we don't need to worry
7412 about setting errno, there's no need to perform rounding. */
7413 if (! flag_errno_math && integer_valued_real_p (arg))
7414 return arg;
7415
7416 if (optimize)
7417 {
7418 tree arg0 = strip_float_extensions (arg);
7419 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7420 tree newtype = TREE_TYPE (arg0);
7421 tree decl;
7422
7423 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7424 && (decl = mathfn_built_in (newtype, fcode)))
7425 return fold_convert_loc (loc, ftype,
7426 build_call_expr_loc (loc, decl, 1,
7427 fold_convert_loc (loc,
7428 newtype,
7429 arg0)));
7430 }
7431 return NULL_TREE;
7432 }
7433
7434 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7435 the argument, for instance lround((double)f) -> lroundf (f).
7436 Do the transformation for a call with argument ARG. */
7437
7438 static tree
7439 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7440 {
7441 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7442
7443 if (!validate_arg (arg, REAL_TYPE))
7444 return NULL_TREE;
7445
7446 /* If the argument is already integer valued, and we don't need to worry
7447 about setting errno, there's no need to perform rounding. */
7448 if (! flag_errno_math && integer_valued_real_p (arg))
7449 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7450 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7451
7452 if (optimize)
7453 {
7454 tree ftype = TREE_TYPE (arg);
7455 tree arg0 = strip_float_extensions (arg);
7456 tree newtype = TREE_TYPE (arg0);
7457 tree decl;
7458
7459 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7460 && (decl = mathfn_built_in (newtype, fcode)))
7461 return build_call_expr_loc (loc, decl, 1,
7462 fold_convert_loc (loc, newtype, arg0));
7463 }
7464
7465 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7466 sizeof (int) == sizeof (long). */
7467 if (TYPE_PRECISION (integer_type_node)
7468 == TYPE_PRECISION (long_integer_type_node))
7469 {
7470 tree newfn = NULL_TREE;
7471 switch (fcode)
7472 {
7473 CASE_FLT_FN (BUILT_IN_ICEIL):
7474 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7475 break;
7476
7477 CASE_FLT_FN (BUILT_IN_IFLOOR):
7478 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7479 break;
7480
7481 CASE_FLT_FN (BUILT_IN_IROUND):
7482 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7483 break;
7484
7485 CASE_FLT_FN (BUILT_IN_IRINT):
7486 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7487 break;
7488
7489 default:
7490 break;
7491 }
7492
7493 if (newfn)
7494 {
7495 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7496 return fold_convert_loc (loc,
7497 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7498 }
7499 }
7500
7501 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7502 sizeof (long long) == sizeof (long). */
7503 if (TYPE_PRECISION (long_long_integer_type_node)
7504 == TYPE_PRECISION (long_integer_type_node))
7505 {
7506 tree newfn = NULL_TREE;
7507 switch (fcode)
7508 {
7509 CASE_FLT_FN (BUILT_IN_LLCEIL):
7510 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7511 break;
7512
7513 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7514 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7515 break;
7516
7517 CASE_FLT_FN (BUILT_IN_LLROUND):
7518 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7519 break;
7520
7521 CASE_FLT_FN (BUILT_IN_LLRINT):
7522 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7523 break;
7524
7525 default:
7526 break;
7527 }
7528
7529 if (newfn)
7530 {
7531 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7532 return fold_convert_loc (loc,
7533 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7534 }
7535 }
7536
7537 return NULL_TREE;
7538 }
7539
7540 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7541 NULL_TREE if no simplification can be made. */
7542
7543 static tree
7544 fold_builtin_sincos (location_t loc,
7545 tree arg0, tree arg1, tree arg2)
7546 {
7547 tree type;
7548 tree res, fn, call;
7549
7550 if (!validate_arg (arg0, REAL_TYPE)
7551 || !validate_arg (arg1, POINTER_TYPE)
7552 || !validate_arg (arg2, POINTER_TYPE))
7553 return NULL_TREE;
7554
7555 type = TREE_TYPE (arg0);
7556
7557 /* Calculate the result when the argument is a constant. */
7558 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7559 return res;
7560
7561 /* Canonicalize sincos to cexpi. */
7562 if (!targetm.libc_has_function (function_c99_math_complex))
7563 return NULL_TREE;
7564 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7565 if (!fn)
7566 return NULL_TREE;
7567
7568 call = build_call_expr_loc (loc, fn, 1, arg0);
7569 call = builtin_save_expr (call);
7570
7571 return build2 (COMPOUND_EXPR, void_type_node,
7572 build2 (MODIFY_EXPR, void_type_node,
7573 build_fold_indirect_ref_loc (loc, arg1),
7574 build1 (IMAGPART_EXPR, type, call)),
7575 build2 (MODIFY_EXPR, void_type_node,
7576 build_fold_indirect_ref_loc (loc, arg2),
7577 build1 (REALPART_EXPR, type, call)));
7578 }
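/* Editorial sketch (source-level view of the tree built above):

       sincos (x, &s, &c);
   =>
       _Complex double t = cexpi (x);
       s = __imag__ t;
       c = __real__ t;

   cexpi (x) computes cos (x) + sin (x) * I, so the imaginary part feeds
   the sine output and the real part the cosine output, matching the
   IMAGPART_EXPR/REALPART_EXPR stores in the COMPOUND_EXPR above.  */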
7579
7580 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7581 NULL_TREE if no simplification can be made. */
7582
7583 static tree
7584 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7585 {
7586 tree rtype;
7587 tree realp, imagp, ifn;
7588 tree res;
7589
7590 if (!validate_arg (arg0, COMPLEX_TYPE)
7591 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7592 return NULL_TREE;
7593
7594 /* Calculate the result when the argument is a constant. */
7595 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7596 return res;
7597
7598 rtype = TREE_TYPE (TREE_TYPE (arg0));
7599
7600 /* If we can determine that the real part of arg0 is constant zero,
7601 fold to cexpi. */
7602 if (!targetm.libc_has_function (function_c99_math_complex))
7603 return NULL_TREE;
7604 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7605 if (!ifn)
7606 return NULL_TREE;
7607
7608 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7609 && real_zerop (realp))
7610 {
7611 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7612 return build_call_expr_loc (loc, ifn, 1, narg);
7613 }
7614
7615 /* If we can easily decompose the real and imaginary parts, split cexp
7616 into exp (r) * cexpi (i). */
7617 if (flag_unsafe_math_optimizations
7618 && realp)
7619 {
7620 tree rfn, rcall, icall;
7621
7622 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7623 if (!rfn)
7624 return NULL_TREE;
7625
7626 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7627 if (!imagp)
7628 return NULL_TREE;
7629
7630 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7631 icall = builtin_save_expr (icall);
7632 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7633 rcall = builtin_save_expr (rcall);
7634 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7635 fold_build2_loc (loc, MULT_EXPR, rtype,
7636 rcall,
7637 fold_build1_loc (loc, REALPART_EXPR,
7638 rtype, icall)),
7639 fold_build2_loc (loc, MULT_EXPR, rtype,
7640 rcall,
7641 fold_build1_loc (loc, IMAGPART_EXPR,
7642 rtype, icall)));
7643 }
7644
7645 return NULL_TREE;
7646 }
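/* Editorial sketch (the unsafe-math split above, written as source):

       cexp (r + i * I)
   =>  exp (r) * cexpi (i)
    =  exp (r) * cos (i) + exp (r) * sin (i) * I

   This is an identity for finite values; it is gated on
   flag_unsafe_math_optimizations because the decomposed form can differ
   from cexp in rounding and in special-value (Inf/NaN) handling.  */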
7647
7648 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7649 Return NULL_TREE if no simplification can be made. */
7650
7651 static tree
7652 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7653 {
7654 if (!validate_arg (arg, REAL_TYPE))
7655 return NULL_TREE;
7656
7657 /* Optimize trunc of constant value. */
7658 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7659 {
7660 REAL_VALUE_TYPE r, x;
7661 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7662
7663 x = TREE_REAL_CST (arg);
7664 real_trunc (&r, TYPE_MODE (type), &x);
7665 return build_real (type, r);
7666 }
7667
7668 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7669 }
7670
7671 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7672 Return NULL_TREE if no simplification can be made. */
7673
7674 static tree
7675 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7676 {
7677 if (!validate_arg (arg, REAL_TYPE))
7678 return NULL_TREE;
7679
7680 /* Optimize floor of constant value. */
7681 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7682 {
7683 REAL_VALUE_TYPE x;
7684
7685 x = TREE_REAL_CST (arg);
7686 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7687 {
7688 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7689 REAL_VALUE_TYPE r;
7690
7691 real_floor (&r, TYPE_MODE (type), &x);
7692 return build_real (type, r);
7693 }
7694 }
7695
7696 /* Fold floor (x) where x is nonnegative to trunc (x). */
7697 if (tree_expr_nonnegative_p (arg))
7698 {
7699 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7700 if (truncfn)
7701 return build_call_expr_loc (loc, truncfn, 1, arg);
7702 }
7703
7704 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7705 }
7706
7707 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7708 Return NULL_TREE if no simplification can be made. */
7709
7710 static tree
7711 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7712 {
7713 if (!validate_arg (arg, REAL_TYPE))
7714 return NULL_TREE;
7715
7716 /* Optimize ceil of constant value. */
7717 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7718 {
7719 REAL_VALUE_TYPE x;
7720
7721 x = TREE_REAL_CST (arg);
7722 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7723 {
7724 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7725 REAL_VALUE_TYPE r;
7726
7727 real_ceil (&r, TYPE_MODE (type), &x);
7728 return build_real (type, r);
7729 }
7730 }
7731
7732 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7733 }
7734
7735 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7736 Return NULL_TREE if no simplification can be made. */
7737
7738 static tree
7739 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7740 {
7741 if (!validate_arg (arg, REAL_TYPE))
7742 return NULL_TREE;
7743
7744 /* Optimize round of constant value. */
7745 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7746 {
7747 REAL_VALUE_TYPE x;
7748
7749 x = TREE_REAL_CST (arg);
7750 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7751 {
7752 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7753 REAL_VALUE_TYPE r;
7754
7755 real_round (&r, TYPE_MODE (type), &x);
7756 return build_real (type, r);
7757 }
7758 }
7759
7760 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7761 }
7762
7763 /* Fold function call to builtin lround, lroundf or lroundl (or the
7764 corresponding long long versions) and other rounding functions. ARG
7765 is the argument to the call. Return NULL_TREE if no simplification
7766 can be made. */
7767
7768 static tree
7769 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7770 {
7771 if (!validate_arg (arg, REAL_TYPE))
7772 return NULL_TREE;
7773
7774 /* Optimize lround of constant value. */
7775 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7776 {
7777 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7778
7779 if (real_isfinite (&x))
7780 {
7781 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7782 tree ftype = TREE_TYPE (arg);
7783 REAL_VALUE_TYPE r;
7784 bool fail = false;
7785
7786 switch (DECL_FUNCTION_CODE (fndecl))
7787 {
7788 CASE_FLT_FN (BUILT_IN_IFLOOR):
7789 CASE_FLT_FN (BUILT_IN_LFLOOR):
7790 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7791 real_floor (&r, TYPE_MODE (ftype), &x);
7792 break;
7793
7794 CASE_FLT_FN (BUILT_IN_ICEIL):
7795 CASE_FLT_FN (BUILT_IN_LCEIL):
7796 CASE_FLT_FN (BUILT_IN_LLCEIL):
7797 real_ceil (&r, TYPE_MODE (ftype), &x);
7798 break;
7799
7800 CASE_FLT_FN (BUILT_IN_IROUND):
7801 CASE_FLT_FN (BUILT_IN_LROUND):
7802 CASE_FLT_FN (BUILT_IN_LLROUND):
7803 real_round (&r, TYPE_MODE (ftype), &x);
7804 break;
7805
7806 default:
7807 gcc_unreachable ();
7808 }
7809
7810 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
7811 if (!fail)
7812 return wide_int_to_tree (itype, val);
7813 }
7814 }
7815
7816 switch (DECL_FUNCTION_CODE (fndecl))
7817 {
7818 CASE_FLT_FN (BUILT_IN_LFLOOR):
7819 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7820 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7821 if (tree_expr_nonnegative_p (arg))
7822 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7823 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7824 break;
7825 default:;
7826 }
7827
7828 return fold_fixed_mathfn (loc, fndecl, arg);
7829 }
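/* Editorial examples (the constant folding performed above):

       lround (2.5)     folds to 3 (real_round ties away from zero)
       lfloor (-1.25)   folds to -2
       llround (1e30)   is not folded: real_to_integer reports failure
                        when the value does not fit the result type, so
                        the call is kept for the library and errno.  */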
7830
7831 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7832 and their long and long long variants (e.g. ffsl and ffsll). ARG is
7833 the argument to the call. Return NULL_TREE if no simplification can
7834 be made. */
7835
7836 static tree
7837 fold_builtin_bitop (tree fndecl, tree arg)
7838 {
7839 if (!validate_arg (arg, INTEGER_TYPE))
7840 return NULL_TREE;
7841
7842 /* Optimize for constant argument. */
7843 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7844 {
7845 tree type = TREE_TYPE (arg);
7846 int result;
7847
7848 switch (DECL_FUNCTION_CODE (fndecl))
7849 {
7850 CASE_INT_FN (BUILT_IN_FFS):
7851 result = wi::ffs (arg);
7852 break;
7853
7854 CASE_INT_FN (BUILT_IN_CLZ):
7855 if (wi::ne_p (arg, 0))
7856 result = wi::clz (arg);
7857 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7858 result = TYPE_PRECISION (type);
7859 break;
7860
7861 CASE_INT_FN (BUILT_IN_CTZ):
7862 if (wi::ne_p (arg, 0))
7863 result = wi::ctz (arg);
7864 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7865 result = TYPE_PRECISION (type);
7866 break;
7867
7868 CASE_INT_FN (BUILT_IN_CLRSB):
7869 result = wi::clrsb (arg);
7870 break;
7871
7872 CASE_INT_FN (BUILT_IN_POPCOUNT):
7873 result = wi::popcount (arg);
7874 break;
7875
7876 CASE_INT_FN (BUILT_IN_PARITY):
7877 result = wi::parity (arg);
7878 break;
7879
7880 default:
7881 gcc_unreachable ();
7882 }
7883
7884 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7885 }
7886
7887 return NULL_TREE;
7888 }
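/* Editorial examples (the constant folding performed above):

       __builtin_popcount (0xff)  folds to 8
       __builtin_ffs (0x10)       folds to 5 (1-based index of the
                                  lowest set bit)
       __builtin_clz (0)          folds to the target's defined value at
                                  zero if there is one, else to
                                  TYPE_PRECISION of the argument.  */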
7889
7890 /* Fold a function call to __builtin_bswap16, __builtin_bswap32 or
7891 __builtin_bswap64. Return NULL_TREE if no simplification can be made. */
7892 static tree
7893 fold_builtin_bswap (tree fndecl, tree arg)
7894 {
7895 if (! validate_arg (arg, INTEGER_TYPE))
7896 return NULL_TREE;
7897
7898 /* Optimize constant value. */
7899 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7900 {
7901 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7902
7903 switch (DECL_FUNCTION_CODE (fndecl))
7904 {
7905 case BUILT_IN_BSWAP16:
7906 case BUILT_IN_BSWAP32:
7907 case BUILT_IN_BSWAP64:
7908 {
7909 signop sgn = TYPE_SIGN (type);
7910 tree result =
7911 wide_int_to_tree (type,
7912 wide_int::from (arg, TYPE_PRECISION (type),
7913 sgn).bswap ());
7914 return result;
7915 }
7916 default:
7917 gcc_unreachable ();
7918 }
7919 }
7920
7921 return NULL_TREE;
7922 }
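/* Editorial example (the constant folding performed above):

       __builtin_bswap32 (0x12345678)   folds to 0x78563412

   wide_int::bswap reverses the bytes within the builtin's precision,
   which is why the argument is first widened or truncated to the
   precision of the return type.  */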
7923
7924 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7925 NULL_TREE if no simplification can be made. */
7926
7927 static tree
7928 fold_builtin_hypot (location_t loc, tree arg0, tree arg1, tree type)
7929 {
7930 tree res;
7931
7932 if (!validate_arg (arg0, REAL_TYPE)
7933 || !validate_arg (arg1, REAL_TYPE))
7934 return NULL_TREE;
7935
7936 /* Calculate the result when the argument is a constant. */
7937 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7938 return res;
7939
7940 /* If either argument is zero, hypot is fabs of the other. */
7941 if (real_zerop (arg0))
7942 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7943 else if (real_zerop (arg1))
7944 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7945
7946 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7947 if (flag_unsafe_math_optimizations
7948 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7949 return fold_build2_loc (loc, MULT_EXPR, type,
7950 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7951 build_real_truncate (type, dconst_sqrt2 ()));
7952
7953 return NULL_TREE;
7954 }
7955
7956
7957 /* Fold a builtin function call to pow, powf, or powl. Return
7958 NULL_TREE if no simplification can be made. */
7959 static tree
7960 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7961 {
7962 tree res;
7963
7964 if (!validate_arg (arg0, REAL_TYPE)
7965 || !validate_arg (arg1, REAL_TYPE))
7966 return NULL_TREE;
7967
7968 /* Calculate the result when the argument is a constant. */
7969 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7970 return res;
7971
7972 /* Optimize pow(1.0,y) = 1.0. */
7973 if (real_onep (arg0))
7974 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7975
7976 if (TREE_CODE (arg1) == REAL_CST
7977 && !TREE_OVERFLOW (arg1))
7978 {
7979 REAL_VALUE_TYPE cint;
7980 REAL_VALUE_TYPE c;
7981 HOST_WIDE_INT n;
7982
7983 c = TREE_REAL_CST (arg1);
7984
7985 /* Optimize pow(x,0.0) = 1.0. */
7986 if (real_equal (&c, &dconst0))
7987 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7988 arg0);
7989
7990 /* Optimize pow(x,1.0) = x. */
7991 if (real_equal (&c, &dconst1))
7992 return arg0;
7993
7994 /* Optimize pow(x,-1.0) = 1.0/x. */
7995 if (real_equal (&c, &dconstm1))
7996 return fold_build2_loc (loc, RDIV_EXPR, type,
7997 build_real (type, dconst1), arg0);
7998
7999 /* Optimize pow(x,0.5) = sqrt(x). */
8000 if (flag_unsafe_math_optimizations
8001 && real_equal (&c, &dconsthalf))
8002 {
8003 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8004
8005 if (sqrtfn != NULL_TREE)
8006 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8007 }
8008
8009 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8010 if (flag_unsafe_math_optimizations)
8011 {
8012 const REAL_VALUE_TYPE dconstroot
8013 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8014
8015 if (real_equal (&c, &dconstroot))
8016 {
8017 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8018 if (cbrtfn != NULL_TREE)
8019 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8020 }
8021 }
8022
8023 /* Check for an integer exponent. */
8024 n = real_to_integer (&c);
8025 real_from_integer (&cint, VOIDmode, n, SIGNED);
8026 if (real_identical (&c, &cint))
8027 {
8028 /* Attempt to evaluate pow at compile-time, unless this should
8029 raise an exception. */
8030 if (TREE_CODE (arg0) == REAL_CST
8031 && !TREE_OVERFLOW (arg0)
8032 && (n > 0
8033 || (!flag_trapping_math && !flag_errno_math)
8034 || !real_equal (&TREE_REAL_CST (arg0), &dconst0)))
8035 {
8036 REAL_VALUE_TYPE x;
8037 bool inexact;
8038
8039 x = TREE_REAL_CST (arg0);
8040 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8041 if (flag_unsafe_math_optimizations || !inexact)
8042 return build_real (type, x);
8043 }
8044 }
8045 }
8046
8047 if (flag_unsafe_math_optimizations)
8048 {
8049 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8050
8051 /* Optimize pow(expN(x),y) = expN(x*y). */
8052 if (BUILTIN_EXPONENT_P (fcode))
8053 {
8054 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8055 tree arg = CALL_EXPR_ARG (arg0, 0);
8056 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8057 return build_call_expr_loc (loc, expfn, 1, arg);
8058 }
8059
8060 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8061 if (BUILTIN_SQRT_P (fcode))
8062 {
8063 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8064 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8065 build_real (type, dconsthalf));
8066 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8067 }
8068
8069 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8070 if (BUILTIN_CBRT_P (fcode))
8071 {
8072 tree arg = CALL_EXPR_ARG (arg0, 0);
8073 if (tree_expr_nonnegative_p (arg))
8074 {
8075 tree c = build_real_truncate (type, dconst_third ());
8076 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1, c);
8077 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8078 }
8079 }
8080
8081 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8082 if (fcode == BUILT_IN_POW
8083 || fcode == BUILT_IN_POWF
8084 || fcode == BUILT_IN_POWL)
8085 {
8086 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8087 if (tree_expr_nonnegative_p (arg00))
8088 {
8089 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8090 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8091 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8092 }
8093 }
8094 }
8095
8096 return NULL_TREE;
8097 }
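/* Editorial examples (the unsafe-math rewrites above, as source):

       pow (exp (x), y)     =>  exp (x * y)
       pow (sqrt (x), y)    =>  pow (x, y * 0.5)
       pow (cbrt (x), y)    =>  pow (x, y / 3)     iff x >= 0
       pow (pow (x, y), z)  =>  pow (x, y * z)     iff x >= 0

   The nonnegativity guards on the last two avoid changing results for
   negative bases, where the nested and combined forms can disagree.  */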
8098
8099 /* Fold a builtin function call to powi, powif, or powil with arguments
8100 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8101 static tree
8102 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8103 tree arg0, tree arg1, tree type)
8104 {
8105 if (!validate_arg (arg0, REAL_TYPE)
8106 || !validate_arg (arg1, INTEGER_TYPE))
8107 return NULL_TREE;
8108
8109 /* Optimize pow(1.0,y) = 1.0. */
8110 if (real_onep (arg0))
8111 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8112
8113 if (tree_fits_shwi_p (arg1))
8114 {
8115 HOST_WIDE_INT c = tree_to_shwi (arg1);
8116
8117 /* Evaluate powi at compile-time. */
8118 if (TREE_CODE (arg0) == REAL_CST
8119 && !TREE_OVERFLOW (arg0))
8120 {
8121 REAL_VALUE_TYPE x;
8122 x = TREE_REAL_CST (arg0);
8123 real_powi (&x, TYPE_MODE (type), &x, c);
8124 return build_real (type, x);
8125 }
8126
8127 /* Optimize pow(x,0) = 1.0. */
8128 if (c == 0)
8129 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8130 arg0);
8131
8132 /* Optimize pow(x,1) = x. */
8133 if (c == 1)
8134 return arg0;
8135
8136 /* Optimize pow(x,-1) = 1.0/x. */
8137 if (c == -1)
8138 return fold_build2_loc (loc, RDIV_EXPR, type,
8139 build_real (type, dconst1), arg0);
8140 }
8141
8142 return NULL_TREE;
8143 }
8144
8145 /* A subroutine of fold_builtin to fold the various exponent
8146 functions. Return NULL_TREE if no simplification can be made.
8147 FUNC is the corresponding MPFR exponent function. */
8148
8149 static tree
8150 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8151 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8152 {
8153 if (validate_arg (arg, REAL_TYPE))
8154 {
8155 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8156 tree res;
8157
8158 /* Calculate the result when the argument is a constant. */
8159 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8160 return res;
8161
8162 /* Optimize expN(logN(x)) = x. */
8163 if (flag_unsafe_math_optimizations)
8164 {
8165 const enum built_in_function fcode = builtin_mathfn_code (arg);
8166
8167 if ((func == mpfr_exp
8168 && (fcode == BUILT_IN_LOG
8169 || fcode == BUILT_IN_LOGF
8170 || fcode == BUILT_IN_LOGL))
8171 || (func == mpfr_exp2
8172 && (fcode == BUILT_IN_LOG2
8173 || fcode == BUILT_IN_LOG2F
8174 || fcode == BUILT_IN_LOG2L))
8175 || (func == mpfr_exp10
8176 && (fcode == BUILT_IN_LOG10
8177 || fcode == BUILT_IN_LOG10F
8178 || fcode == BUILT_IN_LOG10L)))
8179 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8180 }
8181 }
8182
8183 return NULL_TREE;
8184 }
8185
8186 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8187 arguments to the call, and TYPE is its return type.
8188 Return NULL_TREE if no simplification can be made. */
8189
8190 static tree
8191 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8192 {
8193 if (!validate_arg (arg1, POINTER_TYPE)
8194 || !validate_arg (arg2, INTEGER_TYPE)
8195 || !validate_arg (len, INTEGER_TYPE))
8196 return NULL_TREE;
8197 else
8198 {
8199 const char *p1;
8200
8201 if (TREE_CODE (arg2) != INTEGER_CST
8202 || !tree_fits_uhwi_p (len))
8203 return NULL_TREE;
8204
8205 p1 = c_getstr (arg1);
8206 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8207 {
8208 char c;
8209 const char *r;
8210 tree tem;
8211
8212 if (target_char_cast (arg2, &c))
8213 return NULL_TREE;
8214
8215 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8216
8217 if (r == NULL)
8218 return build_int_cst (TREE_TYPE (arg1), 0);
8219
8220 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8221 return fold_convert_loc (loc, type, tem);
8222 }
8223 return NULL_TREE;
8224 }
8225 }
8226
8227 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8228 Return NULL_TREE if no simplification can be made. */
8229
8230 static tree
8231 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8232 {
8233 const char *p1, *p2;
8234
8235 if (!validate_arg (arg1, POINTER_TYPE)
8236 || !validate_arg (arg2, POINTER_TYPE)
8237 || !validate_arg (len, INTEGER_TYPE))
8238 return NULL_TREE;
8239
8240 /* If the LEN parameter is zero, return zero. */
8241 if (integer_zerop (len))
8242 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8243 arg1, arg2);
8244
8245 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8246 if (operand_equal_p (arg1, arg2, 0))
8247 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8248
8249 p1 = c_getstr (arg1);
8250 p2 = c_getstr (arg2);
8251
8252 /* If all arguments are constant, and the value of len is not greater
8253 than the lengths of arg1 and arg2, evaluate at compile-time. */
8254 if (tree_fits_uhwi_p (len) && p1 && p2
8255 && compare_tree_int (len, strlen (p1) + 1) <= 0
8256 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8257 {
8258 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8259
8260 if (r > 0)
8261 return integer_one_node;
8262 else if (r < 0)
8263 return integer_minus_one_node;
8264 else
8265 return integer_zero_node;
8266 }
8267
8268 /* If the len parameter is one, return an expression corresponding to
8269 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8270 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8271 {
8272 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8273 tree cst_uchar_ptr_node
8274 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8275
8276 tree ind1
8277 = fold_convert_loc (loc, integer_type_node,
8278 build1 (INDIRECT_REF, cst_uchar_node,
8279 fold_convert_loc (loc,
8280 cst_uchar_ptr_node,
8281 arg1)));
8282 tree ind2
8283 = fold_convert_loc (loc, integer_type_node,
8284 build1 (INDIRECT_REF, cst_uchar_node,
8285 fold_convert_loc (loc,
8286 cst_uchar_ptr_node,
8287 arg2)));
8288 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8289 }
8290
8291 return NULL_TREE;
8292 }
8293
8294 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8295 Return NULL_TREE if no simplification can be made. */
8296
8297 static tree
8298 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8299 {
8300 const char *p1, *p2;
8301
8302 if (!validate_arg (arg1, POINTER_TYPE)
8303 || !validate_arg (arg2, POINTER_TYPE))
8304 return NULL_TREE;
8305
8306 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8307 if (operand_equal_p (arg1, arg2, 0))
8308 return integer_zero_node;
8309
8310 p1 = c_getstr (arg1);
8311 p2 = c_getstr (arg2);
8312
8313 if (p1 && p2)
8314 {
8315 const int i = strcmp (p1, p2);
8316 if (i < 0)
8317 return integer_minus_one_node;
8318 else if (i > 0)
8319 return integer_one_node;
8320 else
8321 return integer_zero_node;
8322 }
8323
8324 /* If the second arg is "", return *(const unsigned char*)arg1. */
8325 if (p2 && *p2 == '\0')
8326 {
8327 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8328 tree cst_uchar_ptr_node
8329 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8330
8331 return fold_convert_loc (loc, integer_type_node,
8332 build1 (INDIRECT_REF, cst_uchar_node,
8333 fold_convert_loc (loc,
8334 cst_uchar_ptr_node,
8335 arg1)));
8336 }
8337
8338 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8339 if (p1 && *p1 == '\0')
8340 {
8341 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8342 tree cst_uchar_ptr_node
8343 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8344
8345 tree temp
8346 = fold_convert_loc (loc, integer_type_node,
8347 build1 (INDIRECT_REF, cst_uchar_node,
8348 fold_convert_loc (loc,
8349 cst_uchar_ptr_node,
8350 arg2)));
8351 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8352 }
8353
8354 return NULL_TREE;
8355 }
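/* Editorial examples (follow from the cases above):

       strcmp ("abc", "abd")   folds to -1 (sign of the host strcmp)
       strcmp (s, "")          becomes *(const unsigned char *) s
       strcmp ("", s)          becomes -*(const unsigned char *) s

   The unsigned char accesses mirror the C standard's requirement that
   strcmp compare characters as unsigned char.  */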
8356
8357 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8358 Return NULL_TREE if no simplification can be made. */
8359
8360 static tree
8361 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8362 {
8363 const char *p1, *p2;
8364
8365 if (!validate_arg (arg1, POINTER_TYPE)
8366 || !validate_arg (arg2, POINTER_TYPE)
8367 || !validate_arg (len, INTEGER_TYPE))
8368 return NULL_TREE;
8369
8370 /* If the LEN parameter is zero, return zero. */
8371 if (integer_zerop (len))
8372 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8373 arg1, arg2);
8374
8375 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8376 if (operand_equal_p (arg1, arg2, 0))
8377 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8378
8379 p1 = c_getstr (arg1);
8380 p2 = c_getstr (arg2);
8381
8382 if (tree_fits_uhwi_p (len) && p1 && p2)
8383 {
8384 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8385 if (i > 0)
8386 return integer_one_node;
8387 else if (i < 0)
8388 return integer_minus_one_node;
8389 else
8390 return integer_zero_node;
8391 }
8392
8393 /* If the second arg is "", and the length is greater than zero,
8394 return *(const unsigned char*)arg1. */
8395 if (p2 && *p2 == '\0'
8396 && TREE_CODE (len) == INTEGER_CST
8397 && tree_int_cst_sgn (len) == 1)
8398 {
8399 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8400 tree cst_uchar_ptr_node
8401 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8402
8403 return fold_convert_loc (loc, integer_type_node,
8404 build1 (INDIRECT_REF, cst_uchar_node,
8405 fold_convert_loc (loc,
8406 cst_uchar_ptr_node,
8407 arg1)));
8408 }
8409
8410 /* If the first arg is "", and the length is greater than zero,
8411 return -*(const unsigned char*)arg2. */
8412 if (p1 && *p1 == '\0'
8413 && TREE_CODE (len) == INTEGER_CST
8414 && tree_int_cst_sgn (len) == 1)
8415 {
8416 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8417 tree cst_uchar_ptr_node
8418 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8419
8420 tree temp = fold_convert_loc (loc, integer_type_node,
8421 build1 (INDIRECT_REF, cst_uchar_node,
8422 fold_convert_loc (loc,
8423 cst_uchar_ptr_node,
8424 arg2)));
8425 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8426 }
8427
8428 /* If the len parameter is one, return an expression corresponding to
8429 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8430 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8431 {
8432 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8433 tree cst_uchar_ptr_node
8434 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8435
8436 tree ind1 = fold_convert_loc (loc, integer_type_node,
8437 build1 (INDIRECT_REF, cst_uchar_node,
8438 fold_convert_loc (loc,
8439 cst_uchar_ptr_node,
8440 arg1)));
8441 tree ind2 = fold_convert_loc (loc, integer_type_node,
8442 build1 (INDIRECT_REF, cst_uchar_node,
8443 fold_convert_loc (loc,
8444 cst_uchar_ptr_node,
8445 arg2)));
8446 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8447 }
8448
8449 return NULL_TREE;
8450 }
8451
8452 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8453 ARG. Return NULL_TREE if no simplification can be made. */
8454
8455 static tree
8456 fold_builtin_signbit (location_t loc, tree arg, tree type)
8457 {
8458 if (!validate_arg (arg, REAL_TYPE))
8459 return NULL_TREE;
8460
8461 /* If ARG is a compile-time constant, determine the result. */
8462 if (TREE_CODE (arg) == REAL_CST
8463 && !TREE_OVERFLOW (arg))
8464 {
8465 REAL_VALUE_TYPE c;
8466
8467 c = TREE_REAL_CST (arg);
8468 return (REAL_VALUE_NEGATIVE (c)
8469 ? build_one_cst (type)
8470 : build_zero_cst (type));
8471 }
8472
8473 /* If ARG is non-negative, the result is always zero. */
8474 if (tree_expr_nonnegative_p (arg))
8475 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8476
8477 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8478 if (!HONOR_SIGNED_ZEROS (arg))
8479 return fold_convert (type,
8480 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8481 build_real (TREE_TYPE (arg), dconst0)));
8482
8483 return NULL_TREE;
8484 }
8485
8486 /* Fold function call to builtin copysign, copysignf or copysignl with
8487 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8488 be made. */
8489
8490 static tree
8491 fold_builtin_copysign (location_t loc, tree arg1, tree arg2, tree type)
8492 {
8493 if (!validate_arg (arg1, REAL_TYPE)
8494 || !validate_arg (arg2, REAL_TYPE))
8495 return NULL_TREE;
8496
8497 /* copysign(X,X) is X. */
8498 if (operand_equal_p (arg1, arg2, 0))
8499 return fold_convert_loc (loc, type, arg1);
8500
8501 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8502 if (TREE_CODE (arg1) == REAL_CST
8503 && TREE_CODE (arg2) == REAL_CST
8504 && !TREE_OVERFLOW (arg1)
8505 && !TREE_OVERFLOW (arg2))
8506 {
8507 REAL_VALUE_TYPE c1, c2;
8508
8509 c1 = TREE_REAL_CST (arg1);
8510 c2 = TREE_REAL_CST (arg2);
8511 /* c1.sign := c2.sign. */
8512 real_copysign (&c1, &c2);
8513 return build_real (type, c1);
8514 }
8515
8516 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8517 Remember to evaluate Y for side-effects. */
8518 if (tree_expr_nonnegative_p (arg2))
8519 return omit_one_operand_loc (loc, type,
8520 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8521 arg2);
8522
8523 return NULL_TREE;
8524 }
8525
8526 /* Fold a call to builtin isascii with argument ARG. */
8527
8528 static tree
8529 fold_builtin_isascii (location_t loc, tree arg)
8530 {
8531 if (!validate_arg (arg, INTEGER_TYPE))
8532 return NULL_TREE;
8533 else
8534 {
8535 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8536 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8537 build_int_cst (integer_type_node,
8538 ~ (unsigned HOST_WIDE_INT) 0x7f));
8539 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8540 arg, integer_zero_node);
8541 }
8542 }
8543
8544 /* Fold a call to builtin toascii with argument ARG. */
8545
8546 static tree
8547 fold_builtin_toascii (location_t loc, tree arg)
8548 {
8549 if (!validate_arg (arg, INTEGER_TYPE))
8550 return NULL_TREE;
8551
8552 /* Transform toascii(c) -> (c & 0x7f). */
8553 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8554 build_int_cst (integer_type_node, 0x7f));
8555 }
8556
8557 /* Fold a call to builtin isdigit with argument ARG. */
8558
8559 static tree
8560 fold_builtin_isdigit (location_t loc, tree arg)
8561 {
8562 if (!validate_arg (arg, INTEGER_TYPE))
8563 return NULL_TREE;
8564 else
8565 {
8566 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8567 /* According to the C standard, isdigit is unaffected by locale.
8568 However, it definitely is affected by the target character set. */
8569 unsigned HOST_WIDE_INT target_digit0
8570 = lang_hooks.to_target_charset ('0');
8571
8572 if (target_digit0 == 0)
8573 return NULL_TREE;
8574
8575 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8576 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8577 build_int_cst (unsigned_type_node, target_digit0));
8578 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8579 build_int_cst (unsigned_type_node, 9));
8580 }
8581 }
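/* Editorial note (the rewrite above, as source; it relies on the digit
   characters '0'..'9' being contiguous, which the C standard
   guarantees):

       isdigit (c)  =>  (unsigned) c - '0' <= 9

   One unsigned subtract-and-compare replaces the ctype lookup, and it
   is correct for c < '0' as well, since the subtraction then wraps to a
   value necessarily greater than 9.  */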
8582
8583 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8584
8585 static tree
8586 fold_builtin_fabs (location_t loc, tree arg, tree type)
8587 {
8588 if (!validate_arg (arg, REAL_TYPE))
8589 return NULL_TREE;
8590
8591 arg = fold_convert_loc (loc, type, arg);
8592 if (TREE_CODE (arg) == REAL_CST)
8593 return fold_abs_const (arg, type);
8594 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8595 }
8596
8597 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8598
8599 static tree
8600 fold_builtin_abs (location_t loc, tree arg, tree type)
8601 {
8602 if (!validate_arg (arg, INTEGER_TYPE))
8603 return NULL_TREE;
8604
8605 arg = fold_convert_loc (loc, type, arg);
8606 if (TREE_CODE (arg) == INTEGER_CST)
8607 return fold_abs_const (arg, type);
8608 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8609 }
8610
8611 /* Fold a fma operation with arguments ARG[012]. */
8612
8613 tree
8614 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8615 tree type, tree arg0, tree arg1, tree arg2)
8616 {
8617 if (TREE_CODE (arg0) == REAL_CST
8618 && TREE_CODE (arg1) == REAL_CST
8619 && TREE_CODE (arg2) == REAL_CST)
8620 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8621
8622 return NULL_TREE;
8623 }
8624
8625 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8626
8627 static tree
8628 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8629 {
8630 if (validate_arg (arg0, REAL_TYPE)
8631 && validate_arg (arg1, REAL_TYPE)
8632 && validate_arg (arg2, REAL_TYPE))
8633 {
8634 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8635 if (tem)
8636 return tem;
8637
8638 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8639 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8640 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8641 }
8642 return NULL_TREE;
8643 }
8644
8645 /* Fold a call to builtin fmin or fmax. */
8646
8647 static tree
8648 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8649 tree type, bool max)
8650 {
8651 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8652 {
8653 /* Calculate the result when the argument is a constant. */
8654 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8655
8656 if (res)
8657 return res;
8658
8659 /* If either argument is NaN, return the other one. Avoid the
8660 transformation if we get (and honor) a signalling NaN. Using
8661 omit_one_operand() ensures we create a non-lvalue. */
8662 if (TREE_CODE (arg0) == REAL_CST
8663 && real_isnan (&TREE_REAL_CST (arg0))
8664 && (! HONOR_SNANS (arg0)
8665 || ! TREE_REAL_CST (arg0).signalling))
8666 return omit_one_operand_loc (loc, type, arg1, arg0);
8667 if (TREE_CODE (arg1) == REAL_CST
8668 && real_isnan (&TREE_REAL_CST (arg1))
8669 && (! HONOR_SNANS (arg1)
8670 || ! TREE_REAL_CST (arg1).signalling))
8671 return omit_one_operand_loc (loc, type, arg0, arg1);
8672
8673 /* Transform fmin/fmax(x,x) -> x. */
8674 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8675 return omit_one_operand_loc (loc, type, arg0, arg1);
8676
8677 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8678 functions to return the numeric arg if the other one is NaN.
8679 These tree codes don't honor that, so only transform if
8680 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8681 handled, so we don't have to worry about it either. */
8682 if (flag_finite_math_only)
8683 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8684 fold_convert_loc (loc, type, arg0),
8685 fold_convert_loc (loc, type, arg1));
8686 }
8687 return NULL_TREE;
8688 }
8689
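/* For example, fmax (x, __builtin_nan ("")) folds to x (the quiet
   NaN operand is dropped), fmin (x, x) folds to x, and with
   -ffinite-math-only fmin (x, y) becomes MIN_EXPR <x, y>. */
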
8690 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8691
8692 static tree
8693 fold_builtin_carg (location_t loc, tree arg, tree type)
8694 {
8695 if (validate_arg (arg, COMPLEX_TYPE)
8696 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8697 {
8698 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8699
8700 if (atan2_fn)
8701 {
8702 tree new_arg = builtin_save_expr (arg);
8703 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8704 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8705 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8706 }
8707 }
8708
8709 return NULL_TREE;
8710 }
8711
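/* I.e. carg (z) is rewritten as atan2 (__imag__ z, __real__ z),
   with z wrapped in a SAVE_EXPR so it is evaluated only once. */
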
8712 /* Fold a call to builtin logb/ilogb. */
8713
8714 static tree
8715 fold_builtin_logb (location_t loc, tree arg, tree rettype)
8716 {
8717 if (! validate_arg (arg, REAL_TYPE))
8718 return NULL_TREE;
8719
8720 STRIP_NOPS (arg);
8721
8722 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8723 {
8724 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8725
8726 switch (value->cl)
8727 {
8728 case rvc_nan:
8729 case rvc_inf:
8730 /* If arg is Inf or NaN and we're logb, return it. */
8731 if (TREE_CODE (rettype) == REAL_TYPE)
8732 {
8733 /* For logb(-Inf) we have to return +Inf. */
8734 if (real_isinf (value) && real_isneg (value))
8735 {
8736 REAL_VALUE_TYPE tem;
8737 real_inf (&tem);
8738 return build_real (rettype, tem);
8739 }
8740 return fold_convert_loc (loc, rettype, arg);
8741 }
8742 /* Fall through... */
8743 case rvc_zero:
8744 /* For logb, zero may set errno and/or raise an exception; for
8745 ilogb we don't know the value of FP_ILOGB0. */
8746 return NULL_TREE;
8747 case rvc_normal:
8748 /* For normal numbers, proceed iff radix == 2. In GCC,
8749 normalized significands are in the range [0.5, 1.0). We
8750 want the exponent as if they were [1.0, 2.0) so get the
8751 exponent and subtract 1. */
8752 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8753 return fold_convert_loc (loc, rettype,
8754 build_int_cst (integer_type_node,
8755 REAL_EXP (value)-1));
8756 break;
8757 }
8758 }
8759
8760 return NULL_TREE;
8761 }
8762
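/* Worked example: GCC stores 8.0 as 0.5 * 2**4, so REAL_EXP is 4
   and logb (8.0) folds to 4 - 1 == 3, matching 8.0 == 1.0 * 2**3. */
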
8763 /* Fold a call to builtin significand, if radix == 2. */
8764
8765 static tree
8766 fold_builtin_significand (location_t loc, tree arg, tree rettype)
8767 {
8768 if (! validate_arg (arg, REAL_TYPE))
8769 return NULL_TREE;
8770
8771 STRIP_NOPS (arg);
8772
8773 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8774 {
8775 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8776
8777 switch (value->cl)
8778 {
8779 case rvc_zero:
8780 case rvc_nan:
8781 case rvc_inf:
8782 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8783 return fold_convert_loc (loc, rettype, arg);
8784 case rvc_normal:
8785 /* For normal numbers, proceed iff radix == 2. */
8786 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8787 {
8788 REAL_VALUE_TYPE result = *value;
8789 /* In GCC, normalized significands are in the range [0.5,
8790 1.0). We want them to be [1.0, 2.0) so set the
8791 exponent to 1. */
8792 SET_REAL_EXP (&result, 1);
8793 return build_real (rettype, result);
8794 }
8795 break;
8796 }
8797 }
8798
8799 return NULL_TREE;
8800 }
8801
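/* Worked example: 8.0 is stored as 0.5 * 2**4; forcing the exponent
   to 1 yields 0.5 * 2**1, so significand (8.0) folds to 1.0. */
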
8802 /* Fold a call to builtin frexp; we can assume the base is 2. */
8803
8804 static tree
8805 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8806 {
8807 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8808 return NULL_TREE;
8809
8810 STRIP_NOPS (arg0);
8811
8812 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8813 return NULL_TREE;
8814
8815 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8816
8817 /* Proceed if a valid pointer type was passed in. */
8818 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8819 {
8820 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8821 tree frac, exp;
8822
8823 switch (value->cl)
8824 {
8825 case rvc_zero:
8826 /* For +-0, return (*exp = 0, +-0). */
8827 exp = integer_zero_node;
8828 frac = arg0;
8829 break;
8830 case rvc_nan:
8831 case rvc_inf:
8832 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8833 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8834 case rvc_normal:
8835 {
8836 /* Since the frexp function always expects base 2, and in
8837 GCC normalized significands are already in the range
8838 [0.5, 1.0), we have exactly what frexp wants. */
8839 REAL_VALUE_TYPE frac_rvt = *value;
8840 SET_REAL_EXP (&frac_rvt, 0);
8841 frac = build_real (rettype, frac_rvt);
8842 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8843 }
8844 break;
8845 default:
8846 gcc_unreachable ();
8847 }
8848
8849 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8850 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8851 TREE_SIDE_EFFECTS (arg1) = 1;
8852 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8853 }
8854
8855 return NULL_TREE;
8856 }
8857
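/* Worked example: for frexp (8.0, &e), 8.0 is already stored as
   0.5 * 2**4, so the folded result is the COMPOUND_EXPR
   (*e = 4, 0.5). */
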
8858 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8859 then we can assume the base is two. If it's false, then we have to
8860 check the mode of the TYPE parameter in certain cases. */
8861
8862 static tree
8863 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
8864 tree type, bool ldexp)
8865 {
8866 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
8867 {
8868 STRIP_NOPS (arg0);
8869 STRIP_NOPS (arg1);
8870
8871 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
8872 if (real_zerop (arg0) || integer_zerop (arg1)
8873 || (TREE_CODE (arg0) == REAL_CST
8874 && !real_isfinite (&TREE_REAL_CST (arg0))))
8875 return omit_one_operand_loc (loc, type, arg0, arg1);
8876
8877 /* If both arguments are constant, then try to evaluate it. */
8878 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
8879 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
8880 && tree_fits_shwi_p (arg1))
8881 {
8882 /* Bound the maximum adjustment to twice the range of the
8883 mode's valid exponents. Use labs to ensure the range is
8884 positive as a sanity check. */
8885 const long max_exp_adj = 2 *
8886 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
8887 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
8888
8889 /* Get the user-requested adjustment. */
8890 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
8891
8892 /* The requested adjustment must be inside this range. This
8893 is a preliminary cap to avoid things like overflow; we
8894 may still fail to compute the result for other reasons. */
8895 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
8896 {
8897 REAL_VALUE_TYPE initial_result;
8898
8899 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
8900
8901 /* Ensure we didn't overflow. */
8902 if (! real_isinf (&initial_result))
8903 {
8904 const REAL_VALUE_TYPE trunc_result
8905 = real_value_truncate (TYPE_MODE (type), initial_result);
8906
8907 /* Only proceed if the target mode can hold the
8908 resulting value. */
8909 if (real_equal (&initial_result, &trunc_result))
8910 return build_real (type, trunc_result);
8911 }
8912 }
8913 }
8914 }
8915
8916 return NULL_TREE;
8917 }
8918
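/* For IEEE double (emax == 1024, emin == -1021) the cap max_exp_adj
   is 2 * (1024 - -1021) == 4090, so e.g. ldexp (1.0, 10) folds to
   1024.0 while ldexp (1.0, 100000) is left for the library. */
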
8919 /* Fold a call to builtin modf. */
8920
8921 static tree
8922 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8923 {
8924 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8925 return NULL_TREE;
8926
8927 STRIP_NOPS (arg0);
8928
8929 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8930 return NULL_TREE;
8931
8932 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8933
8934 /* Proceed if a valid pointer type was passed in. */
8935 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8936 {
8937 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8938 REAL_VALUE_TYPE trunc, frac;
8939
8940 switch (value->cl)
8941 {
8942 case rvc_nan:
8943 case rvc_zero:
8944 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8945 trunc = frac = *value;
8946 break;
8947 case rvc_inf:
8948 /* For +-Inf, return (*arg1 = arg0, +-0). */
8949 frac = dconst0;
8950 frac.sign = value->sign;
8951 trunc = *value;
8952 break;
8953 case rvc_normal:
8954 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8955 real_trunc (&trunc, VOIDmode, value);
8956 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8957 /* If the original number was negative and already
8958 integral, then the fractional part is -0.0. */
8959 if (value->sign && frac.cl == rvc_zero)
8960 frac.sign = value->sign;
8961 break;
8962 }
8963
8964 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8965 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8966 build_real (rettype, trunc));
8967 TREE_SIDE_EFFECTS (arg1) = 1;
8968 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8969 build_real (rettype, frac));
8970 }
8971
8972 return NULL_TREE;
8973 }
8974
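/* Worked example: modf (-3.5, &iptr) folds to the COMPOUND_EXPR
   (*iptr = -3.0, -0.5), and modf (-2.0, &iptr) to (*iptr = -2.0, -0.0),
   keeping the sign on the zero fractional part. */
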
8975 /* Given a location LOC, an interclass builtin function decl FNDECL
8976 and its single argument ARG, return a folded expression computing
8977 the same, or NULL_TREE if we either couldn't or didn't want to fold
8978 (the latter happens if there's an RTL instruction available). */
8979
8980 static tree
8981 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8982 {
8983 machine_mode mode;
8984
8985 if (!validate_arg (arg, REAL_TYPE))
8986 return NULL_TREE;
8987
8988 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8989 return NULL_TREE;
8990
8991 mode = TYPE_MODE (TREE_TYPE (arg));
8992
8993 /* If there is no optab, try generic code. */
8994 switch (DECL_FUNCTION_CODE (fndecl))
8995 {
8996 tree result;
8997
8998 CASE_FLT_FN (BUILT_IN_ISINF):
8999 {
9000 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9001 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9002 tree const type = TREE_TYPE (arg);
9003 REAL_VALUE_TYPE r;
9004 char buf[128];
9005
9006 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9007 real_from_string (&r, buf);
9008 result = build_call_expr (isgr_fn, 2,
9009 fold_build1_loc (loc, ABS_EXPR, type, arg),
9010 build_real (type, r));
9011 return result;
9012 }
9013 CASE_FLT_FN (BUILT_IN_FINITE):
9014 case BUILT_IN_ISFINITE:
9015 {
9016 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9017 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9018 tree const type = TREE_TYPE (arg);
9019 REAL_VALUE_TYPE r;
9020 char buf[128];
9021
9022 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9023 real_from_string (&r, buf);
9024 result = build_call_expr (isle_fn, 2,
9025 fold_build1_loc (loc, ABS_EXPR, type, arg),
9026 build_real (type, r));
9027 /*result = fold_build2_loc (loc, UNGT_EXPR,
9028 TREE_TYPE (TREE_TYPE (fndecl)),
9029 fold_build1_loc (loc, ABS_EXPR, type, arg),
9030 build_real (type, r));
9031 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9032 TREE_TYPE (TREE_TYPE (fndecl)),
9033 result);*/
9034 return result;
9035 }
9036 case BUILT_IN_ISNORMAL:
9037 {
9038 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9039 islessequal(fabs(x),DBL_MAX). */
9040 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9041 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9042 tree const type = TREE_TYPE (arg);
9043 REAL_VALUE_TYPE rmax, rmin;
9044 char buf[128];
9045
9046 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9047 real_from_string (&rmax, buf);
9048 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9049 real_from_string (&rmin, buf);
9050 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9051 result = build_call_expr (isle_fn, 2, arg,
9052 build_real (type, rmax));
9053 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9054 build_call_expr (isge_fn, 2, arg,
9055 build_real (type, rmin)));
9056 return result;
9057 }
9058 default:
9059 break;
9060 }
9061
9062 return NULL_TREE;
9063 }
9064
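/* For IEEE double, e.g., the ISNORMAL expansion above compares
   fabs (x) against DBL_MAX and against 0x1p-1022 (DBL_MIN), both
   materialized as hex-float constants derived from the mode. */
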
9065 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9066 ARG is the argument for the call. */
9067
9068 static tree
9069 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9070 {
9071 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9072 REAL_VALUE_TYPE r;
9073
9074 if (!validate_arg (arg, REAL_TYPE))
9075 return NULL_TREE;
9076
9077 switch (builtin_index)
9078 {
9079 case BUILT_IN_ISINF:
9080 if (!HONOR_INFINITIES (arg))
9081 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9082
9083 if (TREE_CODE (arg) == REAL_CST)
9084 {
9085 r = TREE_REAL_CST (arg);
9086 if (real_isinf (&r))
9087 return real_compare (GT_EXPR, &r, &dconst0)
9088 ? integer_one_node : integer_minus_one_node;
9089 else
9090 return integer_zero_node;
9091 }
9092
9093 return NULL_TREE;
9094
9095 case BUILT_IN_ISINF_SIGN:
9096 {
9097 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9098 /* In a boolean context, GCC will fold the inner COND_EXPR to
9099 1. So e.g. "if (isinf_sign(x))" would be folded to just
9100 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9101 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9102 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9103 tree tmp = NULL_TREE;
9104
9105 arg = builtin_save_expr (arg);
9106
9107 if (signbit_fn && isinf_fn)
9108 {
9109 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9110 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9111
9112 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9113 signbit_call, integer_zero_node);
9114 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9115 isinf_call, integer_zero_node);
9116
9117 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9118 integer_minus_one_node, integer_one_node);
9119 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9120 isinf_call, tmp,
9121 integer_zero_node);
9122 }
9123
9124 return tmp;
9125 }
9126
9127 case BUILT_IN_ISFINITE:
9128 if (!HONOR_NANS (arg)
9129 && !HONOR_INFINITIES (arg))
9130 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9131
9132 if (TREE_CODE (arg) == REAL_CST)
9133 {
9134 r = TREE_REAL_CST (arg);
9135 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9136 }
9137
9138 return NULL_TREE;
9139
9140 case BUILT_IN_ISNAN:
9141 if (!HONOR_NANS (arg))
9142 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9143
9144 if (TREE_CODE (arg) == REAL_CST)
9145 {
9146 r = TREE_REAL_CST (arg);
9147 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9148 }
9149
9150 arg = builtin_save_expr (arg);
9151 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9152
9153 default:
9154 gcc_unreachable ();
9155 }
9156 }
9157
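/* Examples: isinf (-HUGE_VAL) folds to -1 and isnan (__builtin_nan (""))
   folds to 1 outright; for a non-constant x, isnan (x) becomes the
   non-trapping self-comparison x UNORDERED x. */
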
9158 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9159 This builtin will generate code to return the appropriate floating
9160 point classification depending on the value of the floating point
9161 number passed in. The possible return values must be supplied as
9162 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9163 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
9164 one floating point argument, which is "type generic". */
9165
9166 static tree
9167 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9168 {
9169 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9170 arg, type, res, tmp;
9171 machine_mode mode;
9172 REAL_VALUE_TYPE r;
9173 char buf[128];
9174
9175 /* Verify the required arguments in the original call. */
9176 if (nargs != 6
9177 || !validate_arg (args[0], INTEGER_TYPE)
9178 || !validate_arg (args[1], INTEGER_TYPE)
9179 || !validate_arg (args[2], INTEGER_TYPE)
9180 || !validate_arg (args[3], INTEGER_TYPE)
9181 || !validate_arg (args[4], INTEGER_TYPE)
9182 || !validate_arg (args[5], REAL_TYPE))
9183 return NULL_TREE;
9184
9185 fp_nan = args[0];
9186 fp_infinite = args[1];
9187 fp_normal = args[2];
9188 fp_subnormal = args[3];
9189 fp_zero = args[4];
9190 arg = args[5];
9191 type = TREE_TYPE (arg);
9192 mode = TYPE_MODE (type);
9193 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9194
9195 /* fpclassify(x) ->
9196 isnan(x) ? FP_NAN :
9197 (fabs(x) == Inf ? FP_INFINITE :
9198 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9199 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9200
9201 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9202 build_real (type, dconst0));
9203 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9204 tmp, fp_zero, fp_subnormal);
9205
9206 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9207 real_from_string (&r, buf);
9208 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9209 arg, build_real (type, r));
9210 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9211
9212 if (HONOR_INFINITIES (mode))
9213 {
9214 real_inf (&r);
9215 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9216 build_real (type, r));
9217 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9218 fp_infinite, res);
9219 }
9220
9221 if (HONOR_NANS (mode))
9222 {
9223 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9224 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9225 }
9226
9227 return res;
9228 }
9229
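/* For IEEE double the result is the nested conditional
   ORDERED (x, x) ? (fabs (x) == Inf ? FP_INFINITE
   : fabs (x) >= 0x1p-1022 ? FP_NORMAL
   : fabs (x) == 0.0 ? FP_ZERO : FP_SUBNORMAL) : FP_NAN,
   built above from the innermost test outwards. */
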
9230 /* Fold a call to an unordered comparison function such as
9231 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9232 being called and ARG0 and ARG1 are the arguments for the call.
9233 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9234 the opposite of the desired result. UNORDERED_CODE is used
9235 for modes that can hold NaNs and ORDERED_CODE is used for
9236 the rest. */
9237
9238 static tree
9239 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9240 enum tree_code unordered_code,
9241 enum tree_code ordered_code)
9242 {
9243 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9244 enum tree_code code;
9245 tree type0, type1;
9246 enum tree_code code0, code1;
9247 tree cmp_type = NULL_TREE;
9248
9249 type0 = TREE_TYPE (arg0);
9250 type1 = TREE_TYPE (arg1);
9251
9252 code0 = TREE_CODE (type0);
9253 code1 = TREE_CODE (type1);
9254
9255 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9256 /* Choose the wider of two real types. */
9257 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9258 ? type0 : type1;
9259 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9260 cmp_type = type0;
9261 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9262 cmp_type = type1;
9263
9264 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9265 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9266
9267 if (unordered_code == UNORDERED_EXPR)
9268 {
9269 if (!HONOR_NANS (arg0))
9270 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9271 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9272 }
9273
9274 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9275 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9276 fold_build2_loc (loc, code, type, arg0, arg1));
9277 }
9278
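/* E.g. isgreater (x, y) folds to !(x UNLE y): negating the inverse
   unordered comparison gives the desired result without raising an
   exception on quiet NaN operands; when NaNs need not be honored,
   the plain !(x <= y) form is used instead. */
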
9279 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9280 arithmetic if it can never overflow, or into internal functions that
9281 return both the arithmetic result and an overflow flag in a complex
9282 integer result, or into some other check for overflow. */
9283
9284 static tree
9285 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9286 tree arg0, tree arg1, tree arg2)
9287 {
9288 enum internal_fn ifn = IFN_LAST;
9289 tree type = TREE_TYPE (TREE_TYPE (arg2));
9290 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9291 switch (fcode)
9292 {
9293 case BUILT_IN_ADD_OVERFLOW:
9294 case BUILT_IN_SADD_OVERFLOW:
9295 case BUILT_IN_SADDL_OVERFLOW:
9296 case BUILT_IN_SADDLL_OVERFLOW:
9297 case BUILT_IN_UADD_OVERFLOW:
9298 case BUILT_IN_UADDL_OVERFLOW:
9299 case BUILT_IN_UADDLL_OVERFLOW:
9300 ifn = IFN_ADD_OVERFLOW;
9301 break;
9302 case BUILT_IN_SUB_OVERFLOW:
9303 case BUILT_IN_SSUB_OVERFLOW:
9304 case BUILT_IN_SSUBL_OVERFLOW:
9305 case BUILT_IN_SSUBLL_OVERFLOW:
9306 case BUILT_IN_USUB_OVERFLOW:
9307 case BUILT_IN_USUBL_OVERFLOW:
9308 case BUILT_IN_USUBLL_OVERFLOW:
9309 ifn = IFN_SUB_OVERFLOW;
9310 break;
9311 case BUILT_IN_MUL_OVERFLOW:
9312 case BUILT_IN_SMUL_OVERFLOW:
9313 case BUILT_IN_SMULL_OVERFLOW:
9314 case BUILT_IN_SMULLL_OVERFLOW:
9315 case BUILT_IN_UMUL_OVERFLOW:
9316 case BUILT_IN_UMULL_OVERFLOW:
9317 case BUILT_IN_UMULLL_OVERFLOW:
9318 ifn = IFN_MUL_OVERFLOW;
9319 break;
9320 default:
9321 gcc_unreachable ();
9322 }
9323 tree ctype = build_complex_type (type);
9324 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9325 2, arg0, arg1);
9326 tree tgt = save_expr (call);
9327 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9328 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9329 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9330 tree store
9331 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9332 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9333 }
9334
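/* E.g. __builtin_add_overflow (a, b, &r) becomes, in effect,
     tmp = .ADD_OVERFLOW (a, b);
     (*r = REALPART_EXPR <tmp>, (bool) IMAGPART_EXPR <tmp>);
   so a single internal call yields both the sum and the flag. */
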
9335 /* Fold a call to built-in function FNDECL with 0 arguments.
9336 This function returns NULL_TREE if no simplification was possible. */
9337
9338 static tree
9339 fold_builtin_0 (location_t loc, tree fndecl)
9340 {
9341 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9342 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9343 switch (fcode)
9344 {
9345 CASE_FLT_FN (BUILT_IN_INF):
9346 case BUILT_IN_INFD32:
9347 case BUILT_IN_INFD64:
9348 case BUILT_IN_INFD128:
9349 return fold_builtin_inf (loc, type, true);
9350
9351 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9352 return fold_builtin_inf (loc, type, false);
9353
9354 case BUILT_IN_CLASSIFY_TYPE:
9355 return fold_builtin_classify_type (NULL_TREE);
9356
9357 default:
9358 break;
9359 }
9360 return NULL_TREE;
9361 }
9362
9363 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9364 This function returns NULL_TREE if no simplification was possible. */
9365
9366 static tree
9367 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9368 {
9369 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9370 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9371 switch (fcode)
9372 {
9373 case BUILT_IN_CONSTANT_P:
9374 {
9375 tree val = fold_builtin_constant_p (arg0);
9376
9377 /* Gimplification will pull the CALL_EXPR for the builtin out of
9378 an if condition. When not optimizing, we'll not CSE it back.
9379 To avoid link-error regressions, return false now. */
9380 if (!val && !optimize)
9381 val = integer_zero_node;
9382
9383 return val;
9384 }
9385
9386 case BUILT_IN_CLASSIFY_TYPE:
9387 return fold_builtin_classify_type (arg0);
9388
9389 case BUILT_IN_STRLEN:
9390 return fold_builtin_strlen (loc, type, arg0);
9391
9392 CASE_FLT_FN (BUILT_IN_FABS):
9393 case BUILT_IN_FABSD32:
9394 case BUILT_IN_FABSD64:
9395 case BUILT_IN_FABSD128:
9396 return fold_builtin_fabs (loc, arg0, type);
9397
9398 case BUILT_IN_ABS:
9399 case BUILT_IN_LABS:
9400 case BUILT_IN_LLABS:
9401 case BUILT_IN_IMAXABS:
9402 return fold_builtin_abs (loc, arg0, type);
9403
9404 CASE_FLT_FN (BUILT_IN_CONJ):
9405 if (validate_arg (arg0, COMPLEX_TYPE)
9406 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9407 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9408 break;
9409
9410 CASE_FLT_FN (BUILT_IN_CREAL):
9411 if (validate_arg (arg0, COMPLEX_TYPE)
9412 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9413 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9414 break;
9415
9416 CASE_FLT_FN (BUILT_IN_CIMAG):
9417 if (validate_arg (arg0, COMPLEX_TYPE)
9418 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9419 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9420 break;
9421
9422 CASE_FLT_FN (BUILT_IN_CCOS):
9423 if (validate_arg (arg0, COMPLEX_TYPE)
9424 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9425 return do_mpc_arg1 (arg0, type, mpc_cos);
9426 break;
9427
9428 CASE_FLT_FN (BUILT_IN_CCOSH):
9429 if (validate_arg (arg0, COMPLEX_TYPE)
9430 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9431 return do_mpc_arg1 (arg0, type, mpc_cosh);
9432 break;
9433
9434 CASE_FLT_FN (BUILT_IN_CPROJ):
9435 if (TREE_CODE (arg0) == COMPLEX_CST
9436 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9437 {
9438 const REAL_VALUE_TYPE *real
9439 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9440 const REAL_VALUE_TYPE *imag
9441 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9442
9443 if (real_isinf (real) || real_isinf (imag))
9444 return build_complex_inf (type, imag->sign);
9445 else
9446 return arg0;
9447 }
9448 break;
9449
9450 CASE_FLT_FN (BUILT_IN_CSIN):
9451 if (validate_arg (arg0, COMPLEX_TYPE)
9452 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9453 return do_mpc_arg1 (arg0, type, mpc_sin);
9454 break;
9455
9456 CASE_FLT_FN (BUILT_IN_CSINH):
9457 if (validate_arg (arg0, COMPLEX_TYPE)
9458 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9459 return do_mpc_arg1 (arg0, type, mpc_sinh);
9460 break;
9461
9462 CASE_FLT_FN (BUILT_IN_CTAN):
9463 if (validate_arg (arg0, COMPLEX_TYPE)
9464 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9465 return do_mpc_arg1 (arg0, type, mpc_tan);
9466 break;
9467
9468 CASE_FLT_FN (BUILT_IN_CTANH):
9469 if (validate_arg (arg0, COMPLEX_TYPE)
9470 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9471 return do_mpc_arg1 (arg0, type, mpc_tanh);
9472 break;
9473
9474 CASE_FLT_FN (BUILT_IN_CLOG):
9475 if (validate_arg (arg0, COMPLEX_TYPE)
9476 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9477 return do_mpc_arg1 (arg0, type, mpc_log);
9478 break;
9479
9480 CASE_FLT_FN (BUILT_IN_CSQRT):
9481 if (validate_arg (arg0, COMPLEX_TYPE)
9482 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9483 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9484 break;
9485
9486 CASE_FLT_FN (BUILT_IN_CASIN):
9487 if (validate_arg (arg0, COMPLEX_TYPE)
9488 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9489 return do_mpc_arg1 (arg0, type, mpc_asin);
9490 break;
9491
9492 CASE_FLT_FN (BUILT_IN_CACOS):
9493 if (validate_arg (arg0, COMPLEX_TYPE)
9494 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9495 return do_mpc_arg1 (arg0, type, mpc_acos);
9496 break;
9497
9498 CASE_FLT_FN (BUILT_IN_CATAN):
9499 if (validate_arg (arg0, COMPLEX_TYPE)
9500 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9501 return do_mpc_arg1 (arg0, type, mpc_atan);
9502 break;
9503
9504 CASE_FLT_FN (BUILT_IN_CASINH):
9505 if (validate_arg (arg0, COMPLEX_TYPE)
9506 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9507 return do_mpc_arg1 (arg0, type, mpc_asinh);
9508 break;
9509
9510 CASE_FLT_FN (BUILT_IN_CACOSH):
9511 if (validate_arg (arg0, COMPLEX_TYPE)
9512 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9513 return do_mpc_arg1 (arg0, type, mpc_acosh);
9514 break;
9515
9516 CASE_FLT_FN (BUILT_IN_CATANH):
9517 if (validate_arg (arg0, COMPLEX_TYPE)
9518 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9519 return do_mpc_arg1 (arg0, type, mpc_atanh);
9520 break;
9521
9522 CASE_FLT_FN (BUILT_IN_CABS):
9523 if (TREE_CODE (arg0) == COMPLEX_CST
9524 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9525 return do_mpfr_arg2 (TREE_REALPART (arg0), TREE_IMAGPART (arg0),
9526 type, mpfr_hypot);
9527 break;
9528
9529 CASE_FLT_FN (BUILT_IN_CARG):
9530 return fold_builtin_carg (loc, arg0, type);
9531
9532 CASE_FLT_FN (BUILT_IN_SQRT):
9533 if (validate_arg (arg0, REAL_TYPE))
9534 return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
9535 break;
9536
9537 CASE_FLT_FN (BUILT_IN_CBRT):
9538 if (validate_arg (arg0, REAL_TYPE))
9539 return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
9540 break;
9541
9542 CASE_FLT_FN (BUILT_IN_ASIN):
9543 if (validate_arg (arg0, REAL_TYPE))
9544 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9545 &dconstm1, &dconst1, true);
9546 break;
9547
9548 CASE_FLT_FN (BUILT_IN_ACOS):
9549 if (validate_arg (arg0, REAL_TYPE))
9550 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9551 &dconstm1, &dconst1, true);
9552 break;
9553
9554 CASE_FLT_FN (BUILT_IN_ATAN):
9555 if (validate_arg (arg0, REAL_TYPE))
9556 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9557 break;
9558
9559 CASE_FLT_FN (BUILT_IN_ASINH):
9560 if (validate_arg (arg0, REAL_TYPE))
9561 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9562 break;
9563
9564 CASE_FLT_FN (BUILT_IN_ACOSH):
9565 if (validate_arg (arg0, REAL_TYPE))
9566 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9567 &dconst1, NULL, true);
9568 break;
9569
9570 CASE_FLT_FN (BUILT_IN_ATANH):
9571 if (validate_arg (arg0, REAL_TYPE))
9572 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9573 &dconstm1, &dconst1, false);
9574 break;
9575
9576 CASE_FLT_FN (BUILT_IN_SIN):
9577 if (validate_arg (arg0, REAL_TYPE))
9578 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9579 break;
9580
9581 CASE_FLT_FN (BUILT_IN_COS):
9582 if (validate_arg (arg0, REAL_TYPE))
9583 return do_mpfr_arg1 (arg0, type, mpfr_cos, NULL, NULL, 0);
9584 break;
9585
9586 CASE_FLT_FN (BUILT_IN_TAN):
9587 if (validate_arg (arg0, REAL_TYPE))
9588 return do_mpfr_arg1 (arg0, type, mpfr_tan, NULL, NULL, 0);
9589 break;
9590
9591 CASE_FLT_FN (BUILT_IN_CEXP):
9592 return fold_builtin_cexp (loc, arg0, type);
9593
9594 CASE_FLT_FN (BUILT_IN_CEXPI):
9595 if (validate_arg (arg0, REAL_TYPE))
9596 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9597 break;
9598
9599 CASE_FLT_FN (BUILT_IN_SINH):
9600 if (validate_arg (arg0, REAL_TYPE))
9601 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9602 break;
9603
9604 CASE_FLT_FN (BUILT_IN_COSH):
9605 if (validate_arg (arg0, REAL_TYPE))
9606 return do_mpfr_arg1 (arg0, type, mpfr_cosh, NULL, NULL, 0);
9607 break;
9608
9609 CASE_FLT_FN (BUILT_IN_TANH):
9610 if (validate_arg (arg0, REAL_TYPE))
9611 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9612 break;
9613
9614 CASE_FLT_FN (BUILT_IN_ERF):
9615 if (validate_arg (arg0, REAL_TYPE))
9616 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9617 break;
9618
9619 CASE_FLT_FN (BUILT_IN_ERFC):
9620 if (validate_arg (arg0, REAL_TYPE))
9621 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9622 break;
9623
9624 CASE_FLT_FN (BUILT_IN_TGAMMA):
9625 if (validate_arg (arg0, REAL_TYPE))
9626 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9627 break;
9628
9629 CASE_FLT_FN (BUILT_IN_EXP):
9630 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9631
9632 CASE_FLT_FN (BUILT_IN_EXP2):
9633 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9634
9635 CASE_FLT_FN (BUILT_IN_EXP10):
9636 CASE_FLT_FN (BUILT_IN_POW10):
9637 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9638
9639 CASE_FLT_FN (BUILT_IN_EXPM1):
9640 if (validate_arg (arg0, REAL_TYPE))
9641 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9642 break;
9643
9644 CASE_FLT_FN (BUILT_IN_LOG):
9645 if (validate_arg (arg0, REAL_TYPE))
9646 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
9647 break;
9648
9649 CASE_FLT_FN (BUILT_IN_LOG2):
9650 if (validate_arg (arg0, REAL_TYPE))
9651 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
9652 break;
9653
9654 CASE_FLT_FN (BUILT_IN_LOG10):
9655 if (validate_arg (arg0, REAL_TYPE))
9656 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
9657 break;
9658
9659 CASE_FLT_FN (BUILT_IN_LOG1P):
9660 if (validate_arg (arg0, REAL_TYPE))
9661 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9662 &dconstm1, NULL, false);
9663 break;
9664
9665 CASE_FLT_FN (BUILT_IN_J0):
9666 if (validate_arg (arg0, REAL_TYPE))
9667 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9668 NULL, NULL, 0);
9669 break;
9670
9671 CASE_FLT_FN (BUILT_IN_J1):
9672 if (validate_arg (arg0, REAL_TYPE))
9673 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9674 NULL, NULL, 0);
9675 break;
9676
9677 CASE_FLT_FN (BUILT_IN_Y0):
9678 if (validate_arg (arg0, REAL_TYPE))
9679 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9680 &dconst0, NULL, false);
9681 break;
9682
9683 CASE_FLT_FN (BUILT_IN_Y1):
9684 if (validate_arg (arg0, REAL_TYPE))
9685 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9686 &dconst0, NULL, false);
9687 break;
9688
9689 CASE_FLT_FN (BUILT_IN_NAN):
9690 case BUILT_IN_NAND32:
9691 case BUILT_IN_NAND64:
9692 case BUILT_IN_NAND128:
9693 return fold_builtin_nan (arg0, type, true);
9694
9695 CASE_FLT_FN (BUILT_IN_NANS):
9696 return fold_builtin_nan (arg0, type, false);
9697
9698 CASE_FLT_FN (BUILT_IN_FLOOR):
9699 return fold_builtin_floor (loc, fndecl, arg0);
9700
9701 CASE_FLT_FN (BUILT_IN_CEIL):
9702 return fold_builtin_ceil (loc, fndecl, arg0);
9703
9704 CASE_FLT_FN (BUILT_IN_TRUNC):
9705 return fold_builtin_trunc (loc, fndecl, arg0);
9706
9707 CASE_FLT_FN (BUILT_IN_ROUND):
9708 return fold_builtin_round (loc, fndecl, arg0);
9709
9710 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9711 CASE_FLT_FN (BUILT_IN_RINT):
9712 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9713
9714 CASE_FLT_FN (BUILT_IN_ICEIL):
9715 CASE_FLT_FN (BUILT_IN_LCEIL):
9716 CASE_FLT_FN (BUILT_IN_LLCEIL):
9717 CASE_FLT_FN (BUILT_IN_LFLOOR):
9718 CASE_FLT_FN (BUILT_IN_IFLOOR):
9719 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9720 CASE_FLT_FN (BUILT_IN_IROUND):
9721 CASE_FLT_FN (BUILT_IN_LROUND):
9722 CASE_FLT_FN (BUILT_IN_LLROUND):
9723 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9724
9725 CASE_FLT_FN (BUILT_IN_IRINT):
9726 CASE_FLT_FN (BUILT_IN_LRINT):
9727 CASE_FLT_FN (BUILT_IN_LLRINT):
9728 return fold_fixed_mathfn (loc, fndecl, arg0);
9729
9730 case BUILT_IN_BSWAP16:
9731 case BUILT_IN_BSWAP32:
9732 case BUILT_IN_BSWAP64:
9733 return fold_builtin_bswap (fndecl, arg0);
9734
9735 CASE_INT_FN (BUILT_IN_FFS):
9736 CASE_INT_FN (BUILT_IN_CLZ):
9737 CASE_INT_FN (BUILT_IN_CTZ):
9738 CASE_INT_FN (BUILT_IN_CLRSB):
9739 CASE_INT_FN (BUILT_IN_POPCOUNT):
9740 CASE_INT_FN (BUILT_IN_PARITY):
9741 return fold_builtin_bitop (fndecl, arg0);
9742
9743 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9744 return fold_builtin_signbit (loc, arg0, type);
9745
9746 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9747 return fold_builtin_significand (loc, arg0, type);
9748
9749 CASE_FLT_FN (BUILT_IN_ILOGB):
9750 CASE_FLT_FN (BUILT_IN_LOGB):
9751 return fold_builtin_logb (loc, arg0, type);
9752
9753 case BUILT_IN_ISASCII:
9754 return fold_builtin_isascii (loc, arg0);
9755
9756 case BUILT_IN_TOASCII:
9757 return fold_builtin_toascii (loc, arg0);
9758
9759 case BUILT_IN_ISDIGIT:
9760 return fold_builtin_isdigit (loc, arg0);
9761
9762 CASE_FLT_FN (BUILT_IN_FINITE):
9763 case BUILT_IN_FINITED32:
9764 case BUILT_IN_FINITED64:
9765 case BUILT_IN_FINITED128:
9766 case BUILT_IN_ISFINITE:
9767 {
9768 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9769 if (ret)
9770 return ret;
9771 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9772 }
9773
9774 CASE_FLT_FN (BUILT_IN_ISINF):
9775 case BUILT_IN_ISINFD32:
9776 case BUILT_IN_ISINFD64:
9777 case BUILT_IN_ISINFD128:
9778 {
9779 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9780 if (ret)
9781 return ret;
9782 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9783 }
9784
9785 case BUILT_IN_ISNORMAL:
9786 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9787
9788 case BUILT_IN_ISINF_SIGN:
9789 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9790
9791 CASE_FLT_FN (BUILT_IN_ISNAN):
9792 case BUILT_IN_ISNAND32:
9793 case BUILT_IN_ISNAND64:
9794 case BUILT_IN_ISNAND128:
9795 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9796
9797 case BUILT_IN_FREE:
9798 if (integer_zerop (arg0))
9799 return build_empty_stmt (loc);
9800 break;
9801
9802 default:
9803 break;
9804 }
9805
9806 return NULL_TREE;
9807
9808 }
9809
9810 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9811 This function returns NULL_TREE if no simplification was possible. */
9812
9813 static tree
9814 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9815 {
9816 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9817 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9818
9819 switch (fcode)
9820 {
9821 CASE_FLT_FN (BUILT_IN_JN):
9822 if (validate_arg (arg0, INTEGER_TYPE)
9823 && validate_arg (arg1, REAL_TYPE))
9824 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9825 break;
9826
9827 CASE_FLT_FN (BUILT_IN_YN):
9828 if (validate_arg (arg0, INTEGER_TYPE)
9829 && validate_arg (arg1, REAL_TYPE))
9830 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9831 &dconst0, false);
9832 break;
9833
9834 CASE_FLT_FN (BUILT_IN_DREM):
9835 CASE_FLT_FN (BUILT_IN_REMAINDER):
9836 if (validate_arg (arg0, REAL_TYPE)
9837 && validate_arg (arg1, REAL_TYPE))
9838 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9839 break;
9840
9841 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9842 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9843 if (validate_arg (arg0, REAL_TYPE)
9844 && validate_arg (arg1, POINTER_TYPE))
9845 return do_mpfr_lgamma_r (arg0, arg1, type);
9846 break;
9847
9848 CASE_FLT_FN (BUILT_IN_ATAN2):
9849 if (validate_arg (arg0, REAL_TYPE)
9850 && validate_arg (arg1, REAL_TYPE))
9851 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9852 break;
9853
9854 CASE_FLT_FN (BUILT_IN_FDIM):
9855 if (validate_arg (arg0, REAL_TYPE)
9856 && validate_arg (arg1, REAL_TYPE))
9857 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9858 break;
9859
9860 CASE_FLT_FN (BUILT_IN_HYPOT):
9861 return fold_builtin_hypot (loc, arg0, arg1, type);
9862
9863 CASE_FLT_FN (BUILT_IN_CPOW):
9864 if (validate_arg (arg0, COMPLEX_TYPE)
9865 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9866 && validate_arg (arg1, COMPLEX_TYPE)
9867 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9868 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9869 break;
9870
9871 CASE_FLT_FN (BUILT_IN_LDEXP):
9872 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9873 CASE_FLT_FN (BUILT_IN_SCALBN):
9874 CASE_FLT_FN (BUILT_IN_SCALBLN):
9875 return fold_builtin_load_exponent (loc, arg0, arg1,
9876 type, /*ldexp=*/false);
9877
9878 CASE_FLT_FN (BUILT_IN_FREXP):
9879 return fold_builtin_frexp (loc, arg0, arg1, type);
9880
9881 CASE_FLT_FN (BUILT_IN_MODF):
9882 return fold_builtin_modf (loc, arg0, arg1, type);
9883
9884 case BUILT_IN_STRSTR:
9885 return fold_builtin_strstr (loc, arg0, arg1, type);
9886
9887 case BUILT_IN_STRSPN:
9888 return fold_builtin_strspn (loc, arg0, arg1);
9889
9890 case BUILT_IN_STRCSPN:
9891 return fold_builtin_strcspn (loc, arg0, arg1);
9892
9893 case BUILT_IN_STRCHR:
9894 case BUILT_IN_INDEX:
9895 return fold_builtin_strchr (loc, arg0, arg1, type);
9896
9897 case BUILT_IN_STRRCHR:
9898 case BUILT_IN_RINDEX:
9899 return fold_builtin_strrchr (loc, arg0, arg1, type);
9900
9901 case BUILT_IN_STRCMP:
9902 return fold_builtin_strcmp (loc, arg0, arg1);
9903
9904 case BUILT_IN_STRPBRK:
9905 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9906
9907 case BUILT_IN_EXPECT:
9908 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9909
9910 CASE_FLT_FN (BUILT_IN_POW):
9911 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
9912
9913 CASE_FLT_FN (BUILT_IN_POWI):
9914 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
9915
9916 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9917 return fold_builtin_copysign (loc, arg0, arg1, type);
9918
9919 CASE_FLT_FN (BUILT_IN_FMIN):
9920 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
9921
9922 CASE_FLT_FN (BUILT_IN_FMAX):
9923 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
9924
9925 case BUILT_IN_ISGREATER:
9926 return fold_builtin_unordered_cmp (loc, fndecl,
9927 arg0, arg1, UNLE_EXPR, LE_EXPR);
9928 case BUILT_IN_ISGREATEREQUAL:
9929 return fold_builtin_unordered_cmp (loc, fndecl,
9930 arg0, arg1, UNLT_EXPR, LT_EXPR);
9931 case BUILT_IN_ISLESS:
9932 return fold_builtin_unordered_cmp (loc, fndecl,
9933 arg0, arg1, UNGE_EXPR, GE_EXPR);
9934 case BUILT_IN_ISLESSEQUAL:
9935 return fold_builtin_unordered_cmp (loc, fndecl,
9936 arg0, arg1, UNGT_EXPR, GT_EXPR);
9937 case BUILT_IN_ISLESSGREATER:
9938 return fold_builtin_unordered_cmp (loc, fndecl,
9939 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9940 case BUILT_IN_ISUNORDERED:
9941 return fold_builtin_unordered_cmp (loc, fndecl,
9942 arg0, arg1, UNORDERED_EXPR,
9943 NOP_EXPR);
9944
9945 /* We do the folding for va_start in the expander. */
9946 case BUILT_IN_VA_START:
9947 break;
9948
9949 case BUILT_IN_OBJECT_SIZE:
9950 return fold_builtin_object_size (arg0, arg1);
9951
9952 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9953 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9954
9955 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9956 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9957
9958 default:
9959 break;
9960 }
9961 return NULL_TREE;
9962 }
9963
9964 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9965 and ARG2.
9966 This function returns NULL_TREE if no simplification was possible. */
9967
9968 static tree
9969 fold_builtin_3 (location_t loc, tree fndecl,
9970 tree arg0, tree arg1, tree arg2)
9971 {
9972 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9973 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9974 switch (fcode)
9975 {
9976
9977 CASE_FLT_FN (BUILT_IN_SINCOS):
9978 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9979
9980 CASE_FLT_FN (BUILT_IN_FMA):
9981 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9982 break;
9983
9984 CASE_FLT_FN (BUILT_IN_REMQUO):
9985 if (validate_arg (arg0, REAL_TYPE)
9986 && validate_arg (arg1, REAL_TYPE)
9987 && validate_arg (arg2, POINTER_TYPE))
9988 return do_mpfr_remquo (arg0, arg1, arg2);
9989 break;
9990
9991 case BUILT_IN_STRNCMP:
9992 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
9993
9994 case BUILT_IN_MEMCHR:
9995 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
9996
9997 case BUILT_IN_BCMP:
9998 case BUILT_IN_MEMCMP:
9999 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10000
10001 case BUILT_IN_EXPECT:
10002 return fold_builtin_expect (loc, arg0, arg1, arg2);
10003
10004 case BUILT_IN_ADD_OVERFLOW:
10005 case BUILT_IN_SUB_OVERFLOW:
10006 case BUILT_IN_MUL_OVERFLOW:
10007 case BUILT_IN_SADD_OVERFLOW:
10008 case BUILT_IN_SADDL_OVERFLOW:
10009 case BUILT_IN_SADDLL_OVERFLOW:
10010 case BUILT_IN_SSUB_OVERFLOW:
10011 case BUILT_IN_SSUBL_OVERFLOW:
10012 case BUILT_IN_SSUBLL_OVERFLOW:
10013 case BUILT_IN_SMUL_OVERFLOW:
10014 case BUILT_IN_SMULL_OVERFLOW:
10015 case BUILT_IN_SMULLL_OVERFLOW:
10016 case BUILT_IN_UADD_OVERFLOW:
10017 case BUILT_IN_UADDL_OVERFLOW:
10018 case BUILT_IN_UADDLL_OVERFLOW:
10019 case BUILT_IN_USUB_OVERFLOW:
10020 case BUILT_IN_USUBL_OVERFLOW:
10021 case BUILT_IN_USUBLL_OVERFLOW:
10022 case BUILT_IN_UMUL_OVERFLOW:
10023 case BUILT_IN_UMULL_OVERFLOW:
10024 case BUILT_IN_UMULLL_OVERFLOW:
10025 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10026
10027 default:
10028 break;
10029 }
10030 return NULL_TREE;
10031 }
10032
10033 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10034 arguments. The trailing unnamed boolean parameter (formerly IGNORE,
10035 true if the result of the call is ignored) is unused here. This
10036 function returns NULL_TREE if no simplification was possible. */
10037
10038 tree
10039 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10040 {
10041 tree ret = NULL_TREE;
10042
10043 switch (nargs)
10044 {
10045 case 0:
10046 ret = fold_builtin_0 (loc, fndecl);
10047 break;
10048 case 1:
10049 ret = fold_builtin_1 (loc, fndecl, args[0]);
10050 break;
10051 case 2:
10052 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10053 break;
10054 case 3:
10055 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10056 break;
10057 default:
10058 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10059 break;
10060 }
10061 if (ret)
10062 {
10063 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10064 SET_EXPR_LOCATION (ret, loc);
10065 TREE_NO_WARNING (ret) = 1;
10066 return ret;
10067 }
10068 return NULL_TREE;
10069 }
10070
10071 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10072 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10073 of arguments in ARGS to be omitted. OLDNARGS is the number of
10074 elements in ARGS. */
10075
10076 static tree
10077 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10078 int skip, tree fndecl, int n, va_list newargs)
10079 {
10080 int nargs = oldnargs - skip + n;
10081 tree *buffer;
10082
10083 if (n > 0)
10084 {
10085 int i, j;
10086
10087 buffer = XALLOCAVEC (tree, nargs);
10088 for (i = 0; i < n; i++)
10089 buffer[i] = va_arg (newargs, tree);
10090 for (j = skip; j < oldnargs; j++, i++)
10091 buffer[i] = args[j];
10092 }
10093 else
10094 buffer = args + skip;
10095
10096 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10097 }
10098
10099 /* Return true if FNDECL shouldn't be folded right now.
10100 If a built-in function has an always_inline inline wrapper,
10101 defer folding it until after always_inline functions have
10102 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10103 might not be performed. */
10104
10105 bool
10106 avoid_folding_inline_builtin (tree fndecl)
10107 {
10108 return (DECL_DECLARED_INLINE_P (fndecl)
10109 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10110 && cfun
10111 && !cfun->always_inline_functions_inlined
10112 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10113 }
10114
10115 /* A wrapper function for builtin folding that prevents warnings for
10116 "statement without effect" and the like, caused by removing the
10117 call node earlier than the warning is generated. */
10118
10119 tree
10120 fold_call_expr (location_t loc, tree exp, bool ignore)
10121 {
10122 tree ret = NULL_TREE;
10123 tree fndecl = get_callee_fndecl (exp);
10124 if (fndecl
10125 && TREE_CODE (fndecl) == FUNCTION_DECL
10126 && DECL_BUILT_IN (fndecl)
10127 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10128 yet. Defer folding until we see all the arguments
10129 (after inlining). */
10130 && !CALL_EXPR_VA_ARG_PACK (exp))
10131 {
10132 int nargs = call_expr_nargs (exp);
10133
10134 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10135 instead the last argument is __builtin_va_arg_pack (). Defer folding
10136 even in that case, until arguments are finalized. */
10137 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10138 {
10139 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10140 if (fndecl2
10141 && TREE_CODE (fndecl2) == FUNCTION_DECL
10142 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10143 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10144 return NULL_TREE;
10145 }
10146
10147 if (avoid_folding_inline_builtin (fndecl))
10148 return NULL_TREE;
10149
10150 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10151 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10152 CALL_EXPR_ARGP (exp), ignore);
10153 else
10154 {
10155 tree *args = CALL_EXPR_ARGP (exp);
10156 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10157 if (ret)
10158 return ret;
10159 }
10160 }
10161 return NULL_TREE;
10162 }
10163
10164 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10165 N arguments are passed in the array ARGARRAY. Return a folded
10166 expression or NULL_TREE if no simplification was possible. */
10167
10168 tree
10169 fold_builtin_call_array (location_t loc, tree,
10170 tree fn,
10171 int n,
10172 tree *argarray)
10173 {
10174 if (TREE_CODE (fn) != ADDR_EXPR)
10175 return NULL_TREE;
10176
10177 tree fndecl = TREE_OPERAND (fn, 0);
10178 if (TREE_CODE (fndecl) == FUNCTION_DECL
10179 && DECL_BUILT_IN (fndecl))
10180 {
10181 /* If the last argument is __builtin_va_arg_pack (), arguments to this
10182 function are not finalized yet. Defer folding until they are. */
10183 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10184 {
10185 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10186 if (fndecl2
10187 && TREE_CODE (fndecl2) == FUNCTION_DECL
10188 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10189 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10190 return NULL_TREE;
10191 }
10192 if (avoid_folding_inline_builtin (fndecl))
10193 return NULL_TREE;
10194 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10195 return targetm.fold_builtin (fndecl, n, argarray, false);
10196 else
10197 return fold_builtin_n (loc, fndecl, argarray, n, false);
10198 }
10199
10200 return NULL_TREE;
10201 }
10202
10203 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10204 along with N new arguments specified as the "..." parameters. SKIP
10205 is the number of arguments in EXP to be omitted. This function is used
10206 to do varargs-to-varargs transformations. */
10207
10208 static tree
10209 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10210 {
10211 va_list ap;
10212 tree t;
10213
10214 va_start (ap, n);
10215 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10216 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10217 va_end (ap);
10218
10219 return t;
10220 }
10221
10222 /* Validate a single argument ARG against a tree code CODE representing
10223 a type. */
10224
10225 static bool
10226 validate_arg (const_tree arg, enum tree_code code)
10227 {
10228 if (!arg)
10229 return false;
10230 else if (code == POINTER_TYPE)
10231 return POINTER_TYPE_P (TREE_TYPE (arg));
10232 else if (code == INTEGER_TYPE)
10233 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10234 return code == TREE_CODE (TREE_TYPE (arg));
10235 }
10236
10237 /* This function validates the types of a function call argument list
10238 against a specified list of tree_codes. If the last specifier is a 0,
10239 that represents an ellipsis; otherwise the last specifier must be a
10240 VOID_TYPE.
10241
10242 This is the GIMPLE version of validate_arglist. Eventually we want to
10243 completely convert builtins.c to work from GIMPLEs and the tree based
10244 validate_arglist will then be removed. */
10245
10246 bool
10247 validate_gimple_arglist (const gcall *call, ...)
10248 {
10249 enum tree_code code;
10250 bool res = false;
10251 va_list ap;
10252 const_tree arg;
10253 size_t i;
10254
10255 va_start (ap, call);
10256 i = 0;
10257
10258 do
10259 {
10260 code = (enum tree_code) va_arg (ap, int);
10261 switch (code)
10262 {
10263 case 0:
10264 /* This signifies an ellipsis; any further arguments are all OK. */
10265 res = true;
10266 goto end;
10267 case VOID_TYPE:
10268 /* This signifies an endlink, if no arguments remain, return
10269 true, otherwise return false. */
10270 res = (i == gimple_call_num_args (call));
10271 goto end;
10272 default:
10273 /* If no parameters remain or the parameter's code does not
10274 match the specified code, return false. Otherwise continue
10275 checking any remaining arguments. */
10276 arg = gimple_call_arg (call, i++);
10277 if (!validate_arg (arg, code))
10278 goto end;
10279 break;
10280 }
10281 }
10282 while (1);
10283
10284 /* We need gotos here so that every path reaches the single
10285 va_end call below. */
10286 end: ;
10287 va_end (ap);
10288
10289 return res;
10290 }
10291
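/* For example, validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
   VOID_TYPE) accepts exactly one floating-point argument followed by
   one pointer argument, and nothing else. */
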
10292 /* Default target-specific builtin expander that does nothing. */
10293
10294 rtx
10295 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10296 rtx target ATTRIBUTE_UNUSED,
10297 rtx subtarget ATTRIBUTE_UNUSED,
10298 machine_mode mode ATTRIBUTE_UNUSED,
10299 int ignore ATTRIBUTE_UNUSED)
10300 {
10301 return NULL_RTX;
10302 }
10303
10304 /* Returns true if EXP represents data that would potentially reside
10305 in a readonly section. */
10306
10307 bool
10308 readonly_data_expr (tree exp)
10309 {
10310 STRIP_NOPS (exp);
10311
10312 if (TREE_CODE (exp) != ADDR_EXPR)
10313 return false;
10314
10315 exp = get_base_address (TREE_OPERAND (exp, 0));
10316 if (!exp)
10317 return false;
10318
10319 /* Make sure we call decl_readonly_section only for trees it
10320 can handle (since it returns true for everything it doesn't
10321 understand). */
10322 if (TREE_CODE (exp) == STRING_CST
10323 || TREE_CODE (exp) == CONSTRUCTOR
10324 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10325 return decl_readonly_section (exp, 0);
10326 else
10327 return false;
10328 }
10329
10330 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10331 to the call, and TYPE is its return type.
10332
10333 Return NULL_TREE if no simplification was possible, otherwise return the
10334 simplified form of the call as a tree.
10335
10336 The simplified form may be a constant or other expression which
10337 computes the same value, but in a more efficient manner (including
10338 calls to other builtin functions).
10339
10340 The call may contain arguments which need to be evaluated, but
10341 which are not useful to determine the result of the call. In
10342 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10343 COMPOUND_EXPR will be an argument which must be evaluated.
10344 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10345 COMPOUND_EXPR in the chain will contain the tree for the simplified
10346 form of the builtin function call. */
10347
10348 static tree
10349 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10350 {
10351 if (!validate_arg (s1, POINTER_TYPE)
10352 || !validate_arg (s2, POINTER_TYPE))
10353 return NULL_TREE;
10354 else
10355 {
10356 tree fn;
10357 const char *p1, *p2;
10358
10359 p2 = c_getstr (s2);
10360 if (p2 == NULL)
10361 return NULL_TREE;
10362
10363 p1 = c_getstr (s1);
10364 if (p1 != NULL)
10365 {
10366 const char *r = strstr (p1, p2);
10367 tree tem;
10368
10369 if (r == NULL)
10370 return build_int_cst (TREE_TYPE (s1), 0);
10371
10372 /* Return an offset into the constant string argument. */
10373 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10374 return fold_convert_loc (loc, type, tem);
10375 }
10376
10377 /* The argument is const char *, and the result is char *, so we need
10378 a type conversion here to avoid a warning. */
10379 if (p2[0] == '\0')
10380 return fold_convert_loc (loc, type, s1);
10381
10382 if (p2[1] != '\0')
10383 return NULL_TREE;
10384
10385 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10386 if (!fn)
10387 return NULL_TREE;
10388
10389 /* New argument list transforming strstr(s1, s2) to
10390 strchr(s1, s2[0]). */
10391 return build_call_expr_loc (loc, fn, 2, s1,
10392 build_int_cst (integer_type_node, p2[0]));
10393 }
10394 }
10395
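/* Examples: with both strings constant, strstr ("hello", "ll") folds
   to the offset expression "hello" + 2; strstr (s, "") folds to
   (char *) s; and strstr (s, "l") becomes strchr (s, 'l'). */
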
10396 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10397 the call, and TYPE is its return type.
10398
10399 Return NULL_TREE if no simplification was possible, otherwise return the
10400 simplified form of the call as a tree.
10401
10402 The simplified form may be a constant or other expression which
10403 computes the same value, but in a more efficient manner (including
10404 calls to other builtin functions).
10405
10406 The call may contain arguments which need to be evaluated, but
10407 which are not useful to determine the result of the call. In
10408 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10409 COMPOUND_EXPR will be an argument which must be evaluated.
10410 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10411 COMPOUND_EXPR in the chain will contain the tree for the simplified
10412 form of the builtin function call. */
10413
10414 static tree
10415 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10416 {
10417 if (!validate_arg (s1, POINTER_TYPE)
10418 || !validate_arg (s2, INTEGER_TYPE))
10419 return NULL_TREE;
10420 else
10421 {
10422 const char *p1;
10423
10424 if (TREE_CODE (s2) != INTEGER_CST)
10425 return NULL_TREE;
10426
10427 p1 = c_getstr (s1);
10428 if (p1 != NULL)
10429 {
10430 char c;
10431 const char *r;
10432 tree tem;
10433
10434 if (target_char_cast (s2, &c))
10435 return NULL_TREE;
10436
10437 r = strchr (p1, c);
10438
10439 if (r == NULL)
10440 return build_int_cst (TREE_TYPE (s1), 0);
10441
10442 /* Return an offset into the constant string argument. */
10443 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10444 return fold_convert_loc (loc, type, tem);
10445 }
10446 return NULL_TREE;
10447 }
10448 }
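
/* For illustration only: with a constant string the strchr folding
   above performs, e.g.:
     strchr ("hello", 'l')  ->  (char *) "hello" + 2
     strchr ("hello", 'z')  ->  a null pointer constant.
   A first argument that is not a string literal is left for the
   library call. */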
10449
10450 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10451 the call, and TYPE is its return type.
10452
10453 Return NULL_TREE if no simplification was possible, otherwise return the
10454 simplified form of the call as a tree.
10455
10456 The simplified form may be a constant or other expression which
10457 computes the same value, but in a more efficient manner (including
10458 calls to other builtin functions).
10459
10460 The call may contain arguments which need to be evaluated, but
10461 which are not useful to determine the result of the call. In
10462 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10463 COMPOUND_EXPR will be an argument which must be evaluated.
10464 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10465 COMPOUND_EXPR in the chain will contain the tree for the simplified
10466 form of the builtin function call. */
10467
10468 static tree
10469 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10470 {
10471 if (!validate_arg (s1, POINTER_TYPE)
10472 || !validate_arg (s2, INTEGER_TYPE))
10473 return NULL_TREE;
10474 else
10475 {
10476 tree fn;
10477 const char *p1;
10478
10479 if (TREE_CODE (s2) != INTEGER_CST)
10480 return NULL_TREE;
10481
10482 p1 = c_getstr (s1);
10483 if (p1 != NULL)
10484 {
10485 char c;
10486 const char *r;
10487 tree tem;
10488
10489 if (target_char_cast (s2, &c))
10490 return NULL_TREE;
10491
10492 r = strrchr (p1, c);
10493
10494 if (r == NULL)
10495 return build_int_cst (TREE_TYPE (s1), 0);
10496
10497 /* Return an offset into the constant string argument. */
10498 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10499 return fold_convert_loc (loc, type, tem);
10500 }
10501
10502 if (! integer_zerop (s2))
10503 return NULL_TREE;
10504
10505 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10506 if (!fn)
10507 return NULL_TREE;
10508
10509 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10510 return build_call_expr_loc (loc, fn, 2, s1, s2);
10511 }
10512 }
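
/* For illustration only, the strrchr folding above performs, e.g.:
     strrchr ("hello", 'l')  ->  (char *) "hello" + 3
     strrchr (s, '\0')       ->  strchr (s, '\0').
   Everything else is left for the library call. */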
10513
10514 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10515 to the call, and TYPE is its return type.
10516
10517 Return NULL_TREE if no simplification was possible, otherwise return the
10518 simplified form of the call as a tree.
10519
10520 The simplified form may be a constant or other expression which
10521 computes the same value, but in a more efficient manner (including
10522 calls to other builtin functions).
10523
10524 The call may contain arguments which need to be evaluated, but
10525 which are not useful to determine the result of the call. In
10526 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10527 COMPOUND_EXPR will be an argument which must be evaluated.
10528 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10529 COMPOUND_EXPR in the chain will contain the tree for the simplified
10530 form of the builtin function call. */
10531
10532 static tree
10533 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10534 {
10535 if (!validate_arg (s1, POINTER_TYPE)
10536 || !validate_arg (s2, POINTER_TYPE))
10537 return NULL_TREE;
10538 else
10539 {
10540 tree fn;
10541 const char *p1, *p2;
10542
10543 p2 = c_getstr (s2);
10544 if (p2 == NULL)
10545 return NULL_TREE;
10546
10547 p1 = c_getstr (s1);
10548 if (p1 != NULL)
10549 {
10550 const char *r = strpbrk (p1, p2);
10551 tree tem;
10552
10553 if (r == NULL)
10554 return build_int_cst (TREE_TYPE (s1), 0);
10555
10556 /* Return an offset into the constant string argument. */
10557 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10558 return fold_convert_loc (loc, type, tem);
10559 }
10560
10561 if (p2[0] == '\0')
10562 /* strpbrk(x, "") == NULL.
10563 Evaluate and ignore s1 in case it has side-effects. */
10564 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10565
10566 if (p2[1] != '\0')
10567 return NULL_TREE; /* Really call strpbrk. */
10568
10569 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10570 if (!fn)
10571 return NULL_TREE;
10572
10573 /* New argument list transforming strpbrk(s1, s2) to
10574 strchr(s1, s2[0]). */
10575 return build_call_expr_loc (loc, fn, 2, s1,
10576 build_int_cst (integer_type_node, p2[0]));
10577 }
10578 }
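
/* For illustration only, the strpbrk folding above performs, e.g.:
     strpbrk (s, "")          ->  a null pointer, with s still
                                  evaluated for side effects
     strpbrk (s, "c")         ->  strchr (s, 'c')
     strpbrk ("hello", "lo")  ->  (char *) "hello" + 2. */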
10579
10580 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10581 to the call.
10582
10583 Return NULL_TREE if no simplification was possible, otherwise return the
10584 simplified form of the call as a tree.
10585
10586 The simplified form may be a constant or other expression which
10587 computes the same value, but in a more efficient manner (including
10588 calls to other builtin functions).
10589
10590 The call may contain arguments which need to be evaluated, but
10591 which are not useful to determine the result of the call. In
10592 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10593 COMPOUND_EXPR will be an argument which must be evaluated.
10594 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10595 COMPOUND_EXPR in the chain will contain the tree for the simplified
10596 form of the builtin function call. */
10597
10598 static tree
10599 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10600 {
10601 if (!validate_arg (s1, POINTER_TYPE)
10602 || !validate_arg (s2, POINTER_TYPE))
10603 return NULL_TREE;
10604 else
10605 {
10606 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10607
10608 /* If both arguments are constants, evaluate at compile-time. */
10609 if (p1 && p2)
10610 {
10611 const size_t r = strspn (p1, p2);
10612 return build_int_cst (size_type_node, r);
10613 }
10614
10615 /* If either argument is "", the result is zero. */
10616 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10617 /* Evaluate and ignore both arguments in case either one has
10618 side-effects. */
10619 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10620 s1, s2);
10621 return NULL_TREE;
10622 }
10623 }
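
/* For illustration only, the strspn folding above performs, e.g.:
     strspn ("hello", "hel")  ->  4
     strspn (s, "")           ->  0, with both arguments still
                                  evaluated for side effects. */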
10624
10625 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10626 to the call.
10627
10628 Return NULL_TREE if no simplification was possible, otherwise return the
10629 simplified form of the call as a tree.
10630
10631 The simplified form may be a constant or other expression which
10632 computes the same value, but in a more efficient manner (including
10633 calls to other builtin functions).
10634
10635 The call may contain arguments which need to be evaluated, but
10636 which are not useful to determine the result of the call. In
10637 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10638 COMPOUND_EXPR will be an argument which must be evaluated.
10639 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10640 COMPOUND_EXPR in the chain will contain the tree for the simplified
10641 form of the builtin function call. */
10642
10643 static tree
10644 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10645 {
10646 if (!validate_arg (s1, POINTER_TYPE)
10647 || !validate_arg (s2, POINTER_TYPE))
10648 return NULL_TREE;
10649 else
10650 {
10651 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10652
10653 /* If both arguments are constants, evaluate at compile-time. */
10654 if (p1 && p2)
10655 {
10656 const size_t r = strcspn (p1, p2);
10657 return build_int_cst (size_type_node, r);
10658 }
10659
10660 /* If the first argument is "", the result is zero. */
10661 if (p1 && *p1 == '\0')
10662 {
10663 /* Evaluate and ignore argument s2 in case it has
10664 side-effects. */
10665 return omit_one_operand_loc (loc, size_type_node,
10666 size_zero_node, s2);
10667 }
10668
10669 /* If the second argument is "", return __builtin_strlen(s1). */
10670 if (p2 && *p2 == '\0')
10671 {
10672 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10673
10674 /* If the replacement _DECL isn't initialized, don't do the
10675 transformation. */
10676 if (!fn)
10677 return NULL_TREE;
10678
10679 return build_call_expr_loc (loc, fn, 1, s1);
10680 }
10681 return NULL_TREE;
10682 }
10683 }
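
/* For illustration only, the strcspn folding above performs, e.g.:
     strcspn ("hello", "lo")  ->  2
     strcspn ("", s)          ->  0, with s still evaluated
     strcspn (s, "")          ->  strlen (s). */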
10684
10685 /* Fold the next_arg or va_start call EXP. Return true if an error
10686 was produced, false otherwise. This is done so that we don't output
10687 the error or warning twice or three times. */
10688
10689 bool
10690 fold_builtin_next_arg (tree exp, bool va_start_p)
10691 {
10692 tree fntype = TREE_TYPE (current_function_decl);
10693 int nargs = call_expr_nargs (exp);
10694 tree arg;
10695 /* There is a good chance the current input_location points inside the
10696 definition of the va_start macro (perhaps on the token for the
10697 builtin) in a system header, so warnings will not be emitted.
10698 Use the location in real source code instead. */
10699 source_location current_location =
10700 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10701 NULL);
10702
10703 if (!stdarg_p (fntype))
10704 {
10705 error ("%<va_start%> used in function with fixed args");
10706 return true;
10707 }
10708
10709 if (va_start_p)
10710 {
10711 if (nargs != 2)
10712 {
10713 error ("wrong number of arguments to function %<va_start%>");
10714 return true;
10715 }
10716 arg = CALL_EXPR_ARG (exp, 1);
10717 }
10718 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10719 once we have checked the arguments and, if needed, issued a warning. */
10720 else
10721 {
10722 if (nargs == 0)
10723 {
10724 /* Evidently an out of date version of <stdarg.h>; can't validate
10725 va_start's second argument, but can still work as intended. */
10726 warning_at (current_location,
10727 OPT_Wvarargs,
10728 "%<__builtin_next_arg%> called without an argument");
10729 return true;
10730 }
10731 else if (nargs > 1)
10732 {
10733 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10734 return true;
10735 }
10736 arg = CALL_EXPR_ARG (exp, 0);
10737 }
10738
10739 if (TREE_CODE (arg) == SSA_NAME)
10740 arg = SSA_NAME_VAR (arg);
10741
10742 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10743 or __builtin_next_arg (0) the first time we see it, after checking
10744 the arguments and if needed issuing a warning. */
10745 if (!integer_zerop (arg))
10746 {
10747 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10748
10749 /* Strip off all nops for the sake of the comparison. This
10750 is not quite the same as STRIP_NOPS. It does more.
10751 We must also strip off INDIRECT_EXPR for C++ reference
10752 parameters. */
10753 while (CONVERT_EXPR_P (arg)
10754 || TREE_CODE (arg) == INDIRECT_REF)
10755 arg = TREE_OPERAND (arg, 0);
10756 if (arg != last_parm)
10757 {
10758 /* FIXME: Sometimes with the tree optimizers we can end up with
10759 an argument that is not the last argument even though the user
10760 used the last argument. We just warn and set the arg to be the
10761 last argument so that we will not get wrong code because of
10762 it. */
10763 warning_at (current_location,
10764 OPT_Wvarargs,
10765 "second parameter of %<va_start%> not last named argument");
10766 }
10767
10768 /* Undefined by C99 7.15.1.4p4 (va_start):
10769 "If the parameter parmN is declared with the register storage
10770 class, with a function or array type, or with a type that is
10771 not compatible with the type that results after application of
10772 the default argument promotions, the behavior is undefined."
10773 */
10774 else if (DECL_REGISTER (arg))
10775 {
10776 warning_at (current_location,
10777 OPT_Wvarargs,
10778 "undefined behaviour when second parameter of "
10779 "%<va_start%> is declared with %<register%> storage");
10780 }
10781
10782 /* We want to verify the second parameter just once before the tree
10783 optimizers are run and then avoid keeping it in the tree,
10784 as otherwise we could warn even for correct code like:
10785 void foo (int i, ...)
10786 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10787 if (va_start_p)
10788 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10789 else
10790 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10791 }
10792 return false;
10793 }
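
/* For illustration only: given
     void f (int a, int last, ...)
     { va_list ap; va_start (ap, last); va_end (ap); }
   the check above accepts LAST, whereas va_start (ap, a) triggers
   the -Wvarargs warning about the second parameter not being the
   last named argument. */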
10794
10795
10796 /* Expand a call EXP to __builtin_object_size. */
10797
10798 static rtx
10799 expand_builtin_object_size (tree exp)
10800 {
10801 tree ost;
10802 int object_size_type;
10803 tree fndecl = get_callee_fndecl (exp);
10804
10805 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10806 {
10807 error ("%Kfirst argument of %D must be a pointer, second integer constant",
10808 exp, fndecl);
10809 expand_builtin_trap ();
10810 return const0_rtx;
10811 }
10812
10813 ost = CALL_EXPR_ARG (exp, 1);
10814 STRIP_NOPS (ost);
10815
10816 if (TREE_CODE (ost) != INTEGER_CST
10817 || tree_int_cst_sgn (ost) < 0
10818 || compare_tree_int (ost, 3) > 0)
10819 {
10820 error ("%Klast argument of %D is not integer constant between 0 and 3",
10821 exp, fndecl);
10822 expand_builtin_trap ();
10823 return const0_rtx;
10824 }
10825
10826 object_size_type = tree_to_shwi (ost);
10827
10828 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10829 }
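
/* For illustration only: when expansion is reached without the call
   having been folded earlier, __builtin_object_size (p, 0) expands
   to (size_t) -1 and __builtin_object_size (p, 2) expands to
   (size_t) 0, matching the fallback values returned above. */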
10830
10831 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10832 FCODE is the BUILT_IN_* to use.
10833 Return NULL_RTX if we failed; the caller should emit a normal call,
10834 otherwise try to get the result in TARGET, if convenient (and in
10835 mode MODE if that's convenient). */
10836
10837 static rtx
10838 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10839 enum built_in_function fcode)
10840 {
10841 tree dest, src, len, size;
10842
10843 if (!validate_arglist (exp,
10844 POINTER_TYPE,
10845 fcode == BUILT_IN_MEMSET_CHK
10846 ? INTEGER_TYPE : POINTER_TYPE,
10847 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10848 return NULL_RTX;
10849
10850 dest = CALL_EXPR_ARG (exp, 0);
10851 src = CALL_EXPR_ARG (exp, 1);
10852 len = CALL_EXPR_ARG (exp, 2);
10853 size = CALL_EXPR_ARG (exp, 3);
10854
10855 if (! tree_fits_uhwi_p (size))
10856 return NULL_RTX;
10857
10858 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10859 {
10860 tree fn;
10861
10862 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
10863 {
10864 warning_at (tree_nonartificial_location (exp),
10865 0, "%Kcall to %D will always overflow destination buffer",
10866 exp, get_callee_fndecl (exp));
10867 return NULL_RTX;
10868 }
10869
10870 fn = NULL_TREE;
10871 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10872 mem{cpy,pcpy,move,set} is available. */
10873 switch (fcode)
10874 {
10875 case BUILT_IN_MEMCPY_CHK:
10876 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10877 break;
10878 case BUILT_IN_MEMPCPY_CHK:
10879 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10880 break;
10881 case BUILT_IN_MEMMOVE_CHK:
10882 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10883 break;
10884 case BUILT_IN_MEMSET_CHK:
10885 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10886 break;
10887 default:
10888 break;
10889 }
10890
10891 if (! fn)
10892 return NULL_RTX;
10893
10894 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10895 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10896 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10897 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10898 }
10899 else if (fcode == BUILT_IN_MEMSET_CHK)
10900 return NULL_RTX;
10901 else
10902 {
10903 unsigned int dest_align = get_pointer_alignment (dest);
10904
10905 /* If DEST is not a pointer type, call the normal function. */
10906 if (dest_align == 0)
10907 return NULL_RTX;
10908
10909 /* If SRC and DEST are the same (and not volatile), do nothing. */
10910 if (operand_equal_p (src, dest, 0))
10911 {
10912 tree expr;
10913
10914 if (fcode != BUILT_IN_MEMPCPY_CHK)
10915 {
10916 /* Evaluate and ignore LEN in case it has side-effects. */
10917 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10918 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10919 }
10920
10921 expr = fold_build_pointer_plus (dest, len);
10922 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10923 }
10924
10925 /* __memmove_chk special case. */
10926 if (fcode == BUILT_IN_MEMMOVE_CHK)
10927 {
10928 unsigned int src_align = get_pointer_alignment (src);
10929
10930 if (src_align == 0)
10931 return NULL_RTX;
10932
10933 /* If src is categorized for a readonly section we can use
10934 normal __memcpy_chk. */
10935 if (readonly_data_expr (src))
10936 {
10937 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10938 if (!fn)
10939 return NULL_RTX;
10940 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10941 dest, src, len, size);
10942 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10943 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10944 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10945 }
10946 }
10947 return NULL_RTX;
10948 }
10949 }
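
/* For illustration only: for
     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));
   the constant length 4 fits in the known object size 8, so the call
   is expanded as a plain memcpy (buf, src, 4); a constant length
   greater than the object size would instead trigger the overflow
   warning above. */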
10950
10951 /* Emit a warning if a buffer overflow is detected at compile time. */
10952
10953 static void
10954 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10955 {
10956 int is_strlen = 0;
10957 tree len, size;
10958 location_t loc = tree_nonartificial_location (exp);
10959
10960 switch (fcode)
10961 {
10962 case BUILT_IN_STRCPY_CHK:
10963 case BUILT_IN_STPCPY_CHK:
10964 /* For __strcat_chk the warning will be emitted only if overflowing
10965 by at least strlen (dest) + 1 bytes. */
10966 case BUILT_IN_STRCAT_CHK:
10967 len = CALL_EXPR_ARG (exp, 1);
10968 size = CALL_EXPR_ARG (exp, 2);
10969 is_strlen = 1;
10970 break;
10971 case BUILT_IN_STRNCAT_CHK:
10972 case BUILT_IN_STRNCPY_CHK:
10973 case BUILT_IN_STPNCPY_CHK:
10974 len = CALL_EXPR_ARG (exp, 2);
10975 size = CALL_EXPR_ARG (exp, 3);
10976 break;
10977 case BUILT_IN_SNPRINTF_CHK:
10978 case BUILT_IN_VSNPRINTF_CHK:
10979 len = CALL_EXPR_ARG (exp, 1);
10980 size = CALL_EXPR_ARG (exp, 3);
10981 break;
10982 default:
10983 gcc_unreachable ();
10984 }
10985
10986 if (!len || !size)
10987 return;
10988
10989 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10990 return;
10991
10992 if (is_strlen)
10993 {
10994 len = c_strlen (len, 1);
10995 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10996 return;
10997 }
10998 else if (fcode == BUILT_IN_STRNCAT_CHK)
10999 {
11000 tree src = CALL_EXPR_ARG (exp, 1);
11001 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11002 return;
11003 src = c_strlen (src, 1);
11004 if (! src || ! tree_fits_uhwi_p (src))
11005 {
11006 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11007 exp, get_callee_fndecl (exp));
11008 return;
11009 }
11010 else if (tree_int_cst_lt (src, size))
11011 return;
11012 }
11013 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11014 return;
11015
11016 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11017 exp, get_callee_fndecl (exp));
11018 }
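
/* For illustration only: for
     char buf[4];
     __builtin___strcpy_chk (buf, "hello", __builtin_object_size (buf, 0));
   the source length 5 is not smaller than the destination size 4, so
   the "will always overflow" warning above is emitted at compile
   time. */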
11019
11020 /* Emit a warning if a buffer overflow is detected at compile time
11021 in __sprintf_chk/__vsprintf_chk calls. */
11022
11023 static void
11024 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11025 {
11026 tree size, len, fmt;
11027 const char *fmt_str;
11028 int nargs = call_expr_nargs (exp);
11029
11030 /* Verify the required arguments in the original call. */
11031
11032 if (nargs < 4)
11033 return;
11034 size = CALL_EXPR_ARG (exp, 2);
11035 fmt = CALL_EXPR_ARG (exp, 3);
11036
11037 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11038 return;
11039
11040 /* Check whether the format is a literal string constant. */
11041 fmt_str = c_getstr (fmt);
11042 if (fmt_str == NULL)
11043 return;
11044
11045 if (!init_target_chars ())
11046 return;
11047
11048 /* If the format doesn't contain % args or %%, we know its size. */
11049 if (strchr (fmt_str, target_percent) == 0)
11050 len = build_int_cstu (size_type_node, strlen (fmt_str));
11051 /* If the format is "%s" and the first ... argument is a string literal,
11052 we know its size too. */
11053 else if (fcode == BUILT_IN_SPRINTF_CHK
11054 && strcmp (fmt_str, target_percent_s) == 0)
11055 {
11056 tree arg;
11057
11058 if (nargs < 5)
11059 return;
11060 arg = CALL_EXPR_ARG (exp, 4);
11061 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11062 return;
11063
11064 len = c_strlen (arg, 1);
11065 if (!len || ! tree_fits_uhwi_p (len))
11066 return;
11067 }
11068 else
11069 return;
11070
11071 if (! tree_int_cst_lt (len, size))
11072 warning_at (tree_nonartificial_location (exp),
11073 0, "%Kcall to %D will always overflow destination buffer",
11074 exp, get_callee_fndecl (exp));
11075 }
11076
11077 /* Emit a warning if free is called with the address of a variable. */
11078
11079 static void
11080 maybe_emit_free_warning (tree exp)
11081 {
11082 tree arg = CALL_EXPR_ARG (exp, 0);
11083
11084 STRIP_NOPS (arg);
11085 if (TREE_CODE (arg) != ADDR_EXPR)
11086 return;
11087
11088 arg = get_base_address (TREE_OPERAND (arg, 0));
11089 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11090 return;
11091
11092 if (SSA_VAR_P (arg))
11093 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11094 "%Kattempt to free a non-heap object %qD", exp, arg);
11095 else
11096 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11097 "%Kattempt to free a non-heap object", exp);
11098 }
11099
11100 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11101 if possible. */
11102
11103 static tree
11104 fold_builtin_object_size (tree ptr, tree ost)
11105 {
11106 unsigned HOST_WIDE_INT bytes;
11107 int object_size_type;
11108
11109 if (!validate_arg (ptr, POINTER_TYPE)
11110 || !validate_arg (ost, INTEGER_TYPE))
11111 return NULL_TREE;
11112
11113 STRIP_NOPS (ost);
11114
11115 if (TREE_CODE (ost) != INTEGER_CST
11116 || tree_int_cst_sgn (ost) < 0
11117 || compare_tree_int (ost, 3) > 0)
11118 return NULL_TREE;
11119
11120 object_size_type = tree_to_shwi (ost);
11121
11122 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11123 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11124 and (size_t) 0 for types 2 and 3. */
11125 if (TREE_SIDE_EFFECTS (ptr))
11126 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11127
11128 if (TREE_CODE (ptr) == ADDR_EXPR)
11129 {
11130 bytes = compute_builtin_object_size (ptr, object_size_type);
11131 if (wi::fits_to_tree_p (bytes, size_type_node))
11132 return build_int_cstu (size_type_node, bytes);
11133 }
11134 else if (TREE_CODE (ptr) == SSA_NAME)
11135 {
11136 /* If the object size is not known yet, delay folding until
11137 later. Maybe subsequent passes will help determine
11138 it. */
11139 bytes = compute_builtin_object_size (ptr, object_size_type);
11140 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11141 && wi::fits_to_tree_p (bytes, size_type_node))
11142 return build_int_cstu (size_type_node, bytes);
11143 }
11144
11145 return NULL_TREE;
11146 }
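
/* For illustration only: given
     char buf[64];
   __builtin_object_size (&buf[16], 0) folds to 48 (as does type 2,
   since the object is exactly known), while a pointer argument with
   side effects folds to (size_t) -1 for types 0 and 1 and to 0 for
   types 2 and 3, per the comment above. */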
11147
11148 /* Builtins with folding operations that operate on "..." arguments
11149 need special handling; we need to store the arguments in a convenient
11150 data structure before attempting any folding. Fortunately there are
11151 only a few builtins that fall into this category. FNDECL is the
11152 function, EXP is the CALL_EXPR for the call. */
11153
11154 static tree
11155 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11156 {
11157 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11158 tree ret = NULL_TREE;
11159
11160 switch (fcode)
11161 {
11162 case BUILT_IN_FPCLASSIFY:
11163 ret = fold_builtin_fpclassify (loc, args, nargs);
11164 break;
11165
11166 default:
11167 break;
11168 }
11169 if (ret)
11170 {
11171 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11172 SET_EXPR_LOCATION (ret, loc);
11173 TREE_NO_WARNING (ret) = 1;
11174 return ret;
11175 }
11176 return NULL_TREE;
11177 }
11178
11179 /* Initialize format string characters in the target charset. */
11180
11181 bool
11182 init_target_chars (void)
11183 {
11184 static bool init;
11185 if (!init)
11186 {
11187 target_newline = lang_hooks.to_target_charset ('\n');
11188 target_percent = lang_hooks.to_target_charset ('%');
11189 target_c = lang_hooks.to_target_charset ('c');
11190 target_s = lang_hooks.to_target_charset ('s');
11191 if (target_newline == 0 || target_percent == 0 || target_c == 0
11192 || target_s == 0)
11193 return false;
11194
11195 target_percent_c[0] = target_percent;
11196 target_percent_c[1] = target_c;
11197 target_percent_c[2] = '\0';
11198
11199 target_percent_s[0] = target_percent;
11200 target_percent_s[1] = target_s;
11201 target_percent_s[2] = '\0';
11202
11203 target_percent_s_newline[0] = target_percent;
11204 target_percent_s_newline[1] = target_s;
11205 target_percent_s_newline[2] = target_newline;
11206 target_percent_s_newline[3] = '\0';
11207
11208 init = true;
11209 }
11210 return true;
11211 }
11212
11213 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11214 and that no overflow/underflow occurred. INEXACT is true if M was not
11215 exactly calculated. TYPE is the tree type for the result. This
11216 function assumes that the caller cleared the MPFR flags before
11217 calculating M, so that any flag set in the meantime can be detected
11218 here. Return NULL_TREE if any checks fail. */
11219
11220 static tree
11221 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11222 {
11223 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11224 overflow/underflow occurred. If -frounding-math, proceed iff the
11225 result of calling FUNC was exact. */
11226 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11227 && (!flag_rounding_math || !inexact))
11228 {
11229 REAL_VALUE_TYPE rr;
11230
11231 real_from_mpfr (&rr, m, type, GMP_RNDN);
11232 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11233 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11234 but the mpfr_t is not, then we underflowed in the
11235 conversion. */
11236 if (real_isfinite (&rr)
11237 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11238 {
11239 REAL_VALUE_TYPE rmode;
11240
11241 real_convert (&rmode, TYPE_MODE (type), &rr);
11242 /* Proceed iff the specified mode can hold the value. */
11243 if (real_identical (&rmode, &rr))
11244 return build_real (type, rmode);
11245 }
11246 }
11247 return NULL_TREE;
11248 }
11249
11250 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11251 number and that no overflow/underflow occurred. INEXACT is true if M
11252 was not exactly calculated. TYPE is the tree type for the result.
11253 This function assumes that the caller cleared the MPFR flags before
11254 calculating M, so that any flag set in the meantime can be detected
11255 here. Return NULL_TREE if any checks fail; if FORCE_CONVERT is
11256 true, the checks are bypassed. */
11257
11258 static tree
11259 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11260 {
11261 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11262 overflow/underflow occurred. If -frounding-math, proceed iff the
11263 result of calling FUNC was exact. */
11264 if (force_convert
11265 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11266 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11267 && (!flag_rounding_math || !inexact)))
11268 {
11269 REAL_VALUE_TYPE re, im;
11270
11271 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11272 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11273 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11274 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11275 but the mpfr_t is not, then we underflowed in the
11276 conversion. */
11277 if (force_convert
11278 || (real_isfinite (&re) && real_isfinite (&im)
11279 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11280 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11281 {
11282 REAL_VALUE_TYPE re_mode, im_mode;
11283
11284 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11285 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11286 /* Proceed iff the specified mode can hold the value. */
11287 if (force_convert
11288 || (real_identical (&re_mode, &re)
11289 && real_identical (&im_mode, &im)))
11290 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11291 build_real (TREE_TYPE (type), im_mode));
11292 }
11293 }
11294 return NULL_TREE;
11295 }
11296
11297 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11298 FUNC on it and return the resulting value as a tree with type TYPE.
11299 If MIN and/or MAX are not NULL, then the supplied ARG must be
11300 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11301 acceptable values, otherwise they are not. The mpfr precision is
11302 set to the precision of TYPE. We assume that function FUNC returns
11303 zero if the result could be calculated exactly within the requested
11304 precision. */
11305
11306 static tree
11307 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11308 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11309 bool inclusive)
11310 {
11311 tree result = NULL_TREE;
11312
11313 STRIP_NOPS (arg);
11314
11315 /* To proceed, MPFR must exactly represent the target floating point
11316 format, which only happens when the target base equals two. */
11317 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11318 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11319 {
11320 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11321
11322 if (real_isfinite (ra)
11323 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11324 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11325 {
11326 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11327 const int prec = fmt->p;
11328 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11329 int inexact;
11330 mpfr_t m;
11331
11332 mpfr_init2 (m, prec);
11333 mpfr_from_real (m, ra, GMP_RNDN);
11334 mpfr_clear_flags ();
11335 inexact = func (m, m, rnd);
11336 result = do_mpfr_ckconv (m, type, inexact);
11337 mpfr_clear (m);
11338 }
11339 }
11340
11341 return result;
11342 }
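
/* For illustration only, a caller folding a constant
     __builtin_sin (1.0)
   would invoke do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false)
   and receive a REAL_CST computed at the precision of TYPE, or
   NULL_TREE if the checks in do_mpfr_ckconv fail. */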
11343
11344 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11345 FUNC on it and return the resulting value as a tree with type TYPE.
11346 The mpfr precision is set to the precision of TYPE. We assume that
11347 function FUNC returns zero if the result could be calculated
11348 exactly within the requested precision. */
11349
11350 static tree
11351 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11352 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11353 {
11354 tree result = NULL_TREE;
11355
11356 STRIP_NOPS (arg1);
11357 STRIP_NOPS (arg2);
11358
11359 /* To proceed, MPFR must exactly represent the target floating point
11360 format, which only happens when the target base equals two. */
11361 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11362 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11363 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11364 {
11365 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11366 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11367
11368 if (real_isfinite (ra1) && real_isfinite (ra2))
11369 {
11370 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11371 const int prec = fmt->p;
11372 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11373 int inexact;
11374 mpfr_t m1, m2;
11375
11376 mpfr_inits2 (prec, m1, m2, NULL);
11377 mpfr_from_real (m1, ra1, GMP_RNDN);
11378 mpfr_from_real (m2, ra2, GMP_RNDN);
11379 mpfr_clear_flags ();
11380 inexact = func (m1, m1, m2, rnd);
11381 result = do_mpfr_ckconv (m1, type, inexact);
11382 mpfr_clears (m1, m2, NULL);
11383 }
11384 }
11385
11386 return result;
11387 }
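
/* For illustration only, a caller folding a constant
     __builtin_atan2 (1.0, 2.0)
   would invoke do_mpfr_arg2 (arg1, arg2, type, mpfr_atan2), with
   both REAL_CST arguments converted to mpfr_t at the precision of
   TYPE. */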
11388
11389 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11390 FUNC on it and return the resulting value as a tree with type TYPE.
11391 The mpfr precision is set to the precision of TYPE. We assume that
11392 function FUNC returns zero if the result could be calculated
11393 exactly within the requested precision. */
11394
11395 static tree
11396 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11397 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11398 {
11399 tree result = NULL_TREE;
11400
11401 STRIP_NOPS (arg1);
11402 STRIP_NOPS (arg2);
11403 STRIP_NOPS (arg3);
11404
11405 /* To proceed, MPFR must exactly represent the target floating point
11406 format, which only happens when the target base equals two. */
11407 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11408 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11409 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11410 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11411 {
11412 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11413 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11414 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11415
11416 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11417 {
11418 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11419 const int prec = fmt->p;
11420 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11421 int inexact;
11422 mpfr_t m1, m2, m3;
11423
11424 mpfr_inits2 (prec, m1, m2, m3, NULL);
11425 mpfr_from_real (m1, ra1, GMP_RNDN);
11426 mpfr_from_real (m2, ra2, GMP_RNDN);
11427 mpfr_from_real (m3, ra3, GMP_RNDN);
11428 mpfr_clear_flags ();
11429 inexact = func (m1, m1, m2, m3, rnd);
11430 result = do_mpfr_ckconv (m1, type, inexact);
11431 mpfr_clears (m1, m2, m3, NULL);
11432 }
11433 }
11434
11435 return result;
11436 }
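
/* For illustration only, a caller folding a constant
     __builtin_fma (x, y, z)
   with three REAL_CST arguments would invoke
   do_mpfr_arg3 (arg1, arg2, arg3, type, mpfr_fma). */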
11437
11438 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11439 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11440 If ARG_SINP and ARG_COSP are NULL then the result is returned
11441 as a complex value.
11442 The type is taken from the type of ARG and is used for setting the
11443 precision of the calculation and results. */
11444
11445 static tree
11446 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11447 {
11448 tree const type = TREE_TYPE (arg);
11449 tree result = NULL_TREE;
11450
11451 STRIP_NOPS (arg);
11452
11453 /* To proceed, MPFR must exactly represent the target floating point
11454 format, which only happens when the target base equals two. */
11455 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11456 && TREE_CODE (arg) == REAL_CST
11457 && !TREE_OVERFLOW (arg))
11458 {
11459 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11460
11461 if (real_isfinite (ra))
11462 {
11463 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11464 const int prec = fmt->p;
11465 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11466 tree result_s, result_c;
11467 int inexact;
11468 mpfr_t m, ms, mc;
11469
11470 mpfr_inits2 (prec, m, ms, mc, NULL);
11471 mpfr_from_real (m, ra, GMP_RNDN);
11472 mpfr_clear_flags ();
11473 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11474 result_s = do_mpfr_ckconv (ms, type, inexact);
11475 result_c = do_mpfr_ckconv (mc, type, inexact);
11476 mpfr_clears (m, ms, mc, NULL);
11477 if (result_s && result_c)
11478 {
11479 /* If we are to return the result as a complex value, do so. */
11480 if (!arg_sinp && !arg_cosp)
11481 return build_complex (build_complex_type (type),
11482 result_c, result_s);
11483
11484 /* Dereference the sin/cos pointer arguments. */
11485 arg_sinp = build_fold_indirect_ref (arg_sinp);
11486 arg_cosp = build_fold_indirect_ref (arg_cosp);
11487 /* Proceed only if valid pointer types were passed in. */
11488 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11489 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11490 {
11491 /* Set the values. */
11492 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11493 result_s);
11494 TREE_SIDE_EFFECTS (result_s) = 1;
11495 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11496 result_c);
11497 TREE_SIDE_EFFECTS (result_c) = 1;
11498 /* Combine the assignments into a compound expr. */
11499 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11500 result_s, result_c));
11501 }
11502 }
11503 }
11504 }
11505 return result;
11506 }
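
/* For illustration only: for a constant X,
     sincos (X, &s, &c)
   is folded to the pair of assignments built above, while
     __builtin_cexpi (X)
   (ARG_SINP == ARG_COSP == NULL) yields the complex constant
   cos (X) + sin (X) * I. */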
11507
11508 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11509 two-argument mpfr order N Bessel function FUNC on them and return
11510 the resulting value as a tree with type TYPE. The mpfr precision
11511 is set to the precision of TYPE. We assume that function FUNC
11512 returns zero if the result could be calculated exactly within the
11513 requested precision. */
11514 static tree
11515 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11516 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11517 const REAL_VALUE_TYPE *min, bool inclusive)
11518 {
11519 tree result = NULL_TREE;
11520
11521 STRIP_NOPS (arg1);
11522 STRIP_NOPS (arg2);
11523
11524 /* To proceed, MPFR must exactly represent the target floating point
11525 format, which only happens when the target base equals two. */
11526 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11527 && tree_fits_shwi_p (arg1)
11528 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11529 {
11530 const HOST_WIDE_INT n = tree_to_shwi (arg1);
11531 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11532
11533 if (n == (long)n
11534 && real_isfinite (ra)
11535 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
11536 {
11537 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11538 const int prec = fmt->p;
11539 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11540 int inexact;
11541 mpfr_t m;
11542
11543 mpfr_init2 (m, prec);
11544 mpfr_from_real (m, ra, GMP_RNDN);
11545 mpfr_clear_flags ();
11546 inexact = func (m, n, m, rnd);
11547 result = do_mpfr_ckconv (m, type, inexact);
11548 mpfr_clear (m);
11549 }
11550 }
11551
11552 return result;
11553 }
11554
11555 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11556 the pointer *(ARG_QUO) and return the result. The type is taken
11557 from the type of ARG0 and is used for setting the precision of the
11558 calculation and results. */
11559
11560 static tree
11561 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11562 {
11563 tree const type = TREE_TYPE (arg0);
11564 tree result = NULL_TREE;
11565
11566 STRIP_NOPS (arg0);
11567 STRIP_NOPS (arg1);
11568
11569 /* To proceed, MPFR must exactly represent the target floating point
11570 format, which only happens when the target base equals two. */
11571 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11572 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11573 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11574 {
11575 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11576 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11577
11578 if (real_isfinite (ra0) && real_isfinite (ra1))
11579 {
11580 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11581 const int prec = fmt->p;
11582 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11583 tree result_rem;
11584 long integer_quo;
11585 mpfr_t m0, m1;
11586
11587 mpfr_inits2 (prec, m0, m1, NULL);
11588 mpfr_from_real (m0, ra0, GMP_RNDN);
11589 mpfr_from_real (m1, ra1, GMP_RNDN);
11590 mpfr_clear_flags ();
11591 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11592 /* Remquo is independent of the rounding mode, so pass
11593 inexact=0 to do_mpfr_ckconv(). */
11594 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11595 mpfr_clears (m0, m1, NULL);
11596 if (result_rem)
11597 {
11598 /* MPFR calculates quo in the host's long, so it may
11599 return more bits in quo than the target int can hold
11600 if sizeof(host long) > sizeof(target int). This can
11601 happen even for native compilers in LP64 mode. In
11602 these cases, reduce the quo value modulo the largest
11603 number that the target int can hold while leaving one
11604 bit for the sign. */
11605 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11606 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11607
11608 /* Dereference the quo pointer argument. */
11609 arg_quo = build_fold_indirect_ref (arg_quo);
11610 /* Proceed iff a valid pointer type was passed in. */
11611 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11612 {
11613 /* Set the value. */
11614 tree result_quo
11615 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11616 build_int_cst (TREE_TYPE (arg_quo),
11617 integer_quo));
11618 TREE_SIDE_EFFECTS (result_quo) = 1;
11619 /* Combine the quo assignment with the rem. */
11620 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11621 result_quo, result_rem));
11622 }
11623 }
11624 }
11625 }
11626 return result;
11627 }
11628
11629 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11630 resulting value as a tree with type TYPE. The mpfr precision is
11631 set to the precision of TYPE. We assume that this mpfr function
11632 returns zero if the result could be calculated exactly within the
11633 requested precision. In addition, the integer pointer represented
11634 by ARG_SG will be dereferenced and set to the appropriate signgam
11635 (-1,1) value. */
11636
11637 static tree
11638 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11639 {
11640 tree result = NULL_TREE;
11641
11642 STRIP_NOPS (arg);
11643
11644 /* To proceed, MPFR must exactly represent the target floating point
11645 format, which only happens when the target base equals two. Also
11646 verify ARG is a constant and that ARG_SG is an int pointer. */
11647 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11648 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11649 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11650 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11651 {
11652 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11653
11654 /* In addition to NaN and Inf, the argument cannot be zero or a
11655 negative integer. */
11656 if (real_isfinite (ra)
11657 && ra->cl != rvc_zero
11658 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11659 {
11660 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11661 const int prec = fmt->p;
11662 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11663 int inexact, sg;
11664 mpfr_t m;
11665 tree result_lg;
11666
11667 mpfr_init2 (m, prec);
11668 mpfr_from_real (m, ra, GMP_RNDN);
11669 mpfr_clear_flags ();
11670 inexact = mpfr_lgamma (m, &sg, m, rnd);
11671 result_lg = do_mpfr_ckconv (m, type, inexact);
11672 mpfr_clear (m);
11673 if (result_lg)
11674 {
11675 tree result_sg;
11676
11677 /* Dereference the arg_sg pointer argument. */
11678 arg_sg = build_fold_indirect_ref (arg_sg);
11679 /* Assign the signgam value into *arg_sg. */
11680 result_sg = fold_build2 (MODIFY_EXPR,
11681 TREE_TYPE (arg_sg), arg_sg,
11682 build_int_cst (TREE_TYPE (arg_sg), sg));
11683 TREE_SIDE_EFFECTS (result_sg) = 1;
11684 /* Combine the signgam assignment with the lgamma result. */
11685 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11686 result_sg, result_lg));
11687 }
11688 }
11689 }
11690
11691 return result;
11692 }
11693
11694 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
11695 function FUNC on it and return the resulting value as a tree with
11696 type TYPE. The mpfr precision is set to the precision of TYPE. We
11697 assume that function FUNC returns zero if the result could be
11698 calculated exactly within the requested precision. */
11699
11700 static tree
11701 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
11702 {
11703 tree result = NULL_TREE;
11704
11705 STRIP_NOPS (arg);
11706
11707 /* To proceed, MPFR must exactly represent the target floating point
11708 format, which only happens when the target base equals two. */
11709 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
11710 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
11711 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
11712 {
11713 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
11714 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
11715
11716 if (real_isfinite (re) && real_isfinite (im))
11717 {
11718 const struct real_format *const fmt =
11719 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11720 const int prec = fmt->p;
11721 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11722 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11723 int inexact;
11724 mpc_t m;
11725
11726 mpc_init2 (m, prec);
11727 mpfr_from_real (mpc_realref (m), re, rnd);
11728 mpfr_from_real (mpc_imagref (m), im, rnd);
11729 mpfr_clear_flags ();
11730 inexact = func (m, m, crnd);
11731 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
11732 mpc_clear (m);
11733 }
11734 }
11735
11736 return result;
11737 }
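
/* For illustration only, a caller folding a constant
     __builtin_ccos (z)
   would invoke do_mpc_arg1 (arg, type, mpc_cos) on the
   COMPLEX_CST argument. */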
11738
11739 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11740 mpc function FUNC on it and return the resulting value as a tree
11741 with type TYPE. The mpfr precision is set to the precision of
11742 TYPE. We assume that function FUNC returns zero if the result
11743 could be calculated exactly within the requested precision. If
11744 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11745 in the arguments and/or results. */
11746
11747 tree
11748 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11749 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11750 {
11751 tree result = NULL_TREE;
11752
11753 STRIP_NOPS (arg0);
11754 STRIP_NOPS (arg1);
11755
11756 /* To proceed, MPFR must exactly represent the target floating point
11757 format, which only happens when the target base equals two. */
11758 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11759 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11760 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11761 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11762 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11763 {
11764 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11765 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11766 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11767 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11768
11769 if (do_nonfinite
11770 || (real_isfinite (re0) && real_isfinite (im0)
11771 && real_isfinite (re1) && real_isfinite (im1)))
11772 {
11773 const struct real_format *const fmt =
11774 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11775 const int prec = fmt->p;
11776 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11777 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11778 int inexact;
11779 mpc_t m0, m1;
11780
11781 mpc_init2 (m0, prec);
11782 mpc_init2 (m1, prec);
11783 mpfr_from_real (mpc_realref (m0), re0, rnd);
11784 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11785 mpfr_from_real (mpc_realref (m1), re1, rnd);
11786 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11787 mpfr_clear_flags ();
11788 inexact = func (m0, m0, m1, crnd);
11789 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11790 mpc_clear (m0);
11791 mpc_clear (m1);
11792 }
11793 }
11794
11795 return result;
11796 }
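
/* For illustration only, a caller could fold a constant
     __builtin_cpow (z1, z2)
   via do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
   with DO_NONFINITE nonzero, arguments containing Inf or NaN are
   folded as well. */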
11797
11798 /* A wrapper function for builtin folding that prevents warnings for
11799 "statement without effect" and the like, caused by removing the
11800 call node before the warning is generated. */
11801
11802 tree
11803 fold_call_stmt (gcall *stmt, bool ignore)
11804 {
11805 tree ret = NULL_TREE;
11806 tree fndecl = gimple_call_fndecl (stmt);
11807 location_t loc = gimple_location (stmt);
11808 if (fndecl
11809 && TREE_CODE (fndecl) == FUNCTION_DECL
11810 && DECL_BUILT_IN (fndecl)
11811 && !gimple_call_va_arg_pack_p (stmt))
11812 {
11813 int nargs = gimple_call_num_args (stmt);
11814 tree *args = (nargs > 0
11815 ? gimple_call_arg_ptr (stmt, 0)
11816 : &error_mark_node);
11817
11818 if (avoid_folding_inline_builtin (fndecl))
11819 return NULL_TREE;
11820 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11821 {
11822 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11823 }
11824 else
11825 {
11826 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11827 if (ret)
11828 {
11829 /* Propagate location information from original call to
11830 expansion of builtin. Otherwise things like
11831 maybe_emit_chk_warning, that operate on the expansion
11832 of a builtin, will use the wrong location information. */
11833 if (gimple_has_location (stmt))
11834 {
11835 tree realret = ret;
11836 if (TREE_CODE (ret) == NOP_EXPR)
11837 realret = TREE_OPERAND (ret, 0);
11838 if (CAN_HAVE_LOCATION_P (realret)
11839 && !EXPR_HAS_LOCATION (realret))
11840 SET_EXPR_LOCATION (realret, loc);
11841 return realret;
11842 }
11843 return ret;
11844 }
11845 }
11846 }
11847 return NULL_TREE;
11848 }
11849
11850 /* Look up the function in builtin_decl that corresponds to DECL
11851 and set ASMSPEC as its user assembler name. DECL must be a
11852 function decl that declares a builtin. */
11853
11854 void
11855 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11856 {
11857 tree builtin;
11858 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
11859 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
11860 && asmspec != 0);
11861
11862 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11863 set_user_assembler_name (builtin, asmspec);
11864 switch (DECL_FUNCTION_CODE (decl))
11865 {
11866 case BUILT_IN_MEMCPY:
11867 init_block_move_fn (asmspec);
11868 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
11869 break;
11870 case BUILT_IN_MEMSET:
11871 init_block_clear_fn (asmspec);
11872 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
11873 break;
11874 case BUILT_IN_MEMMOVE:
11875 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
11876 break;
11877 case BUILT_IN_MEMCMP:
11878 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
11879 break;
11880 case BUILT_IN_ABORT:
11881 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
11882 break;
11883 case BUILT_IN_FFS:
11884 if (INT_TYPE_SIZE < BITS_PER_WORD)
11885 {
11886 set_user_assembler_libfunc ("ffs", asmspec);
11887 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
11888 MODE_INT, 0), "ffs");
11889 }
11890 break;
11891 default:
11892 break;
11893 }
11894 }
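
/* For illustration only: a declaration such as
     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");
   reaches this function with ASMSPEC == "__my_memcpy", redirecting
   both the builtin expansion and the corresponding libfunc. */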
11895
11896 /* Return true if DECL is a builtin that expands to a constant or similarly
11897 simple code. */
11898 bool
11899 is_simple_builtin (tree decl)
11900 {
11901 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11902 switch (DECL_FUNCTION_CODE (decl))
11903 {
11904 /* Builtins that expand to constants. */
11905 case BUILT_IN_CONSTANT_P:
11906 case BUILT_IN_EXPECT:
11907 case BUILT_IN_OBJECT_SIZE:
11908 case BUILT_IN_UNREACHABLE:
11909 /* Simple register moves or loads from stack. */
11910 case BUILT_IN_ASSUME_ALIGNED:
11911 case BUILT_IN_RETURN_ADDRESS:
11912 case BUILT_IN_EXTRACT_RETURN_ADDR:
11913 case BUILT_IN_FROB_RETURN_ADDR:
11914 case BUILT_IN_RETURN:
11915 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11916 case BUILT_IN_FRAME_ADDRESS:
11917 case BUILT_IN_VA_END:
11918 case BUILT_IN_STACK_SAVE:
11919 case BUILT_IN_STACK_RESTORE:
11920 /* Exception state returns or moves registers around. */
11921 case BUILT_IN_EH_FILTER:
11922 case BUILT_IN_EH_POINTER:
11923 case BUILT_IN_EH_COPY_VALUES:
11924 return true;
11925
11926 default:
11927 return false;
11928 }
11929
11930 return false;
11931 }
11932
11933 /* Return true if DECL is a builtin that is not expensive, i.e., it is
11934 most probably expanded inline into reasonably simple code. This is a
11935 superset of is_simple_builtin. */
11936 bool
11937 is_inexpensive_builtin (tree decl)
11938 {
11939 if (!decl)
11940 return false;
11941 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11942 return true;
11943 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11944 switch (DECL_FUNCTION_CODE (decl))
11945 {
11946 case BUILT_IN_ABS:
11947 case BUILT_IN_ALLOCA:
11948 case BUILT_IN_ALLOCA_WITH_ALIGN:
11949 case BUILT_IN_BSWAP16:
11950 case BUILT_IN_BSWAP32:
11951 case BUILT_IN_BSWAP64:
11952 case BUILT_IN_CLZ:
11953 case BUILT_IN_CLZIMAX:
11954 case BUILT_IN_CLZL:
11955 case BUILT_IN_CLZLL:
11956 case BUILT_IN_CTZ:
11957 case BUILT_IN_CTZIMAX:
11958 case BUILT_IN_CTZL:
11959 case BUILT_IN_CTZLL:
11960 case BUILT_IN_FFS:
11961 case BUILT_IN_FFSIMAX:
11962 case BUILT_IN_FFSL:
11963 case BUILT_IN_FFSLL:
11964 case BUILT_IN_IMAXABS:
11965 case BUILT_IN_FINITE:
11966 case BUILT_IN_FINITEF:
11967 case BUILT_IN_FINITEL:
11968 case BUILT_IN_FINITED32:
11969 case BUILT_IN_FINITED64:
11970 case BUILT_IN_FINITED128:
11971 case BUILT_IN_FPCLASSIFY:
11972 case BUILT_IN_ISFINITE:
11973 case BUILT_IN_ISINF_SIGN:
11974 case BUILT_IN_ISINF:
11975 case BUILT_IN_ISINFF:
11976 case BUILT_IN_ISINFL:
11977 case BUILT_IN_ISINFD32:
11978 case BUILT_IN_ISINFD64:
11979 case BUILT_IN_ISINFD128:
11980 case BUILT_IN_ISNAN:
11981 case BUILT_IN_ISNANF:
11982 case BUILT_IN_ISNANL:
11983 case BUILT_IN_ISNAND32:
11984 case BUILT_IN_ISNAND64:
11985 case BUILT_IN_ISNAND128:
11986 case BUILT_IN_ISNORMAL:
11987 case BUILT_IN_ISGREATER:
11988 case BUILT_IN_ISGREATEREQUAL:
11989 case BUILT_IN_ISLESS:
11990 case BUILT_IN_ISLESSEQUAL:
11991 case BUILT_IN_ISLESSGREATER:
11992 case BUILT_IN_ISUNORDERED:
11993 case BUILT_IN_VA_ARG_PACK:
11994 case BUILT_IN_VA_ARG_PACK_LEN:
11995 case BUILT_IN_VA_COPY:
11996 case BUILT_IN_TRAP:
11997 case BUILT_IN_SAVEREGS:
11998 case BUILT_IN_POPCOUNTL:
11999 case BUILT_IN_POPCOUNTLL:
12000 case BUILT_IN_POPCOUNTIMAX:
12001 case BUILT_IN_POPCOUNT:
12002 case BUILT_IN_PARITYL:
12003 case BUILT_IN_PARITYLL:
12004 case BUILT_IN_PARITYIMAX:
12005 case BUILT_IN_PARITY:
12006 case BUILT_IN_LABS:
12007 case BUILT_IN_LLABS:
12008 case BUILT_IN_PREFETCH:
12009 case BUILT_IN_ACC_ON_DEVICE:
12010 return true;
12011
12012 default:
12013 return is_simple_builtin (decl);
12014 }
12015
12016 return false;
12017 }