1 /* Expand builtin functions.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "asan.h"
64 #include "cilk.h"
65 #include "tree-chkp.h"
66 #include "rtl-chkp.h"
67 #include "internal-fn.h"
68 #include "case-cfn-macros.h"
69 #include "gimple-fold.h"
70 #include "intl.h"
71
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
76
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
83 {
84 #include "builtins.def"
85 };
86
    87 /* Set up an array of builtin_info_type; make sure each element's decl is
    88    initialized to NULL_TREE. */
89 builtin_info_type builtin_info[(int)END_BUILTINS];
90
91 /* Non-zero if __builtin_constant_p should be folded right away. */
92 bool force_folding_builtin_constant_p;
93
94 static rtx c_readstr (const char *, machine_mode);
95 static int target_char_cast (tree, char *);
96 static rtx get_memory_rtx (tree, tree);
97 static int apply_args_size (void);
98 static int apply_result_size (void);
99 static rtx result_vector (int, rtx);
100 static void expand_builtin_prefetch (tree);
101 static rtx expand_builtin_apply_args (void);
102 static rtx expand_builtin_apply_args_1 (void);
103 static rtx expand_builtin_apply (rtx, rtx, rtx);
104 static void expand_builtin_return (rtx);
105 static enum type_class type_to_class (tree);
106 static rtx expand_builtin_classify_type (tree);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_strcmp (tree, rtx);
119 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
121 static rtx expand_builtin_memchr (tree, rtx);
122 static rtx expand_builtin_memcpy (tree, rtx);
123 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
124 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
125 static rtx expand_builtin_memmove (tree, rtx);
126 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
127 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
128 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
129 machine_mode, int, tree);
130 static rtx expand_builtin_strcat (tree, rtx);
131 static rtx expand_builtin_strcpy (tree, rtx);
132 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
133 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
134 static rtx expand_builtin_stpncpy (tree, rtx);
135 static rtx expand_builtin_strncat (tree, rtx);
136 static rtx expand_builtin_strncpy (tree, rtx);
137 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
138 static rtx expand_builtin_memset (tree, rtx, machine_mode);
139 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
140 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
141 static rtx expand_builtin_bzero (tree);
142 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
143 static rtx expand_builtin_alloca (tree);
144 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
145 static rtx expand_builtin_frame_address (tree, tree);
146 static tree stabilize_va_list_loc (location_t, tree, int);
147 static rtx expand_builtin_expect (tree, rtx);
148 static tree fold_builtin_constant_p (tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (location_t, tree, tree);
151 static tree fold_builtin_inf (location_t, tree, int);
152 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
153 static bool validate_arg (const_tree, enum tree_code code);
154 static rtx expand_builtin_fabs (tree, rtx, rtx);
155 static rtx expand_builtin_signbit (tree, rtx);
156 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
157 static tree fold_builtin_isascii (location_t, tree);
158 static tree fold_builtin_toascii (location_t, tree);
159 static tree fold_builtin_isdigit (location_t, tree);
160 static tree fold_builtin_fabs (location_t, tree, tree);
161 static tree fold_builtin_abs (location_t, tree, tree);
162 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
163 enum tree_code);
164 static tree fold_builtin_0 (location_t, tree);
165 static tree fold_builtin_1 (location_t, tree, tree);
166 static tree fold_builtin_2 (location_t, tree, tree, tree);
167 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
168 static tree fold_builtin_varargs (location_t, tree, tree*, int);
169
170 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
171 static tree fold_builtin_strspn (location_t, tree, tree);
172 static tree fold_builtin_strcspn (location_t, tree, tree);
173
174 static rtx expand_builtin_object_size (tree);
175 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
176 enum built_in_function);
177 static void maybe_emit_chk_warning (tree, enum built_in_function);
178 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
179 static void maybe_emit_free_warning (tree);
180 static tree fold_builtin_object_size (tree, tree);
181
182 unsigned HOST_WIDE_INT target_newline;
183 unsigned HOST_WIDE_INT target_percent;
184 static unsigned HOST_WIDE_INT target_c;
185 static unsigned HOST_WIDE_INT target_s;
186 char target_percent_c[3];
187 char target_percent_s[3];
188 char target_percent_s_newline[4];
189 static tree do_mpfr_remquo (tree, tree, tree);
190 static tree do_mpfr_lgamma_r (tree, tree, tree);
191 static void expand_builtin_sync_synchronize (void);
192
   193 /* Return true if NAME starts with __builtin_, __sync_, or __atomic_ (or names one of the Cilk Plus runtime helpers checked below when -fcilkplus is enabled). */
194
195 static bool
196 is_builtin_name (const char *name)
197 {
198 if (strncmp (name, "__builtin_", 10) == 0)
199 return true;
200 if (strncmp (name, "__sync_", 7) == 0)
201 return true;
202 if (strncmp (name, "__atomic_", 9) == 0)
203 return true;
204 if (flag_cilkplus
205 && (!strcmp (name, "__cilkrts_detach")
206 || !strcmp (name, "__cilkrts_pop_frame")))
207 return true;
208 return false;
209 }
210
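 /* For illustration (example names only): the prefix checks above mean that
    is_builtin_name ("__builtin_memcpy"), is_builtin_name ("__sync_fetch_and_add")
    and is_builtin_name ("__atomic_load_n") all return true, while
    is_builtin_name ("memcpy") returns false.  */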
211
212 /* Return true if DECL is a function symbol representing a built-in. */
213
214 bool
215 is_builtin_fn (tree decl)
216 {
217 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
218 }
219
220 /* Return true if NODE should be considered for inline expansion regardless
   221 of the optimization level. This is the case whenever a function is invoked with
222 its "internal" name, which normally contains the prefix "__builtin". */
223
224 bool
225 called_as_built_in (tree node)
226 {
227 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
228 we want the name used to call the function, not the name it
229 will have. */
230 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
231 return is_builtin_name (name);
232 }
233
234 /* Compute values M and N such that M divides (address of EXP - N) and such
   235 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
   236 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
   237 *ALIGNP and any bit-offset to *BITPOSP.
238
239 Note that the address (and thus the alignment) computed here is based
240 on the address to which a symbol resolves, whereas DECL_ALIGN is based
241 on the address at which an object is actually located. These two
242 addresses are not always the same. For example, on ARM targets,
243 the address &foo of a Thumb function foo() has the lowest bit set,
244 whereas foo() itself starts on an even address.
245
246 If ADDR_P is true we are taking the address of the memory reference EXP
247 and thus cannot rely on the access taking place. */
248
249 static bool
250 get_object_alignment_2 (tree exp, unsigned int *alignp,
251 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
252 {
253 HOST_WIDE_INT bitsize, bitpos;
254 tree offset;
255 machine_mode mode;
256 int unsignedp, reversep, volatilep;
257 unsigned int align = BITS_PER_UNIT;
258 bool known_alignment = false;
259
260 /* Get the innermost object and the constant (bitpos) and possibly
261 variable (offset) offset of the access. */
262 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
263 &unsignedp, &reversep, &volatilep);
264
265 /* Extract alignment information from the innermost object and
266 possibly adjust bitpos and offset. */
267 if (TREE_CODE (exp) == FUNCTION_DECL)
268 {
269 /* Function addresses can encode extra information besides their
270 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
271 allows the low bit to be used as a virtual bit, we know
272 that the address itself must be at least 2-byte aligned. */
273 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
274 align = 2 * BITS_PER_UNIT;
275 }
276 else if (TREE_CODE (exp) == LABEL_DECL)
277 ;
278 else if (TREE_CODE (exp) == CONST_DECL)
279 {
280 /* The alignment of a CONST_DECL is determined by its initializer. */
281 exp = DECL_INITIAL (exp);
282 align = TYPE_ALIGN (TREE_TYPE (exp));
283 if (CONSTANT_CLASS_P (exp))
284 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
285
286 known_alignment = true;
287 }
288 else if (DECL_P (exp))
289 {
290 align = DECL_ALIGN (exp);
291 known_alignment = true;
292 }
293 else if (TREE_CODE (exp) == INDIRECT_REF
294 || TREE_CODE (exp) == MEM_REF
295 || TREE_CODE (exp) == TARGET_MEM_REF)
296 {
297 tree addr = TREE_OPERAND (exp, 0);
298 unsigned ptr_align;
299 unsigned HOST_WIDE_INT ptr_bitpos;
300 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
301
   302 /* If the address is explicitly aligned, handle that. */
303 if (TREE_CODE (addr) == BIT_AND_EXPR
304 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
305 {
306 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
307 ptr_bitmask *= BITS_PER_UNIT;
308 align = least_bit_hwi (ptr_bitmask);
309 addr = TREE_OPERAND (addr, 0);
310 }
311
312 known_alignment
313 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
314 align = MAX (ptr_align, align);
315
316 /* Re-apply explicit alignment to the bitpos. */
317 ptr_bitpos &= ptr_bitmask;
318
319 /* The alignment of the pointer operand in a TARGET_MEM_REF
320 has to take the variable offset parts into account. */
321 if (TREE_CODE (exp) == TARGET_MEM_REF)
322 {
323 if (TMR_INDEX (exp))
324 {
325 unsigned HOST_WIDE_INT step = 1;
326 if (TMR_STEP (exp))
327 step = TREE_INT_CST_LOW (TMR_STEP (exp));
328 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
329 }
330 if (TMR_INDEX2 (exp))
331 align = BITS_PER_UNIT;
332 known_alignment = false;
333 }
334
335 /* When EXP is an actual memory reference then we can use
336 TYPE_ALIGN of a pointer indirection to derive alignment.
337 Do so only if get_pointer_alignment_1 did not reveal absolute
338 alignment knowledge and if using that alignment would
339 improve the situation. */
340 unsigned int talign;
341 if (!addr_p && !known_alignment
342 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
343 && talign > align)
344 align = talign;
345 else
346 {
347 /* Else adjust bitpos accordingly. */
348 bitpos += ptr_bitpos;
349 if (TREE_CODE (exp) == MEM_REF
350 || TREE_CODE (exp) == TARGET_MEM_REF)
351 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
352 }
353 }
354 else if (TREE_CODE (exp) == STRING_CST)
355 {
   356 /* STRING_CSTs are the only constant objects we allow not to be
   357 wrapped inside a CONST_DECL. */
358 align = TYPE_ALIGN (TREE_TYPE (exp));
359 if (CONSTANT_CLASS_P (exp))
360 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
361
362 known_alignment = true;
363 }
364
365 /* If there is a non-constant offset part extract the maximum
366 alignment that can prevail. */
367 if (offset)
368 {
369 unsigned int trailing_zeros = tree_ctz (offset);
370 if (trailing_zeros < HOST_BITS_PER_INT)
371 {
372 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
373 if (inner)
374 align = MIN (align, inner);
375 }
376 }
377
378 *alignp = align;
379 *bitposp = bitpos & (*alignp - 1);
380 return known_alignment;
381 }
382
383 /* For a memory reference expression EXP compute values M and N such that M
384 divides (&EXP - N) and such that N < M. If these numbers can be determined,
   385 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
   386 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
387
388 bool
389 get_object_alignment_1 (tree exp, unsigned int *alignp,
390 unsigned HOST_WIDE_INT *bitposp)
391 {
392 return get_object_alignment_2 (exp, alignp, bitposp, false);
393 }
394
395 /* Return the alignment in bits of EXP, an object. */
396
397 unsigned int
398 get_object_alignment (tree exp)
399 {
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
402
403 get_object_alignment_1 (exp, &align, &bitpos);
404
405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
407
408 if (bitpos != 0)
409 align = least_bit_hwi (bitpos);
410 return align;
411 }
412
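 /* Worked example (illustrative): if get_object_alignment_1 determines
    ALIGN = 128 bits and BITPOS = 32 bits, the address is known to lie
    4 bytes past a 16-byte boundary, so the largest guaranteed power-of-two
    alignment is least_bit_hwi (32) = 32 bits, i.e. 4 bytes, which is what
    get_object_alignment returns.  */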
413 /* For a pointer valued expression EXP compute values M and N such that M
414 divides (EXP - N) and such that N < M. If these numbers can be determined,
   415 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
416 the results are just a conservative approximation.
417
418 If EXP is not a pointer, false is returned too. */
419
420 bool
421 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
422 unsigned HOST_WIDE_INT *bitposp)
423 {
424 STRIP_NOPS (exp);
425
426 if (TREE_CODE (exp) == ADDR_EXPR)
427 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
428 alignp, bitposp, true);
429 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
430 {
431 unsigned int align;
432 unsigned HOST_WIDE_INT bitpos;
433 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
434 &align, &bitpos);
435 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
436 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
437 else
438 {
439 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
440 if (trailing_zeros < HOST_BITS_PER_INT)
441 {
442 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
443 if (inner)
444 align = MIN (align, inner);
445 }
446 }
447 *alignp = align;
448 *bitposp = bitpos & (align - 1);
449 return res;
450 }
451 else if (TREE_CODE (exp) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp)))
453 {
454 unsigned int ptr_align, ptr_misalign;
455 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
456
457 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
458 {
459 *bitposp = ptr_misalign * BITS_PER_UNIT;
460 *alignp = ptr_align * BITS_PER_UNIT;
461 /* Make sure to return a sensible alignment when the multiplication
462 by BITS_PER_UNIT overflowed. */
463 if (*alignp == 0)
464 *alignp = 1u << (HOST_BITS_PER_INT - 1);
465 /* We cannot really tell whether this result is an approximation. */
466 return false;
467 }
468 else
469 {
470 *bitposp = 0;
471 *alignp = BITS_PER_UNIT;
472 return false;
473 }
474 }
475 else if (TREE_CODE (exp) == INTEGER_CST)
476 {
477 *alignp = BIGGEST_ALIGNMENT;
478 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
479 & (BIGGEST_ALIGNMENT - 1));
480 return true;
481 }
482
483 *bitposp = 0;
484 *alignp = BITS_PER_UNIT;
485 return false;
486 }
487
488 /* Return the alignment in bits of EXP, a pointer valued expression.
489 The alignment returned is, by default, the alignment of the thing that
490 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
491
492 Otherwise, look at the expression to see if we can do better, i.e., if the
493 expression is actually pointing at an object whose alignment is tighter. */
494
495 unsigned int
496 get_pointer_alignment (tree exp)
497 {
498 unsigned HOST_WIDE_INT bitpos = 0;
499 unsigned int align;
500
501 get_pointer_alignment_1 (exp, &align, &bitpos);
502
503 /* align and bitpos now specify known low bits of the pointer.
504 ptr & (align - 1) == bitpos. */
505
506 if (bitpos != 0)
507 align = least_bit_hwi (bitpos);
508
509 return align;
510 }
511
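 /* Worked example (illustrative, hypothetical pointer P): if P is known to
    be 8-byte aligned and EXP is the POINTER_PLUS_EXPR P + 6, the code above
    computes ALIGN = 64 bits and BITPOS = 48 bits, so get_pointer_alignment
    returns least_bit_hwi (48) = 16 bits, i.e. the sum is only known to be
    2-byte aligned.  */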
   512 /* Return the number of leading non-zero elements in the sequence
   513 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   514 ELTSIZE must be a power of 2 less than 8, i.e. 1, 2, or 4. Used by c_strlen. */
515
516 static unsigned
517 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
518 {
519 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
520
521 unsigned n;
522
523 if (eltsize == 1)
524 {
525 /* Optimize the common case of plain char. */
526 for (n = 0; n < maxelts; n++)
527 {
528 const char *elt = (const char*) ptr + n;
529 if (!*elt)
530 break;
531 }
532 }
533 else
534 {
535 for (n = 0; n < maxelts; n++)
536 {
537 const char *elt = (const char*) ptr + n * eltsize;
538 if (!memcmp (elt, "\0\0\0\0", eltsize))
539 break;
540 }
541 }
542 return n;
543 }
544
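 /* For illustration: with ELTSIZE == 2 the loop above compares 2-byte
    elements against zero, so for the byte representation of the wide string
    L"ab" (elements 'a', 'b', 0) string_length (ptr, 2, 3) returns 2,
    stopping at the all-zero terminator element.  */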
545 /* Compute the length of a null-terminated character string or wide
546 character string handling character sizes of 1, 2, and 4 bytes.
547 TREE_STRING_LENGTH is not the right way because it evaluates to
548 the size of the character array in bytes (as opposed to characters)
   549 and because the array can contain a zero byte in the middle.
550
551 ONLY_VALUE should be nonzero if the result is not going to be emitted
552 into the instruction stream and zero if it is going to be expanded.
553 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
554 is returned, otherwise NULL, since
555 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
556 evaluate the side-effects.
557
558 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
559 accesses. Note that this implies the result is not going to be emitted
560 into the instruction stream.
561
562 The value returned is of type `ssizetype'.
563
564 Unfortunately, string_constant can't access the values of const char
565 arrays with initializers, so neither can we do so here. */
566
567 tree
568 c_strlen (tree src, int only_value)
569 {
570 STRIP_NOPS (src);
571 if (TREE_CODE (src) == COND_EXPR
572 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
573 {
574 tree len1, len2;
575
576 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
577 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
578 if (tree_int_cst_equal (len1, len2))
579 return len1;
580 }
581
582 if (TREE_CODE (src) == COMPOUND_EXPR
583 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
584 return c_strlen (TREE_OPERAND (src, 1), only_value);
585
586 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
587
588 /* Offset from the beginning of the string in bytes. */
589 tree byteoff;
590 src = string_constant (src, &byteoff);
591 if (src == 0)
592 return NULL_TREE;
593
594 /* Determine the size of the string element. */
595 unsigned eltsize
596 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
597
598 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
599 length of SRC. */
600 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
601
602 /* PTR can point to the byte representation of any string type, including
603 char* and wchar_t*. */
604 const char *ptr = TREE_STRING_POINTER (src);
605
606 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
607 {
608 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
609 compute the offset to the following null if we don't know where to
610 start searching for it. */
611 if (string_length (ptr, eltsize, maxelts) < maxelts)
612 {
613 /* Return when an embedded null character is found. */
614 return NULL_TREE;
615 }
616
617 /* We don't know the starting offset, but we do know that the string
618 has no internal zero bytes. We can assume that the offset falls
619 within the bounds of the string; otherwise, the programmer deserves
620 what he gets. Subtract the offset from the length of the string,
621 and return that. This would perhaps not be valid if we were dealing
622 with named arrays in addition to literal string constants. */
623
624 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
625 }
626
627 /* Offset from the beginning of the string in elements. */
628 HOST_WIDE_INT eltoff;
629
630 /* We have a known offset into the string. Start searching there for
631 a null character if we can represent it as a single HOST_WIDE_INT. */
632 if (byteoff == 0)
633 eltoff = 0;
634 else if (! tree_fits_shwi_p (byteoff))
635 eltoff = -1;
636 else
637 eltoff = tree_to_shwi (byteoff) / eltsize;
638
639 /* If the offset is known to be out of bounds, warn, and call strlen at
640 runtime. */
641 if (eltoff < 0 || eltoff > maxelts)
642 {
643 /* Suppress multiple warnings for propagated constant strings. */
644 if (only_value != 2
645 && !TREE_NO_WARNING (src))
646 {
647 warning_at (loc, 0, "offset %qwi outside bounds of constant string",
648 eltoff);
649 TREE_NO_WARNING (src) = 1;
650 }
651 return NULL_TREE;
652 }
653
654 /* Use strlen to search for the first zero byte. Since any strings
655 constructed with build_string will have nulls appended, we win even
656 if we get handed something like (char[4])"abcd".
657
658 Since ELTOFF is our starting index into the string, no further
659 calculation is needed. */
660 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
661 maxelts - eltoff);
662
663 return ssize_int (len);
664 }
665
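 /* Usage sketch (illustrative): for the constant string "foobar" with a
    known byte offset of 2, the code above computes MAXELTS = 6, ELTOFF = 2
    and returns ssize_int (4); for an offset outside the string it warns
    ("offset %qwi outside bounds of constant string") and returns NULL_TREE
    so that strlen is called at run time instead.  */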
   666 /* Return a constant integer corresponding to the target reading
   667 GET_MODE_BITSIZE (MODE) bits from the string constant STR. */
668
669 static rtx
670 c_readstr (const char *str, machine_mode mode)
671 {
672 HOST_WIDE_INT ch;
673 unsigned int i, j;
674 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
675
676 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
677 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
678 / HOST_BITS_PER_WIDE_INT;
679
680 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
681 for (i = 0; i < len; i++)
682 tmp[i] = 0;
683
684 ch = 1;
685 for (i = 0; i < GET_MODE_SIZE (mode); i++)
686 {
687 j = i;
688 if (WORDS_BIG_ENDIAN)
689 j = GET_MODE_SIZE (mode) - i - 1;
690 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
691 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
692 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
693 j *= BITS_PER_UNIT;
694
695 if (ch)
696 ch = (unsigned char) str[i];
697 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
698 }
699
700 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
701 return immed_wide_int_const (c, mode);
702 }
703
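 /* For illustration: on a typical little-endian target c_readstr ("abcd",
    SImode) yields the constant 0x64636261, while a big-endian target yields
    0x61626364. Also note that once a NUL byte is seen CH stays zero, so the
    remaining bytes of the value are zero-filled.  */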
   704 /* Cast a target constant CST to target CHAR, and if that value fits into
   705 the host char type, return zero and store the value in the variable pointed
   706 to by P. Otherwise return 1. */
707
708 static int
709 target_char_cast (tree cst, char *p)
710 {
711 unsigned HOST_WIDE_INT val, hostval;
712
713 if (TREE_CODE (cst) != INTEGER_CST
714 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
715 return 1;
716
   717 /* We do not care here whether it fits or not. */
718 val = TREE_INT_CST_LOW (cst);
719
720 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
721 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
722
723 hostval = val;
724 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
725 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
726
727 if (val != hostval)
728 return 1;
729
730 *p = hostval;
731 return 0;
732 }
733
734 /* Similar to save_expr, but assumes that arbitrary code is not executed
735 in between the multiple evaluations. In particular, we assume that a
736 non-addressable local variable will not be modified. */
737
738 static tree
739 builtin_save_expr (tree exp)
740 {
741 if (TREE_CODE (exp) == SSA_NAME
742 || (TREE_ADDRESSABLE (exp) == 0
743 && (TREE_CODE (exp) == PARM_DECL
744 || (VAR_P (exp) && !TREE_STATIC (exp)))))
745 return exp;
746
747 return save_expr (exp);
748 }
749
750 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
751 times to get the address of either a higher stack frame, or a return
752 address located within it (depending on FNDECL_CODE). */
753
754 static rtx
755 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
756 {
757 int i;
758 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
759 if (tem == NULL_RTX)
760 {
761 /* For a zero count with __builtin_return_address, we don't care what
762 frame address we return, because target-specific definitions will
763 override us. Therefore frame pointer elimination is OK, and using
764 the soft frame pointer is OK.
765
766 For a nonzero count, or a zero count with __builtin_frame_address,
767 we require a stable offset from the current frame pointer to the
768 previous one, so we must use the hard frame pointer, and
769 we must disable frame pointer elimination. */
770 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
771 tem = frame_pointer_rtx;
772 else
773 {
774 tem = hard_frame_pointer_rtx;
775
776 /* Tell reload not to eliminate the frame pointer. */
777 crtl->accesses_prior_frames = 1;
778 }
779 }
780
781 if (count > 0)
782 SETUP_FRAME_ADDRESSES ();
783
784 /* On the SPARC, the return address is not in the frame, it is in a
785 register. There is no way to access it off of the current frame
786 pointer, but it can be accessed off the previous frame pointer by
787 reading the value from the register window save area. */
788 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
789 count--;
790
791 /* Scan back COUNT frames to the specified frame. */
792 for (i = 0; i < count; i++)
793 {
794 /* Assume the dynamic chain pointer is in the word that the
795 frame address points to, unless otherwise specified. */
796 tem = DYNAMIC_CHAIN_ADDRESS (tem);
797 tem = memory_address (Pmode, tem);
798 tem = gen_frame_mem (Pmode, tem);
799 tem = copy_to_reg (tem);
800 }
801
802 /* For __builtin_frame_address, return what we've got. But, on
803 the SPARC for example, we may have to add a bias. */
804 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
805 return FRAME_ADDR_RTX (tem);
806
807 /* For __builtin_return_address, get the return address from that frame. */
808 #ifdef RETURN_ADDR_RTX
809 tem = RETURN_ADDR_RTX (count, tem);
810 #else
811 tem = memory_address (Pmode,
812 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
813 tem = gen_frame_mem (Pmode, tem);
814 #endif
815 return tem;
816 }
817
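 /* Source-level sketch of what this expands (illustrative):

      void *ra = __builtin_return_address (0);    (count == 0)
      void *fp = __builtin_frame_address (1);     (one frame up)

    A nonzero COUNT walks the dynamic chain through DYNAMIC_CHAIN_ADDRESS
    the requested number of times, as in the loop above.  */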
818 /* Alias set used for setjmp buffer. */
819 static alias_set_type setjmp_alias_set = -1;
820
821 /* Construct the leading half of a __builtin_setjmp call. Control will
822 return to RECEIVER_LABEL. This is also called directly by the SJLJ
823 exception handling code. */
824
825 void
826 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
827 {
828 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
829 rtx stack_save;
830 rtx mem;
831
832 if (setjmp_alias_set == -1)
833 setjmp_alias_set = new_alias_set ();
834
835 buf_addr = convert_memory_address (Pmode, buf_addr);
836
837 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
838
839 /* We store the frame pointer and the address of receiver_label in
840 the buffer and use the rest of it for the stack save area, which
841 is machine-dependent. */
842
843 mem = gen_rtx_MEM (Pmode, buf_addr);
844 set_mem_alias_set (mem, setjmp_alias_set);
845 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
846
847 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
848 GET_MODE_SIZE (Pmode))),
849 set_mem_alias_set (mem, setjmp_alias_set);
850
851 emit_move_insn (validize_mem (mem),
852 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
853
854 stack_save = gen_rtx_MEM (sa_mode,
855 plus_constant (Pmode, buf_addr,
856 2 * GET_MODE_SIZE (Pmode)));
857 set_mem_alias_set (stack_save, setjmp_alias_set);
858 emit_stack_save (SAVE_NONLOCAL, &stack_save);
859
860 /* If there is further processing to do, do it. */
861 if (targetm.have_builtin_setjmp_setup ())
862 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
863
864 /* We have a nonlocal label. */
865 cfun->has_nonlocal_label = 1;
866 }
867
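 /* Layout of the setjmp buffer as written above, in Pmode-sized words:
      word 0    frame value (targetm.builtin_setjmp_frame_value)
      word 1    address of RECEIVER_LABEL
      word 2..  stack save area in mode SA_MODE
    expand_builtin_longjmp and expand_builtin_update_setjmp_buf below rely
    on this same layout.  */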
868 /* Construct the trailing part of a __builtin_setjmp call. This is
869 also called directly by the SJLJ exception handling code.
   870 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
871
872 void
873 expand_builtin_setjmp_receiver (rtx receiver_label)
874 {
875 rtx chain;
876
877 /* Mark the FP as used when we get here, so we have to make sure it's
878 marked as used by this function. */
879 emit_use (hard_frame_pointer_rtx);
880
881 /* Mark the static chain as clobbered here so life information
882 doesn't get messed up for it. */
883 chain = targetm.calls.static_chain (current_function_decl, true);
884 if (chain && REG_P (chain))
885 emit_clobber (chain);
886
887 /* Now put in the code to restore the frame pointer, and argument
888 pointer, if needed. */
889 if (! targetm.have_nonlocal_goto ())
890 {
891 /* First adjust our frame pointer to its actual value. It was
892 previously set to the start of the virtual area corresponding to
893 the stacked variables when we branched here and now needs to be
894 adjusted to the actual hardware fp value.
895
896 Assignments to virtual registers are converted by
897 instantiate_virtual_regs into the corresponding assignment
898 to the underlying register (fp in this case) that makes
899 the original assignment true.
900 So the following insn will actually be decrementing fp by
901 STARTING_FRAME_OFFSET. */
902 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
903
904 /* Restoring the frame pointer also modifies the hard frame pointer.
905 Mark it used (so that the previous assignment remains live once
906 the frame pointer is eliminated) and clobbered (to represent the
907 implicit update from the assignment). */
908 emit_use (hard_frame_pointer_rtx);
909 emit_clobber (hard_frame_pointer_rtx);
910 }
911
912 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
913 {
914 /* If the argument pointer can be eliminated in favor of the
915 frame pointer, we don't need to restore it. We assume here
916 that if such an elimination is present, it can always be used.
917 This is the case on all known machines; if we don't make this
918 assumption, we do unnecessary saving on many machines. */
919 size_t i;
920 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
921
922 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
923 if (elim_regs[i].from == ARG_POINTER_REGNUM
924 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
925 break;
926
927 if (i == ARRAY_SIZE (elim_regs))
928 {
929 /* Now restore our arg pointer from the address at which it
930 was saved in our stack frame. */
931 emit_move_insn (crtl->args.internal_arg_pointer,
932 copy_to_reg (get_arg_pointer_save_area ()));
933 }
934 }
935
936 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
937 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
938 else if (targetm.have_nonlocal_goto_receiver ())
939 emit_insn (targetm.gen_nonlocal_goto_receiver ());
940 else
941 { /* Nothing */ }
942
943 /* We must not allow the code we just generated to be reordered by
944 scheduling. Specifically, the update of the frame pointer must
945 happen immediately, not later. */
946 emit_insn (gen_blockage ());
947 }
948
949 /* __builtin_longjmp is passed a pointer to an array of five words (not
950 all will be used on all machines). It operates similarly to the C
951 library function of the same name, but is more efficient. Much of
952 the code below is copied from the handling of non-local gotos. */
953
954 static void
955 expand_builtin_longjmp (rtx buf_addr, rtx value)
956 {
957 rtx fp, lab, stack;
958 rtx_insn *insn, *last;
959 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
960
961 /* DRAP is needed for stack realign if longjmp is expanded to current
962 function */
963 if (SUPPORTS_STACK_ALIGNMENT)
964 crtl->need_drap = true;
965
966 if (setjmp_alias_set == -1)
967 setjmp_alias_set = new_alias_set ();
968
969 buf_addr = convert_memory_address (Pmode, buf_addr);
970
971 buf_addr = force_reg (Pmode, buf_addr);
972
   973 /* We require the user to pass a second argument of 1, because that is
   974 what builtin_setjmp will return. */
975 gcc_assert (value == const1_rtx);
976
977 last = get_last_insn ();
978 if (targetm.have_builtin_longjmp ())
979 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
980 else
981 {
982 fp = gen_rtx_MEM (Pmode, buf_addr);
983 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
984 GET_MODE_SIZE (Pmode)));
985
986 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
987 2 * GET_MODE_SIZE (Pmode)));
988 set_mem_alias_set (fp, setjmp_alias_set);
989 set_mem_alias_set (lab, setjmp_alias_set);
990 set_mem_alias_set (stack, setjmp_alias_set);
991
992 /* Pick up FP, label, and SP from the block and jump. This code is
993 from expand_goto in stmt.c; see there for detailed comments. */
994 if (targetm.have_nonlocal_goto ())
995 /* We have to pass a value to the nonlocal_goto pattern that will
996 get copied into the static_chain pointer, but it does not matter
997 what that value is, because builtin_setjmp does not use it. */
998 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
999 else
1000 {
1001 lab = copy_to_reg (lab);
1002
1003 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1004 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1005
1006 emit_move_insn (hard_frame_pointer_rtx, fp);
1007 emit_stack_restore (SAVE_NONLOCAL, stack);
1008
1009 emit_use (hard_frame_pointer_rtx);
1010 emit_use (stack_pointer_rtx);
1011 emit_indirect_jump (lab);
1012 }
1013 }
1014
1015 /* Search backwards and mark the jump insn as a non-local goto.
1016 Note that this precludes the use of __builtin_longjmp to a
1017 __builtin_setjmp target in the same function. However, we've
1018 already cautioned the user that these functions are for
1019 internal exception handling use only. */
1020 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1021 {
1022 gcc_assert (insn != last);
1023
1024 if (JUMP_P (insn))
1025 {
1026 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1027 break;
1028 }
1029 else if (CALL_P (insn))
1030 break;
1031 }
1032 }
1033
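 /* Usage note (illustrative): the only supported source-level form is

      __builtin_longjmp (buf, 1);

    since the second argument is asserted above to be the constant 1; as the
    comment above notes, these builtins are meant for internal exception
    handling use, not as a general setjmp/longjmp replacement.  */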
1034 static inline bool
1035 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1036 {
1037 return (iter->i < iter->n);
1038 }
1039
1040 /* This function validates the types of a function call argument list
1041 against a specified list of tree_codes. If the last specifier is a 0,
  1042 that represents an ellipsis; otherwise the last specifier must be a
1043 VOID_TYPE. */
1044
1045 static bool
1046 validate_arglist (const_tree callexpr, ...)
1047 {
1048 enum tree_code code;
  1049 bool res = false;
1050 va_list ap;
1051 const_call_expr_arg_iterator iter;
1052 const_tree arg;
1053
1054 va_start (ap, callexpr);
1055 init_const_call_expr_arg_iterator (callexpr, &iter);
1056
1057 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1058 tree fn = CALL_EXPR_FN (callexpr);
1059 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1060
1061 for (unsigned argno = 1; ; ++argno)
1062 {
1063 code = (enum tree_code) va_arg (ap, int);
1064
1065 switch (code)
1066 {
1067 case 0:
  1068 /* This signifies an ellipsis; any further arguments are all OK. */
1069 res = true;
1070 goto end;
1071 case VOID_TYPE:
  1072 /* This signifies an endlink; if no arguments remain, return
  1073 true, otherwise return false. */
1074 res = !more_const_call_expr_args_p (&iter);
1075 goto end;
1076 case POINTER_TYPE:
1077 /* The actual argument must be nonnull when either the whole
1078 called function has been declared nonnull, or when the formal
1079 argument corresponding to the actual argument has been. */
1080 if (argmap
1081 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1082 {
1083 arg = next_const_call_expr_arg (&iter);
1084 if (!validate_arg (arg, code) || integer_zerop (arg))
1085 goto end;
1086 break;
1087 }
1088 /* FALLTHRU */
1089 default:
1090 /* If no parameters remain or the parameter's code does not
1091 match the specified code, return false. Otherwise continue
1092 checking any remaining arguments. */
1093 arg = next_const_call_expr_arg (&iter);
1094 if (!validate_arg (arg, code))
1095 goto end;
1096 break;
1097 }
1098 }
1099
1100 /* We need gotos here since we can only have one VA_CLOSE in a
1101 function. */
1102 end: ;
1103 va_end (ap);
1104
1105 BITMAP_FREE (argmap);
1106
1107 return res;
1108 }
1109
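 /* Examples of how this is used later in this file:

      validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
        requires exactly two pointer arguments;
      validate_arglist (exp, POINTER_TYPE, 0)
        requires a leading pointer argument followed by anything.

    Pointer arguments covered by attribute nonnull must in addition not be
    a literal zero, per the POINTER_TYPE case above.  */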
1110 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1111 and the address of the save area. */
1112
1113 static rtx
1114 expand_builtin_nonlocal_goto (tree exp)
1115 {
1116 tree t_label, t_save_area;
1117 rtx r_label, r_save_area, r_fp, r_sp;
1118 rtx_insn *insn;
1119
1120 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1121 return NULL_RTX;
1122
1123 t_label = CALL_EXPR_ARG (exp, 0);
1124 t_save_area = CALL_EXPR_ARG (exp, 1);
1125
1126 r_label = expand_normal (t_label);
1127 r_label = convert_memory_address (Pmode, r_label);
1128 r_save_area = expand_normal (t_save_area);
1129 r_save_area = convert_memory_address (Pmode, r_save_area);
1130 /* Copy the address of the save location to a register just in case it was
1131 based on the frame pointer. */
1132 r_save_area = copy_to_reg (r_save_area);
1133 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1134 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1135 plus_constant (Pmode, r_save_area,
1136 GET_MODE_SIZE (Pmode)));
1137
1138 crtl->has_nonlocal_goto = 1;
1139
1140 /* ??? We no longer need to pass the static chain value, afaik. */
1141 if (targetm.have_nonlocal_goto ())
1142 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1143 else
1144 {
1145 r_label = copy_to_reg (r_label);
1146
1147 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1148 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1149
1150 /* Restore frame pointer for containing function. */
1151 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1152 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1153
1154 /* USE of hard_frame_pointer_rtx added for consistency;
1155 not clear if really needed. */
1156 emit_use (hard_frame_pointer_rtx);
1157 emit_use (stack_pointer_rtx);
1158
1159 /* If the architecture is using a GP register, we must
1160 conservatively assume that the target function makes use of it.
1161 The prologue of functions with nonlocal gotos must therefore
1162 initialize the GP register to the appropriate value, and we
1163 must then make sure that this value is live at the point
1164 of the jump. (Note that this doesn't necessarily apply
1165 to targets with a nonlocal_goto pattern; they are free
1166 to implement it in their own way. Note also that this is
1167 a no-op if the GP register is a global invariant.) */
1168 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1169 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1170 emit_use (pic_offset_table_rtx);
1171
1172 emit_indirect_jump (r_label);
1173 }
1174
1175 /* Search backwards to the jump insn and mark it as a
1176 non-local goto. */
1177 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1178 {
1179 if (JUMP_P (insn))
1180 {
1181 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1182 break;
1183 }
1184 else if (CALL_P (insn))
1185 break;
1186 }
1187
1188 return const0_rtx;
1189 }
1190
1191 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1192 (not all will be used on all machines) that was passed to __builtin_setjmp.
1193 It updates the stack pointer in that block to the current value. This is
1194 also called directly by the SJLJ exception handling code. */
1195
1196 void
1197 expand_builtin_update_setjmp_buf (rtx buf_addr)
1198 {
1199 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1200 rtx stack_save
1201 = gen_rtx_MEM (sa_mode,
1202 memory_address
1203 (sa_mode,
1204 plus_constant (Pmode, buf_addr,
1205 2 * GET_MODE_SIZE (Pmode))));
1206
1207 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1208 }
1209
1210 /* Expand a call to __builtin_prefetch. For a target that does not support
1211 data prefetch, evaluate the memory address argument in case it has side
1212 effects. */
1213
1214 static void
1215 expand_builtin_prefetch (tree exp)
1216 {
1217 tree arg0, arg1, arg2;
1218 int nargs;
1219 rtx op0, op1, op2;
1220
1221 if (!validate_arglist (exp, POINTER_TYPE, 0))
1222 return;
1223
1224 arg0 = CALL_EXPR_ARG (exp, 0);
1225
1226 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1227 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1228 locality). */
1229 nargs = call_expr_nargs (exp);
1230 if (nargs > 1)
1231 arg1 = CALL_EXPR_ARG (exp, 1);
1232 else
1233 arg1 = integer_zero_node;
1234 if (nargs > 2)
1235 arg2 = CALL_EXPR_ARG (exp, 2);
1236 else
1237 arg2 = integer_three_node;
1238
1239 /* Argument 0 is an address. */
1240 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1241
1242 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1243 if (TREE_CODE (arg1) != INTEGER_CST)
1244 {
1245 error ("second argument to %<__builtin_prefetch%> must be a constant");
1246 arg1 = integer_zero_node;
1247 }
1248 op1 = expand_normal (arg1);
1249 /* Argument 1 must be either zero or one. */
1250 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1251 {
1252 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1253 " using zero");
1254 op1 = const0_rtx;
1255 }
1256
1257 /* Argument 2 (locality) must be a compile-time constant int. */
1258 if (TREE_CODE (arg2) != INTEGER_CST)
1259 {
1260 error ("third argument to %<__builtin_prefetch%> must be a constant");
1261 arg2 = integer_zero_node;
1262 }
1263 op2 = expand_normal (arg2);
1264 /* Argument 2 must be 0, 1, 2, or 3. */
1265 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1266 {
1267 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1268 op2 = const0_rtx;
1269 }
1270
1271 if (targetm.have_prefetch ())
1272 {
1273 struct expand_operand ops[3];
1274
1275 create_address_operand (&ops[0], op0);
1276 create_integer_operand (&ops[1], INTVAL (op1));
1277 create_integer_operand (&ops[2], INTVAL (op2));
1278 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1279 return;
1280 }
1281
1282 /* Don't do anything with direct references to volatile memory, but
1283 generate code to handle other side effects. */
1284 if (!MEM_P (op0) && side_effects_p (op0))
1285 emit_insn (op0);
1286 }
1287
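 /* Source-level sketch (illustrative):

      __builtin_prefetch (p);          (read, locality 3: the defaults)
      __builtin_prefetch (p, 1, 0);    (prefetch for write, no locality)

    Both optional arguments must be integer constants, as checked above.  */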
1288 /* Get a MEM rtx for expression EXP which is the address of an operand
  1289 to be used in a string instruction (cmpstrsi, movmemsi, ...). LEN is
1290 the maximum length of the block of memory that might be accessed or
1291 NULL if unknown. */
1292
1293 static rtx
1294 get_memory_rtx (tree exp, tree len)
1295 {
1296 tree orig_exp = exp;
1297 rtx addr, mem;
1298
  1299 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
  1300 from its expression; e.g. for expr->a.b only <variable>.a.b is recorded. */
1301 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1302 exp = TREE_OPERAND (exp, 0);
1303
1304 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1305 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1306
1307 /* Get an expression we can use to find the attributes to assign to MEM.
1308 First remove any nops. */
1309 while (CONVERT_EXPR_P (exp)
1310 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1311 exp = TREE_OPERAND (exp, 0);
1312
  1313 /* Build a MEM_REF representing the whole accessed area as a byte blob
  1314 (as builtin stringops may alias with anything). */
1315 exp = fold_build2 (MEM_REF,
1316 build_array_type (char_type_node,
1317 build_range_type (sizetype,
1318 size_one_node, len)),
1319 exp, build_int_cst (ptr_type_node, 0));
1320
1321 /* If the MEM_REF has no acceptable address, try to get the base object
1322 from the original address we got, and build an all-aliasing
1323 unknown-sized access to that one. */
1324 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1325 set_mem_attributes (mem, exp, 0);
1326 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1327 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1328 0))))
1329 {
1330 exp = build_fold_addr_expr (exp);
1331 exp = fold_build2 (MEM_REF,
1332 build_array_type (char_type_node,
1333 build_range_type (sizetype,
1334 size_zero_node,
1335 NULL)),
1336 exp, build_int_cst (ptr_type_node, 0));
1337 set_mem_attributes (mem, exp, 0);
1338 }
1339 set_mem_alias_set (mem, 0);
1340 return mem;
1341 }
1342 \f
1343 /* Built-in functions to perform an untyped call and return. */
1344
1345 #define apply_args_mode \
1346 (this_target_builtins->x_apply_args_mode)
1347 #define apply_result_mode \
1348 (this_target_builtins->x_apply_result_mode)
1349
1350 /* Return the size required for the block returned by __builtin_apply_args,
1351 and initialize apply_args_mode. */
1352
1353 static int
1354 apply_args_size (void)
1355 {
1356 static int size = -1;
1357 int align;
1358 unsigned int regno;
1359 machine_mode mode;
1360
1361 /* The values computed by this function never change. */
1362 if (size < 0)
1363 {
1364 /* The first value is the incoming arg-pointer. */
1365 size = GET_MODE_SIZE (Pmode);
1366
1367 /* The second value is the structure value address unless this is
1368 passed as an "invisible" first argument. */
1369 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1370 size += GET_MODE_SIZE (Pmode);
1371
1372 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1373 if (FUNCTION_ARG_REGNO_P (regno))
1374 {
1375 mode = targetm.calls.get_raw_arg_mode (regno);
1376
1377 gcc_assert (mode != VOIDmode);
1378
1379 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1380 if (size % align != 0)
1381 size = CEIL (size, align) * align;
1382 size += GET_MODE_SIZE (mode);
1383 apply_args_mode[regno] = mode;
1384 }
1385 else
1386 {
1387 apply_args_mode[regno] = VOIDmode;
1388 }
1389 }
1390 return size;
1391 }
1392
1393 /* Return the size required for the block returned by __builtin_apply,
1394 and initialize apply_result_mode. */
1395
1396 static int
1397 apply_result_size (void)
1398 {
1399 static int size = -1;
1400 int align, regno;
1401 machine_mode mode;
1402
1403 /* The values computed by this function never change. */
1404 if (size < 0)
1405 {
1406 size = 0;
1407
1408 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1409 if (targetm.calls.function_value_regno_p (regno))
1410 {
1411 mode = targetm.calls.get_raw_result_mode (regno);
1412
1413 gcc_assert (mode != VOIDmode);
1414
1415 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1416 if (size % align != 0)
1417 size = CEIL (size, align) * align;
1418 size += GET_MODE_SIZE (mode);
1419 apply_result_mode[regno] = mode;
1420 }
1421 else
1422 apply_result_mode[regno] = VOIDmode;
1423
1424 /* Allow targets that use untyped_call and untyped_return to override
1425 the size so that machine-specific information can be stored here. */
1426 #ifdef APPLY_RESULT_SIZE
1427 size = APPLY_RESULT_SIZE;
1428 #endif
1429 }
1430 return size;
1431 }
1432
1433 /* Create a vector describing the result block RESULT. If SAVEP is true,
1434 the result block is used to save the values; otherwise it is used to
1435 restore the values. */
1436
1437 static rtx
1438 result_vector (int savep, rtx result)
1439 {
1440 int regno, size, align, nelts;
1441 machine_mode mode;
1442 rtx reg, mem;
1443 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1444
1445 size = nelts = 0;
1446 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1447 if ((mode = apply_result_mode[regno]) != VOIDmode)
1448 {
1449 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1450 if (size % align != 0)
1451 size = CEIL (size, align) * align;
1452 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1453 mem = adjust_address (result, mode, size);
1454 savevec[nelts++] = (savep
1455 ? gen_rtx_SET (mem, reg)
1456 : gen_rtx_SET (reg, mem));
1457 size += GET_MODE_SIZE (mode);
1458 }
1459 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1460 }
1461
1462 /* Save the state required to perform an untyped call with the same
1463 arguments as were passed to the current function. */
1464
1465 static rtx
1466 expand_builtin_apply_args_1 (void)
1467 {
1468 rtx registers, tem;
1469 int size, align, regno;
1470 machine_mode mode;
1471 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1472
1473 /* Create a block where the arg-pointer, structure value address,
1474 and argument registers can be saved. */
1475 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1476
1477 /* Walk past the arg-pointer and structure value address. */
1478 size = GET_MODE_SIZE (Pmode);
1479 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1480 size += GET_MODE_SIZE (Pmode);
1481
1482 /* Save each register used in calling a function to the block. */
1483 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1484 if ((mode = apply_args_mode[regno]) != VOIDmode)
1485 {
1486 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1487 if (size % align != 0)
1488 size = CEIL (size, align) * align;
1489
1490 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1491
1492 emit_move_insn (adjust_address (registers, mode, size), tem);
1493 size += GET_MODE_SIZE (mode);
1494 }
1495
1496 /* Save the arg pointer to the block. */
1497 tem = copy_to_reg (crtl->args.internal_arg_pointer);
  1498 /* We need the pointer as the caller actually passed the arguments to us, not
1499 as we might have pretended they were passed. Make sure it's a valid
1500 operand, as emit_move_insn isn't expected to handle a PLUS. */
1501 if (STACK_GROWS_DOWNWARD)
1502 tem
1503 = force_operand (plus_constant (Pmode, tem,
1504 crtl->args.pretend_args_size),
1505 NULL_RTX);
1506 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1507
1508 size = GET_MODE_SIZE (Pmode);
1509
1510 /* Save the structure value address unless this is passed as an
1511 "invisible" first argument. */
1512 if (struct_incoming_value)
1513 {
1514 emit_move_insn (adjust_address (registers, Pmode, size),
1515 copy_to_reg (struct_incoming_value));
1516 size += GET_MODE_SIZE (Pmode);
1517 }
1518
1519 /* Return the address of the block. */
1520 return copy_addr_to_reg (XEXP (registers, 0));
1521 }
1522
1523 /* __builtin_apply_args returns block of memory allocated on
1524 the stack into which is stored the arg pointer, structure
1525 value address, static chain, and all the registers that might
1526 possibly be used in performing a function call. The code is
1527 moved to the start of the function so the incoming values are
1528 saved. */
1529
1530 static rtx
1531 expand_builtin_apply_args (void)
1532 {
1533 /* Don't do __builtin_apply_args more than once in a function.
1534 Save the result of the first call and reuse it. */
1535 if (apply_args_value != 0)
1536 return apply_args_value;
1537 {
1538 /* When this function is called, it means that registers must be
1539 saved on entry to this function. So we migrate the
1540 call to the first insn of this function. */
1541 rtx temp;
1542
1543 start_sequence ();
1544 temp = expand_builtin_apply_args_1 ();
1545 rtx_insn *seq = get_insns ();
1546 end_sequence ();
1547
1548 apply_args_value = temp;
1549
1550 /* Put the insns after the NOTE that starts the function.
1551 If this is inside a start_sequence, make the outer-level insn
1552 chain current, so the code is placed at the start of the
1553 function. If internal_arg_pointer is a non-virtual pseudo,
1554 it needs to be placed after the function that initializes
1555 that pseudo. */
1556 push_topmost_sequence ();
1557 if (REG_P (crtl->args.internal_arg_pointer)
1558 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1559 emit_insn_before (seq, parm_birth_insn);
1560 else
1561 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1562 pop_topmost_sequence ();
1563 return temp;
1564 }
1565 }
1566
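 /* Source-level sketch (illustrative; FN and the size 64 are made up):

      void *args = __builtin_apply_args ();
      void *res  = __builtin_apply ((void (*) ()) fn, args, 64);
      __builtin_return (res);

    expand_builtin_apply below replays the saved argument registers and the
    copied argument block when making the untyped call.  */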
1567 /* Perform an untyped call and save the state required to perform an
1568 untyped return of whatever value was returned by the given function. */
1569
1570 static rtx
1571 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1572 {
1573 int size, align, regno;
1574 machine_mode mode;
1575 rtx incoming_args, result, reg, dest, src;
1576 rtx_call_insn *call_insn;
1577 rtx old_stack_level = 0;
1578 rtx call_fusage = 0;
1579 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1580
1581 arguments = convert_memory_address (Pmode, arguments);
1582
1583 /* Create a block where the return registers can be saved. */
1584 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1585
1586 /* Fetch the arg pointer from the ARGUMENTS block. */
1587 incoming_args = gen_reg_rtx (Pmode);
1588 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1589 if (!STACK_GROWS_DOWNWARD)
1590 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1591 incoming_args, 0, OPTAB_LIB_WIDEN);
1592
1593 /* Push a new argument block and copy the arguments. Do not allow
1594 the (potential) memcpy call below to interfere with our stack
1595 manipulations. */
1596 do_pending_stack_adjust ();
1597 NO_DEFER_POP;
1598
1599 /* Save the stack with nonlocal if available. */
1600 if (targetm.have_save_stack_nonlocal ())
1601 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1602 else
1603 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1604
1605 /* Allocate a block of memory onto the stack and copy the memory
1606 arguments to the outgoing arguments address. We can pass TRUE
1607 as the 4th argument because we just saved the stack pointer
1608 and will restore it right after the call. */
1609 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1610
1611 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1612 may have already set current_function_calls_alloca to true.
1613 current_function_calls_alloca won't be set if argsize is zero,
1614 so we have to guarantee need_drap is true here. */
1615 if (SUPPORTS_STACK_ALIGNMENT)
1616 crtl->need_drap = true;
1617
1618 dest = virtual_outgoing_args_rtx;
1619 if (!STACK_GROWS_DOWNWARD)
1620 {
1621 if (CONST_INT_P (argsize))
1622 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1623 else
1624 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1625 }
1626 dest = gen_rtx_MEM (BLKmode, dest);
1627 set_mem_align (dest, PARM_BOUNDARY);
1628 src = gen_rtx_MEM (BLKmode, incoming_args);
1629 set_mem_align (src, PARM_BOUNDARY);
1630 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1631
1632 /* Refer to the argument block. */
1633 apply_args_size ();
1634 arguments = gen_rtx_MEM (BLKmode, arguments);
1635 set_mem_align (arguments, PARM_BOUNDARY);
1636
1637 /* Walk past the arg-pointer and structure value address. */
1638 size = GET_MODE_SIZE (Pmode);
1639 if (struct_value)
1640 size += GET_MODE_SIZE (Pmode);
1641
1642 /* Restore each of the registers previously saved. Make USE insns
1643 for each of these registers for use in making the call. */
1644 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1645 if ((mode = apply_args_mode[regno]) != VOIDmode)
1646 {
1647 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1648 if (size % align != 0)
1649 size = CEIL (size, align) * align;
1650 reg = gen_rtx_REG (mode, regno);
1651 emit_move_insn (reg, adjust_address (arguments, mode, size));
1652 use_reg (&call_fusage, reg);
1653 size += GET_MODE_SIZE (mode);
1654 }
1655
1656 /* Restore the structure value address unless this is passed as an
1657 "invisible" first argument. */
1658 size = GET_MODE_SIZE (Pmode);
1659 if (struct_value)
1660 {
1661 rtx value = gen_reg_rtx (Pmode);
1662 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1663 emit_move_insn (struct_value, value);
1664 if (REG_P (struct_value))
1665 use_reg (&call_fusage, struct_value);
1666 size += GET_MODE_SIZE (Pmode);
1667 }
1668
1669 /* All arguments and registers used for the call are set up by now! */
1670 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1671
1672 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1673 and we don't want to load it into a register as an optimization,
1674 because prepare_call_address already did it if it should be done. */
1675 if (GET_CODE (function) != SYMBOL_REF)
1676 function = memory_address (FUNCTION_MODE, function);
1677
1678 /* Generate the actual call instruction and save the return value. */
1679 if (targetm.have_untyped_call ())
1680 {
1681 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1682 emit_call_insn (targetm.gen_untyped_call (mem, result,
1683 result_vector (1, result)));
1684 }
1685 else if (targetm.have_call_value ())
1686 {
1687 rtx valreg = 0;
1688
1689 /* Locate the unique return register. It is not possible to
1690 express a call that sets more than one return register using
1691 call_value; use untyped_call for that. In fact, untyped_call
1692 only needs to save the return registers in the given block. */
1693 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1694 if ((mode = apply_result_mode[regno]) != VOIDmode)
1695 {
1696 gcc_assert (!valreg); /* have_untyped_call required. */
1697
1698 valreg = gen_rtx_REG (mode, regno);
1699 }
1700
1701 emit_insn (targetm.gen_call_value (valreg,
1702 gen_rtx_MEM (FUNCTION_MODE, function),
1703 const0_rtx, NULL_RTX, const0_rtx));
1704
1705 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1706 }
1707 else
1708 gcc_unreachable ();
1709
1710 /* Find the CALL insn we just emitted, and attach the register usage
1711 information. */
1712 call_insn = last_call_insn ();
1713 add_function_usage_to (call_insn, call_fusage);
1714
1715 /* Restore the stack. */
1716 if (targetm.have_save_stack_nonlocal ())
1717 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1718 else
1719 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1720 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1721
1722 OK_DEFER_POP;
1723
1724 /* Return the address of the result block. */
1725 result = copy_addr_to_reg (XEXP (result, 0));
1726 return convert_memory_address (ptr_mode, result);
1727 }
1728
1729 /* Perform an untyped return. */
1730
1731 static void
1732 expand_builtin_return (rtx result)
1733 {
1734 int size, align, regno;
1735 machine_mode mode;
1736 rtx reg;
1737 rtx_insn *call_fusage = 0;
1738
1739 result = convert_memory_address (Pmode, result);
1740
1741 apply_result_size ();
1742 result = gen_rtx_MEM (BLKmode, result);
1743
1744 if (targetm.have_untyped_return ())
1745 {
1746 rtx vector = result_vector (0, result);
1747 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1748 emit_barrier ();
1749 return;
1750 }
1751
1752 /* Restore the return value and note that each value is used. */
1753 size = 0;
1754 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1755 if ((mode = apply_result_mode[regno]) != VOIDmode)
1756 {
1757 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1758 if (size % align != 0)
1759 size = CEIL (size, align) * align;
1760 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1761 emit_move_insn (reg, adjust_address (result, mode, size));
1762
1763 push_to_sequence (call_fusage);
1764 emit_use (reg);
1765 call_fusage = get_insns ();
1766 end_sequence ();
1767 size += GET_MODE_SIZE (mode);
1768 }
1769
1770 /* Put the USE insns before the return. */
1771 emit_insn (call_fusage);
1772
1773 /* Return whatever values were restored by jumping directly to the end
1774 of the function. */
1775 expand_naked_return ();
1776 }
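
/* Illustrative sketch (editorial addition, not part of the original source):
   the three builtins expanded above are typically used together to forward a
   call whose arguments are not known statically, for example

     void *forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   where target_fn and the 64-byte argument-block size are hypothetical values
   supplied by the user.  expand_builtin_apply_args saves the incoming argument
   registers, expand_builtin_apply re-issues the call from the saved block, and
   expand_builtin_return restores the saved return registers before returning.  */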
1777
1778 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1779
1780 static enum type_class
1781 type_to_class (tree type)
1782 {
1783 switch (TREE_CODE (type))
1784 {
1785 case VOID_TYPE: return void_type_class;
1786 case INTEGER_TYPE: return integer_type_class;
1787 case ENUMERAL_TYPE: return enumeral_type_class;
1788 case BOOLEAN_TYPE: return boolean_type_class;
1789 case POINTER_TYPE: return pointer_type_class;
1790 case REFERENCE_TYPE: return reference_type_class;
1791 case OFFSET_TYPE: return offset_type_class;
1792 case REAL_TYPE: return real_type_class;
1793 case COMPLEX_TYPE: return complex_type_class;
1794 case FUNCTION_TYPE: return function_type_class;
1795 case METHOD_TYPE: return method_type_class;
1796 case RECORD_TYPE: return record_type_class;
1797 case UNION_TYPE:
1798 case QUAL_UNION_TYPE: return union_type_class;
1799 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1800 ? string_type_class : array_type_class);
1801 case LANG_TYPE: return lang_type_class;
1802 default: return no_type_class;
1803 }
1804 }
1805
1806 /* Expand a call EXP to __builtin_classify_type. */
1807
1808 static rtx
1809 expand_builtin_classify_type (tree exp)
1810 {
1811 if (call_expr_nargs (exp))
1812 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1813 return GEN_INT (no_type_class);
1814 }
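
/* For illustration (editorial addition): with the mapping above,
   __builtin_classify_type (1.0) expands to the constant real_type_class and
   __builtin_classify_type ((int *) 0) to pointer_type_class; a call with no
   arguments expands to no_type_class.  */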
1815
1816 /* This helper macro, meant to be used in mathfn_built_in below,
1817 determines which among a set of three builtin math functions is
1818 appropriate for a given type mode. The `F' and `L' cases are
1819 automatically generated from the `double' case. */
1820 #define CASE_MATHFN(MATHFN) \
1821 CASE_CFN_##MATHFN: \
1822 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1823 fcodel = BUILT_IN_##MATHFN##L ; break;
1824 /* Similar to above, but appends _R after any F/L suffix. */
1825 #define CASE_MATHFN_REENT(MATHFN) \
1826 case CFN_BUILT_IN_##MATHFN##_R: \
1827 case CFN_BUILT_IN_##MATHFN##F_R: \
1828 case CFN_BUILT_IN_##MATHFN##L_R: \
1829 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1830 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1831
1832 /* Return a function equivalent to FN but operating on floating-point
1833 values of type TYPE, or END_BUILTINS if no such function exists.
1834 This is purely an operation on function codes; it does not guarantee
1835 that the target actually has an implementation of the function. */
1836
1837 static built_in_function
1838 mathfn_built_in_2 (tree type, combined_fn fn)
1839 {
1840 built_in_function fcode, fcodef, fcodel;
1841
1842 switch (fn)
1843 {
1844 CASE_MATHFN (ACOS)
1845 CASE_MATHFN (ACOSH)
1846 CASE_MATHFN (ASIN)
1847 CASE_MATHFN (ASINH)
1848 CASE_MATHFN (ATAN)
1849 CASE_MATHFN (ATAN2)
1850 CASE_MATHFN (ATANH)
1851 CASE_MATHFN (CBRT)
1852 CASE_MATHFN (CEIL)
1853 CASE_MATHFN (CEXPI)
1854 CASE_MATHFN (COPYSIGN)
1855 CASE_MATHFN (COS)
1856 CASE_MATHFN (COSH)
1857 CASE_MATHFN (DREM)
1858 CASE_MATHFN (ERF)
1859 CASE_MATHFN (ERFC)
1860 CASE_MATHFN (EXP)
1861 CASE_MATHFN (EXP10)
1862 CASE_MATHFN (EXP2)
1863 CASE_MATHFN (EXPM1)
1864 CASE_MATHFN (FABS)
1865 CASE_MATHFN (FDIM)
1866 CASE_MATHFN (FLOOR)
1867 CASE_MATHFN (FMA)
1868 CASE_MATHFN (FMAX)
1869 CASE_MATHFN (FMIN)
1870 CASE_MATHFN (FMOD)
1871 CASE_MATHFN (FREXP)
1872 CASE_MATHFN (GAMMA)
1873 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1874 CASE_MATHFN (HUGE_VAL)
1875 CASE_MATHFN (HYPOT)
1876 CASE_MATHFN (ILOGB)
1877 CASE_MATHFN (ICEIL)
1878 CASE_MATHFN (IFLOOR)
1879 CASE_MATHFN (INF)
1880 CASE_MATHFN (IRINT)
1881 CASE_MATHFN (IROUND)
1882 CASE_MATHFN (ISINF)
1883 CASE_MATHFN (J0)
1884 CASE_MATHFN (J1)
1885 CASE_MATHFN (JN)
1886 CASE_MATHFN (LCEIL)
1887 CASE_MATHFN (LDEXP)
1888 CASE_MATHFN (LFLOOR)
1889 CASE_MATHFN (LGAMMA)
1890 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1891 CASE_MATHFN (LLCEIL)
1892 CASE_MATHFN (LLFLOOR)
1893 CASE_MATHFN (LLRINT)
1894 CASE_MATHFN (LLROUND)
1895 CASE_MATHFN (LOG)
1896 CASE_MATHFN (LOG10)
1897 CASE_MATHFN (LOG1P)
1898 CASE_MATHFN (LOG2)
1899 CASE_MATHFN (LOGB)
1900 CASE_MATHFN (LRINT)
1901 CASE_MATHFN (LROUND)
1902 CASE_MATHFN (MODF)
1903 CASE_MATHFN (NAN)
1904 CASE_MATHFN (NANS)
1905 CASE_MATHFN (NEARBYINT)
1906 CASE_MATHFN (NEXTAFTER)
1907 CASE_MATHFN (NEXTTOWARD)
1908 CASE_MATHFN (POW)
1909 CASE_MATHFN (POWI)
1910 CASE_MATHFN (POW10)
1911 CASE_MATHFN (REMAINDER)
1912 CASE_MATHFN (REMQUO)
1913 CASE_MATHFN (RINT)
1914 CASE_MATHFN (ROUND)
1915 CASE_MATHFN (SCALB)
1916 CASE_MATHFN (SCALBLN)
1917 CASE_MATHFN (SCALBN)
1918 CASE_MATHFN (SIGNBIT)
1919 CASE_MATHFN (SIGNIFICAND)
1920 CASE_MATHFN (SIN)
1921 CASE_MATHFN (SINCOS)
1922 CASE_MATHFN (SINH)
1923 CASE_MATHFN (SQRT)
1924 CASE_MATHFN (TAN)
1925 CASE_MATHFN (TANH)
1926 CASE_MATHFN (TGAMMA)
1927 CASE_MATHFN (TRUNC)
1928 CASE_MATHFN (Y0)
1929 CASE_MATHFN (Y1)
1930 CASE_MATHFN (YN)
1931
1932 default:
1933 return END_BUILTINS;
1934 }
1935
1936 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1937 return fcode;
1938 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1939 return fcodef;
1940 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1941 return fcodel;
1942 else
1943 return END_BUILTINS;
1944 }
1945
1946 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1947 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1948 otherwise use the explicit declaration. If we can't do the conversion,
1949 return null. */
1950
1951 static tree
1952 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1953 {
1954 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1955 if (fcode2 == END_BUILTINS)
1956 return NULL_TREE;
1957
1958 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1959 return NULL_TREE;
1960
1961 return builtin_decl_explicit (fcode2);
1962 }
1963
1964 /* Like mathfn_built_in_1, but always use the implicit array. */
1965
1966 tree
1967 mathfn_built_in (tree type, combined_fn fn)
1968 {
1969 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1970 }
1971
1972 /* Like mathfn_built_in_1, but take a built_in_function and
1973 always use the implicit array. */
1974
1975 tree
1976 mathfn_built_in (tree type, enum built_in_function fn)
1977 {
1978 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1979 }
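
/* Worked example (editorial addition): mathfn_built_in (float_type_node,
   BUILT_IN_SIN) maps the double case BUILT_IN_SIN to BUILT_IN_SINF through
   CASE_MATHFN above and returns the implicit declaration of sinf, or
   NULL_TREE if sinf may not be used implicitly on the target.  */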
1980
1981 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1982 return its code, otherwise return IFN_LAST. Note that this function
1983 only tests whether the function is defined in internals.def, not whether
1984 it is actually available on the target. */
1985
1986 internal_fn
1987 associated_internal_fn (tree fndecl)
1988 {
1989 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1990 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1991 switch (DECL_FUNCTION_CODE (fndecl))
1992 {
1993 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1994 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1995 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1996 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1997 #include "internal-fn.def"
1998
1999 CASE_FLT_FN (BUILT_IN_POW10):
2000 return IFN_EXP10;
2001
2002 CASE_FLT_FN (BUILT_IN_DREM):
2003 return IFN_REMAINDER;
2004
2005 CASE_FLT_FN (BUILT_IN_SCALBN):
2006 CASE_FLT_FN (BUILT_IN_SCALBLN):
2007 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2008 return IFN_LDEXP;
2009 return IFN_LAST;
2010
2011 default:
2012 return IFN_LAST;
2013 }
2014 }
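
/* For illustration (editorial addition): BUILT_IN_SQRT maps to IFN_SQRT via
   the internal-fn.def cases above, BUILT_IN_POW10 is rewritten to IFN_EXP10,
   and BUILT_IN_SCALBN maps to IFN_LDEXP only when the return type's radix
   is 2.  */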
2015
2016 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2017 on the current target by a call to an internal function, return the
2018 code of that internal function, otherwise return IFN_LAST. The caller
2019 is responsible for ensuring that any side-effects of the built-in
2020 call are dealt with correctly. E.g. if CALL sets errno, the caller
2021 must decide that the errno result isn't needed or make it available
2022 in some other way. */
2023
2024 internal_fn
2025 replacement_internal_fn (gcall *call)
2026 {
2027 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2028 {
2029 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2030 if (ifn != IFN_LAST)
2031 {
2032 tree_pair types = direct_internal_fn_types (ifn, call);
2033 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2034 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2035 return ifn;
2036 }
2037 }
2038 return IFN_LAST;
2039 }
2040
2041 /* Expand a call to the builtin ternary math functions (fma).
2042 Return NULL_RTX if a normal call should be emitted rather than expanding the
2043 function in-line. EXP is the expression that is a call to the builtin
2044 function; if convenient, the result should be placed in TARGET.
2045 SUBTARGET may be used as the target for computing one of EXP's
2046 operands. */
2047
2048 static rtx
2049 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2050 {
2051 optab builtin_optab;
2052 rtx op0, op1, op2, result;
2053 rtx_insn *insns;
2054 tree fndecl = get_callee_fndecl (exp);
2055 tree arg0, arg1, arg2;
2056 machine_mode mode;
2057
2058 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2059 return NULL_RTX;
2060
2061 arg0 = CALL_EXPR_ARG (exp, 0);
2062 arg1 = CALL_EXPR_ARG (exp, 1);
2063 arg2 = CALL_EXPR_ARG (exp, 2);
2064
2065 switch (DECL_FUNCTION_CODE (fndecl))
2066 {
2067 CASE_FLT_FN (BUILT_IN_FMA):
2068 builtin_optab = fma_optab; break;
2069 default:
2070 gcc_unreachable ();
2071 }
2072
2073 /* Make a suitable register to place result in. */
2074 mode = TYPE_MODE (TREE_TYPE (exp));
2075
2076 /* Before working hard, check whether the instruction is available. */
2077 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2078 return NULL_RTX;
2079
2080 result = gen_reg_rtx (mode);
2081
2082 /* Always stabilize the argument list. */
2083 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2084 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2085 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2086
2087 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2088 op1 = expand_normal (arg1);
2089 op2 = expand_normal (arg2);
2090
2091 start_sequence ();
2092
2093 /* Compute into RESULT.
2094 Set RESULT to wherever the result comes back. */
2095 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2096 result, 0);
2097
2098 /* If we were unable to expand via the builtin, stop the sequence
2099 (without outputting the insns) and call the library function
2100 with the stabilized argument list. */
2101 if (result == 0)
2102 {
2103 end_sequence ();
2104 return expand_call (exp, target, target == const0_rtx);
2105 }
2106
2107 /* Output the entire sequence. */
2108 insns = get_insns ();
2109 end_sequence ();
2110 emit_insn (insns);
2111
2112 return result;
2113 }
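
/* For illustration (editorial addition): a call fma (a, b, c) in a mode for
   which optab_handler (fma_optab, mode) reports an instruction expands to a
   single fused multiply-add insn; if expand_ternary_op instead returns 0,
   the code above discards the sequence and falls back to a normal libcall
   with the save_expr-stabilized arguments.  */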
2114
2115 /* Expand a call to the builtin sin and cos math functions.
2116 Return NULL_RTX if a normal call should be emitted rather than expanding the
2117 function in-line. EXP is the expression that is a call to the builtin
2118 function; if convenient, the result should be placed in TARGET.
2119 SUBTARGET may be used as the target for computing one of EXP's
2120 operands. */
2121
2122 static rtx
2123 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2124 {
2125 optab builtin_optab;
2126 rtx op0;
2127 rtx_insn *insns;
2128 tree fndecl = get_callee_fndecl (exp);
2129 machine_mode mode;
2130 tree arg;
2131
2132 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2133 return NULL_RTX;
2134
2135 arg = CALL_EXPR_ARG (exp, 0);
2136
2137 switch (DECL_FUNCTION_CODE (fndecl))
2138 {
2139 CASE_FLT_FN (BUILT_IN_SIN):
2140 CASE_FLT_FN (BUILT_IN_COS):
2141 builtin_optab = sincos_optab; break;
2142 default:
2143 gcc_unreachable ();
2144 }
2145
2146 /* Make a suitable register to place result in. */
2147 mode = TYPE_MODE (TREE_TYPE (exp));
2148
2149 /* Check if the sincos insn is available; otherwise fall back
2150 to the sin or cos insn. */
2151 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2152 switch (DECL_FUNCTION_CODE (fndecl))
2153 {
2154 CASE_FLT_FN (BUILT_IN_SIN):
2155 builtin_optab = sin_optab; break;
2156 CASE_FLT_FN (BUILT_IN_COS):
2157 builtin_optab = cos_optab; break;
2158 default:
2159 gcc_unreachable ();
2160 }
2161
2162 /* Before working hard, check whether the instruction is available. */
2163 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2164 {
2165 rtx result = gen_reg_rtx (mode);
2166
2167 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2168 need to expand the argument again. This way, we will not perform
2169 side-effects more than once. */
2170 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2171
2172 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2173
2174 start_sequence ();
2175
2176 /* Compute into RESULT.
2177 Set RESULT to wherever the result comes back. */
2178 if (builtin_optab == sincos_optab)
2179 {
2180 int ok;
2181
2182 switch (DECL_FUNCTION_CODE (fndecl))
2183 {
2184 CASE_FLT_FN (BUILT_IN_SIN):
2185 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2186 break;
2187 CASE_FLT_FN (BUILT_IN_COS):
2188 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2189 break;
2190 default:
2191 gcc_unreachable ();
2192 }
2193 gcc_assert (ok);
2194 }
2195 else
2196 result = expand_unop (mode, builtin_optab, op0, result, 0);
2197
2198 if (result != 0)
2199 {
2200 /* Output the entire sequence. */
2201 insns = get_insns ();
2202 end_sequence ();
2203 emit_insn (insns);
2204 return result;
2205 }
2206
2207 /* If we were unable to expand via the builtin, stop the sequence
2208 (without outputting the insns) and call the library function
2209 with the stabilized argument list. */
2210 end_sequence ();
2211 }
2212
2213 return expand_call (exp, target, target == const0_rtx);
2214 }
2215
2216 /* Given an interclass math builtin decl FNDECL and its argument ARG
2217 return an RTL instruction code that implements the functionality.
2218 If that isn't possible or available return CODE_FOR_nothing. */
2219
2220 static enum insn_code
2221 interclass_mathfn_icode (tree arg, tree fndecl)
2222 {
2223 bool errno_set = false;
2224 optab builtin_optab = unknown_optab;
2225 machine_mode mode;
2226
2227 switch (DECL_FUNCTION_CODE (fndecl))
2228 {
2229 CASE_FLT_FN (BUILT_IN_ILOGB):
2230 errno_set = true; builtin_optab = ilogb_optab; break;
2231 CASE_FLT_FN (BUILT_IN_ISINF):
2232 builtin_optab = isinf_optab; break;
2233 case BUILT_IN_ISNORMAL:
2234 case BUILT_IN_ISFINITE:
2235 CASE_FLT_FN (BUILT_IN_FINITE):
2236 case BUILT_IN_FINITED32:
2237 case BUILT_IN_FINITED64:
2238 case BUILT_IN_FINITED128:
2239 case BUILT_IN_ISINFD32:
2240 case BUILT_IN_ISINFD64:
2241 case BUILT_IN_ISINFD128:
2242 /* These builtins have no optabs (yet). */
2243 break;
2244 default:
2245 gcc_unreachable ();
2246 }
2247
2248 /* There's no easy way to detect the case we need to set EDOM. */
2249 if (flag_errno_math && errno_set)
2250 return CODE_FOR_nothing;
2251
2252 /* Optab mode depends on the mode of the input argument. */
2253 mode = TYPE_MODE (TREE_TYPE (arg));
2254
2255 if (builtin_optab)
2256 return optab_handler (builtin_optab, mode);
2257 return CODE_FOR_nothing;
2258 }
2259
2260 /* Expand a call to one of the builtin math functions that operate on
2261 a floating-point argument and output an integer result (ilogb, isinf,
2262 isnan, etc).
2263 Return 0 if a normal call should be emitted rather than expanding the
2264 function in-line. EXP is the expression that is a call to the builtin
2265 function; if convenient, the result should be placed in TARGET. */
2266
2267 static rtx
2268 expand_builtin_interclass_mathfn (tree exp, rtx target)
2269 {
2270 enum insn_code icode = CODE_FOR_nothing;
2271 rtx op0;
2272 tree fndecl = get_callee_fndecl (exp);
2273 machine_mode mode;
2274 tree arg;
2275
2276 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2277 return NULL_RTX;
2278
2279 arg = CALL_EXPR_ARG (exp, 0);
2280 icode = interclass_mathfn_icode (arg, fndecl);
2281 mode = TYPE_MODE (TREE_TYPE (arg));
2282
2283 if (icode != CODE_FOR_nothing)
2284 {
2285 struct expand_operand ops[1];
2286 rtx_insn *last = get_last_insn ();
2287 tree orig_arg = arg;
2288
2289 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2290 need to expand the argument again. This way, we will not perform
2291 side-effects more than once. */
2292 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2293
2294 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2295
2296 if (mode != GET_MODE (op0))
2297 op0 = convert_to_mode (mode, op0, 0);
2298
2299 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2300 if (maybe_legitimize_operands (icode, 0, 1, ops)
2301 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2302 return ops[0].value;
2303
2304 delete_insns_since (last);
2305 CALL_EXPR_ARG (exp, 0) = orig_arg;
2306 }
2307
2308 return NULL_RTX;
2309 }
2310
2311 /* Expand a call to the builtin sincos math function.
2312 Return NULL_RTX if a normal call should be emitted rather than expanding the
2313 function in-line. EXP is the expression that is a call to the builtin
2314 function. */
2315
2316 static rtx
2317 expand_builtin_sincos (tree exp)
2318 {
2319 rtx op0, op1, op2, target1, target2;
2320 machine_mode mode;
2321 tree arg, sinp, cosp;
2322 int result;
2323 location_t loc = EXPR_LOCATION (exp);
2324 tree alias_type, alias_off;
2325
2326 if (!validate_arglist (exp, REAL_TYPE,
2327 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2328 return NULL_RTX;
2329
2330 arg = CALL_EXPR_ARG (exp, 0);
2331 sinp = CALL_EXPR_ARG (exp, 1);
2332 cosp = CALL_EXPR_ARG (exp, 2);
2333
2334 /* Make a suitable register to place result in. */
2335 mode = TYPE_MODE (TREE_TYPE (arg));
2336
2337 /* Check if sincos insn is available, otherwise emit the call. */
2338 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2339 return NULL_RTX;
2340
2341 target1 = gen_reg_rtx (mode);
2342 target2 = gen_reg_rtx (mode);
2343
2344 op0 = expand_normal (arg);
2345 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2346 alias_off = build_int_cst (alias_type, 0);
2347 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2348 sinp, alias_off));
2349 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2350 cosp, alias_off));
2351
2352 /* Compute into target1 and target2.
2353 Set TARGET to wherever the result comes back. */
2354 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2355 gcc_assert (result);
2356
2357 /* Move target1 and target2 to the memory locations indicated
2358 by op1 and op2. */
2359 emit_move_insn (op1, target1);
2360 emit_move_insn (op2, target2);
2361
2362 return const0_rtx;
2363 }
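
/* Usage sketch (editorial addition): for a call such as

     double s, c;
     sincos (x, &s, &c);

   the expansion above computes both results with a single sincos_optab
   instruction and stores them through the two pointer arguments; if the
   optab is unavailable the builtin is left to be emitted as a normal
   library call.  */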
2364
2365 /* Expand a call to the internal cexpi builtin to the sincos math function.
2366 EXP is the expression that is a call to the builtin function; if convenient,
2367 the result should be placed in TARGET. */
2368
2369 static rtx
2370 expand_builtin_cexpi (tree exp, rtx target)
2371 {
2372 tree fndecl = get_callee_fndecl (exp);
2373 tree arg, type;
2374 machine_mode mode;
2375 rtx op0, op1, op2;
2376 location_t loc = EXPR_LOCATION (exp);
2377
2378 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2379 return NULL_RTX;
2380
2381 arg = CALL_EXPR_ARG (exp, 0);
2382 type = TREE_TYPE (arg);
2383 mode = TYPE_MODE (TREE_TYPE (arg));
2384
2385 /* Try expanding via a sincos optab, falling back to emitting a libcall
2386 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2387 is only generated from sincos or cexp, or when either of them is available. */
2388 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2389 {
2390 op1 = gen_reg_rtx (mode);
2391 op2 = gen_reg_rtx (mode);
2392
2393 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2394
2395 /* Compute into op1 and op2. */
2396 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2397 }
2398 else if (targetm.libc_has_function (function_sincos))
2399 {
2400 tree call, fn = NULL_TREE;
2401 tree top1, top2;
2402 rtx op1a, op2a;
2403
2404 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2405 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2406 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2407 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2408 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2409 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2410 else
2411 gcc_unreachable ();
2412
2413 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2414 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2415 op1a = copy_addr_to_reg (XEXP (op1, 0));
2416 op2a = copy_addr_to_reg (XEXP (op2, 0));
2417 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2418 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2419
2420 /* Make sure not to fold the sincos call again. */
2421 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2422 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2423 call, 3, arg, top1, top2));
2424 }
2425 else
2426 {
2427 tree call, fn = NULL_TREE, narg;
2428 tree ctype = build_complex_type (type);
2429
2430 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2431 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2432 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2433 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2434 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2435 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2436 else
2437 gcc_unreachable ();
2438
2439 /* If we don't have a decl for cexp, create one. This is the
2440 friendliest fallback if the user calls __builtin_cexpi
2441 without full target C99 function support. */
2442 if (fn == NULL_TREE)
2443 {
2444 tree fntype;
2445 const char *name = NULL;
2446
2447 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2448 name = "cexpf";
2449 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2450 name = "cexp";
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2452 name = "cexpl";
2453
2454 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2455 fn = build_fn_decl (name, fntype);
2456 }
2457
2458 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2459 build_real (type, dconst0), arg);
2460
2461 /* Make sure not to fold the cexp call again. */
2462 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2463 return expand_expr (build_call_nary (ctype, call, 1, narg),
2464 target, VOIDmode, EXPAND_NORMAL);
2465 }
2466
2467 /* Now build the proper return type. */
2468 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2469 make_tree (TREE_TYPE (arg), op2),
2470 make_tree (TREE_TYPE (arg), op1)),
2471 target, VOIDmode, EXPAND_NORMAL);
2472 }
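
/* Fallback sketch (editorial addition): when no sincos instruction is
   available, __builtin_cexpi (x) is lowered either to

     sincos (x, &s, &c);    ... with the result built as c + s*I

   when the C library provides sincos, or otherwise to a call

     cexp (0.0 + x*I)

   with the complex argument built as a COMPLEX_EXPR above, so the result is
   always the complex type corresponding to the argument.  */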
2473
2474 /* Conveniently construct a function call expression. FNDECL names the
2475 function to be called, N is the number of arguments, and the "..."
2476 parameters are the argument expressions. Unlike build_call_expr
2477 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2478
2479 static tree
2480 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2481 {
2482 va_list ap;
2483 tree fntype = TREE_TYPE (fndecl);
2484 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2485
2486 va_start (ap, n);
2487 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2488 va_end (ap);
2489 SET_EXPR_LOCATION (fn, loc);
2490 return fn;
2491 }
2492
2493 /* Expand a call to one of the builtin rounding functions gcc defines
2494 as an extension (lfloor and lceil). As these are gcc extensions we
2495 do not need to worry about setting errno to EDOM.
2496 If expanding via optab fails, lower expression to (int)(floor(x)).
2497 EXP is the expression that is a call to the builtin function;
2498 if convenient, the result should be placed in TARGET. */
2499
2500 static rtx
2501 expand_builtin_int_roundingfn (tree exp, rtx target)
2502 {
2503 convert_optab builtin_optab;
2504 rtx op0, tmp;
2505 rtx_insn *insns;
2506 tree fndecl = get_callee_fndecl (exp);
2507 enum built_in_function fallback_fn;
2508 tree fallback_fndecl;
2509 machine_mode mode;
2510 tree arg;
2511
2512 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2513 gcc_unreachable ();
2514
2515 arg = CALL_EXPR_ARG (exp, 0);
2516
2517 switch (DECL_FUNCTION_CODE (fndecl))
2518 {
2519 CASE_FLT_FN (BUILT_IN_ICEIL):
2520 CASE_FLT_FN (BUILT_IN_LCEIL):
2521 CASE_FLT_FN (BUILT_IN_LLCEIL):
2522 builtin_optab = lceil_optab;
2523 fallback_fn = BUILT_IN_CEIL;
2524 break;
2525
2526 CASE_FLT_FN (BUILT_IN_IFLOOR):
2527 CASE_FLT_FN (BUILT_IN_LFLOOR):
2528 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2529 builtin_optab = lfloor_optab;
2530 fallback_fn = BUILT_IN_FLOOR;
2531 break;
2532
2533 default:
2534 gcc_unreachable ();
2535 }
2536
2537 /* Make a suitable register to place result in. */
2538 mode = TYPE_MODE (TREE_TYPE (exp));
2539
2540 target = gen_reg_rtx (mode);
2541
2542 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2543 need to expand the argument again. This way, we will not perform
2544 side-effects more than once. */
2545 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2546
2547 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2548
2549 start_sequence ();
2550
2551 /* Compute into TARGET. */
2552 if (expand_sfix_optab (target, op0, builtin_optab))
2553 {
2554 /* Output the entire sequence. */
2555 insns = get_insns ();
2556 end_sequence ();
2557 emit_insn (insns);
2558 return target;
2559 }
2560
2561 /* If we were unable to expand via the builtin, stop the sequence
2562 (without outputting the insns). */
2563 end_sequence ();
2564
2565 /* Fall back to floating point rounding optab. */
2566 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2567
2568 /* For non-C99 targets we may end up without a fallback fndecl here
2569 if the user called __builtin_lfloor directly. In this case emit
2570 a call to the floor/ceil variants nevertheless. This should result
2571 in the best user experience on targets without full C99 support. */
2572 if (fallback_fndecl == NULL_TREE)
2573 {
2574 tree fntype;
2575 const char *name = NULL;
2576
2577 switch (DECL_FUNCTION_CODE (fndecl))
2578 {
2579 case BUILT_IN_ICEIL:
2580 case BUILT_IN_LCEIL:
2581 case BUILT_IN_LLCEIL:
2582 name = "ceil";
2583 break;
2584 case BUILT_IN_ICEILF:
2585 case BUILT_IN_LCEILF:
2586 case BUILT_IN_LLCEILF:
2587 name = "ceilf";
2588 break;
2589 case BUILT_IN_ICEILL:
2590 case BUILT_IN_LCEILL:
2591 case BUILT_IN_LLCEILL:
2592 name = "ceill";
2593 break;
2594 case BUILT_IN_IFLOOR:
2595 case BUILT_IN_LFLOOR:
2596 case BUILT_IN_LLFLOOR:
2597 name = "floor";
2598 break;
2599 case BUILT_IN_IFLOORF:
2600 case BUILT_IN_LFLOORF:
2601 case BUILT_IN_LLFLOORF:
2602 name = "floorf";
2603 break;
2604 case BUILT_IN_IFLOORL:
2605 case BUILT_IN_LFLOORL:
2606 case BUILT_IN_LLFLOORL:
2607 name = "floorl";
2608 break;
2609 default:
2610 gcc_unreachable ();
2611 }
2612
2613 fntype = build_function_type_list (TREE_TYPE (arg),
2614 TREE_TYPE (arg), NULL_TREE);
2615 fallback_fndecl = build_fn_decl (name, fntype);
2616 }
2617
2618 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2619
2620 tmp = expand_normal (exp);
2621 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2622
2623 /* Truncate the result of floating point optab to integer
2624 via expand_fix (). */
2625 target = gen_reg_rtx (mode);
2626 expand_fix (target, tmp, 0);
2627
2628 return target;
2629 }
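
/* For illustration (editorial addition): a call to __builtin_lfloor (x)
   first tries the lfloor_optab conversion.  If the target lacks it, the
   call is rewritten as roughly

     (long) floor (x)

   using the fallback decl chosen above, with expand_fix performing the
   final truncation to the integer mode.  */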
2630
2631 /* Expand a call to one of the builtin math functions doing integer
2632 conversion (lrint).
2633 Return 0 if a normal call should be emitted rather than expanding the
2634 function in-line. EXP is the expression that is a call to the builtin
2635 function; if convenient, the result should be placed in TARGET. */
2636
2637 static rtx
2638 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2639 {
2640 convert_optab builtin_optab;
2641 rtx op0;
2642 rtx_insn *insns;
2643 tree fndecl = get_callee_fndecl (exp);
2644 tree arg;
2645 machine_mode mode;
2646 enum built_in_function fallback_fn = BUILT_IN_NONE;
2647
2648 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2649 gcc_unreachable ();
2650
2651 arg = CALL_EXPR_ARG (exp, 0);
2652
2653 switch (DECL_FUNCTION_CODE (fndecl))
2654 {
2655 CASE_FLT_FN (BUILT_IN_IRINT):
2656 fallback_fn = BUILT_IN_LRINT;
2657 gcc_fallthrough ();
2658 CASE_FLT_FN (BUILT_IN_LRINT):
2659 CASE_FLT_FN (BUILT_IN_LLRINT):
2660 builtin_optab = lrint_optab;
2661 break;
2662
2663 CASE_FLT_FN (BUILT_IN_IROUND):
2664 fallback_fn = BUILT_IN_LROUND;
2665 gcc_fallthrough ();
2666 CASE_FLT_FN (BUILT_IN_LROUND):
2667 CASE_FLT_FN (BUILT_IN_LLROUND):
2668 builtin_optab = lround_optab;
2669 break;
2670
2671 default:
2672 gcc_unreachable ();
2673 }
2674
2675 /* There's no easy way to detect the case we need to set EDOM. */
2676 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2677 return NULL_RTX;
2678
2679 /* Make a suitable register to place result in. */
2680 mode = TYPE_MODE (TREE_TYPE (exp));
2681
2682 /* There's no easy way to detect the case we need to set EDOM. */
2683 if (!flag_errno_math)
2684 {
2685 rtx result = gen_reg_rtx (mode);
2686
2687 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2688 need to expand the argument again. This way, we will not perform
2689 side-effects more than once. */
2690 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2691
2692 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2693
2694 start_sequence ();
2695
2696 if (expand_sfix_optab (result, op0, builtin_optab))
2697 {
2698 /* Output the entire sequence. */
2699 insns = get_insns ();
2700 end_sequence ();
2701 emit_insn (insns);
2702 return result;
2703 }
2704
2705 /* If we were unable to expand via the builtin, stop the sequence
2706 (without outputting the insns) and call the library function
2707 with the stabilized argument list. */
2708 end_sequence ();
2709 }
2710
2711 if (fallback_fn != BUILT_IN_NONE)
2712 {
2713 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2714 targets, (int) round (x) should never be transformed into
2715 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2716 a call to lround in the hope that the target provides at least some
2717 C99 functions. This should result in the best user experience on
2718 targets without full C99 support. */
2719 tree fallback_fndecl = mathfn_built_in_1
2720 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2721
2722 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2723 fallback_fndecl, 1, arg);
2724
2725 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2726 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2727 return convert_to_mode (mode, target, 0);
2728 }
2729
2730 return expand_call (exp, target, target == const0_rtx);
2731 }
2732
2733 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2734 a normal call should be emitted rather than expanding the function
2735 in-line. EXP is the expression that is a call to the builtin
2736 function; if convenient, the result should be placed in TARGET. */
2737
2738 static rtx
2739 expand_builtin_powi (tree exp, rtx target)
2740 {
2741 tree arg0, arg1;
2742 rtx op0, op1;
2743 machine_mode mode;
2744 machine_mode mode2;
2745
2746 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2747 return NULL_RTX;
2748
2749 arg0 = CALL_EXPR_ARG (exp, 0);
2750 arg1 = CALL_EXPR_ARG (exp, 1);
2751 mode = TYPE_MODE (TREE_TYPE (exp));
2752
2753 /* Emit a libcall to libgcc. */
2754
2755 /* Mode of the 2nd argument must match that of an int. */
2756 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2757
2758 if (target == NULL_RTX)
2759 target = gen_reg_rtx (mode);
2760
2761 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2762 if (GET_MODE (op0) != mode)
2763 op0 = convert_to_mode (mode, op0, 0);
2764 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2765 if (GET_MODE (op1) != mode2)
2766 op1 = convert_to_mode (mode2, op1, 0);
2767
2768 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2769 target, LCT_CONST, mode, 2,
2770 op0, mode, op1, mode2);
2771
2772 return target;
2773 }
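
/* Usage sketch (editorial addition): __builtin_powi (x, 5) with double X is
   expanded above as a libcall through powi_optab, i.e. roughly

     __powidf2 (x, 5)

   where __powidf2 is the libgcc helper normally bound to that optab; the
   exact libfunc name depends on the target configuration.  */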
2774
2775 /* Expand expression EXP which is a call to the strlen builtin. Return
2776 NULL_RTX if we failed the caller should emit a normal call, otherwise
2777 try to get the result in TARGET, if convenient. */
2778
2779 static rtx
2780 expand_builtin_strlen (tree exp, rtx target,
2781 machine_mode target_mode)
2782 {
2783 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2784 return NULL_RTX;
2785 else
2786 {
2787 struct expand_operand ops[4];
2788 rtx pat;
2789 tree len;
2790 tree src = CALL_EXPR_ARG (exp, 0);
2791 rtx src_reg;
2792 rtx_insn *before_strlen;
2793 machine_mode insn_mode = target_mode;
2794 enum insn_code icode = CODE_FOR_nothing;
2795 unsigned int align;
2796
2797 /* If the length can be computed at compile-time, return it. */
2798 len = c_strlen (src, 0);
2799 if (len)
2800 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2801
2802 /* If the length can be computed at compile-time and is a constant
2803 integer, but there are side-effects in src, evaluate
2804 src for side-effects, then return len.
2805 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2806 can be optimized into: i++; x = 3; */
2807 len = c_strlen (src, 1);
2808 if (len && TREE_CODE (len) == INTEGER_CST)
2809 {
2810 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2811 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2812 }
2813
2814 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2815
2816 /* If SRC is not a pointer type, don't do this operation inline. */
2817 if (align == 0)
2818 return NULL_RTX;
2819
2820 /* Bail out if we can't compute strlen in the right mode. */
2821 while (insn_mode != VOIDmode)
2822 {
2823 icode = optab_handler (strlen_optab, insn_mode);
2824 if (icode != CODE_FOR_nothing)
2825 break;
2826
2827 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2828 }
2829 if (insn_mode == VOIDmode)
2830 return NULL_RTX;
2831
2832 /* Make a place to hold the source address. We will not expand
2833 the actual source until we are sure that the expansion will
2834 not fail -- there are trees that cannot be expanded twice. */
2835 src_reg = gen_reg_rtx (Pmode);
2836
2837 /* Mark the beginning of the strlen sequence so we can emit the
2838 source operand later. */
2839 before_strlen = get_last_insn ();
2840
2841 create_output_operand (&ops[0], target, insn_mode);
2842 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2843 create_integer_operand (&ops[2], 0);
2844 create_integer_operand (&ops[3], align);
2845 if (!maybe_expand_insn (icode, 4, ops))
2846 return NULL_RTX;
2847
2848 /* Now that we are assured of success, expand the source. */
2849 start_sequence ();
2850 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2851 if (pat != src_reg)
2852 {
2853 #ifdef POINTERS_EXTEND_UNSIGNED
2854 if (GET_MODE (pat) != Pmode)
2855 pat = convert_to_mode (Pmode, pat,
2856 POINTERS_EXTEND_UNSIGNED);
2857 #endif
2858 emit_move_insn (src_reg, pat);
2859 }
2860 pat = get_insns ();
2861 end_sequence ();
2862
2863 if (before_strlen)
2864 emit_insn_after (pat, before_strlen);
2865 else
2866 emit_insn_before (pat, get_insns ());
2867
2868 /* Return the value in the proper mode for this function. */
2869 if (GET_MODE (ops[0].value) == target_mode)
2870 target = ops[0].value;
2871 else if (target != 0)
2872 convert_move (target, ops[0].value, 0);
2873 else
2874 target = convert_to_mode (target_mode, ops[0].value, 0);
2875
2876 return target;
2877 }
2878 }
2879
2880 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2881 bytes from constant string DATA + OFFSET and return it as target
2882 constant. */
2883
2884 static rtx
2885 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2886 machine_mode mode)
2887 {
2888 const char *str = (const char *) data;
2889
2890 gcc_assert (offset >= 0
2891 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2892 <= strlen (str) + 1));
2893
2894 return c_readstr (str + offset, mode);
2895 }
2896
2897 /* LEN specifies the length of the block of the memcpy/memset operation.
2898 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2899 In some cases we can make a very likely guess about the maximum size,
2900 which we then store in PROBABLE_MAX_SIZE. */
2901
2902 static void
2903 determine_block_size (tree len, rtx len_rtx,
2904 unsigned HOST_WIDE_INT *min_size,
2905 unsigned HOST_WIDE_INT *max_size,
2906 unsigned HOST_WIDE_INT *probable_max_size)
2907 {
2908 if (CONST_INT_P (len_rtx))
2909 {
2910 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2911 return;
2912 }
2913 else
2914 {
2915 wide_int min, max;
2916 enum value_range_type range_type = VR_UNDEFINED;
2917
2918 /* Determine bounds from the type. */
2919 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2920 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2921 else
2922 *min_size = 0;
2923 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2924 *probable_max_size = *max_size
2925 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2926 else
2927 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2928
2929 if (TREE_CODE (len) == SSA_NAME)
2930 range_type = get_range_info (len, &min, &max);
2931 if (range_type == VR_RANGE)
2932 {
2933 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2934 *min_size = min.to_uhwi ();
2935 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2936 *probable_max_size = *max_size = max.to_uhwi ();
2937 }
2938 else if (range_type == VR_ANTI_RANGE)
2939 {
2940 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2941 if (min == 0)
2942 {
2943 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2944 *min_size = max.to_uhwi () + 1;
2945 }
2946 /* Code like
2947
2948 int n;
2949 if (n < 100)
2950 memcpy (a, b, n)
2951
2952 produces an anti-range allowing negative values of N. We can
2953 still use that information to guess that N is not negative.
2954 */
2955 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2956 *probable_max_size = min.to_uhwi () - 1;
2957 }
2958 }
2959 gcc_checking_assert (*max_size <=
2960 (unsigned HOST_WIDE_INT)
2961 GET_MODE_MASK (GET_MODE (len_rtx)));
2962 }
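
/* Worked example (editorial addition): if LEN is an SSA name of unsigned
   type and VRP has recorded the range [4, 64] for it, the code above sets
   *min_size = 4 and *max_size = *probable_max_size = 64; the signed
   "if (n < 100)" case shown in the comment above instead yields only an
   anti-range, so just PROBABLE_MAX_SIZE is tightened.  */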
2963
2964 /* Helper function to do the actual work for expand_builtin_memcpy. */
2965
2966 static rtx
2967 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2968 {
2969 const char *src_str;
2970 unsigned int src_align = get_pointer_alignment (src);
2971 unsigned int dest_align = get_pointer_alignment (dest);
2972 rtx dest_mem, src_mem, dest_addr, len_rtx;
2973 HOST_WIDE_INT expected_size = -1;
2974 unsigned int expected_align = 0;
2975 unsigned HOST_WIDE_INT min_size;
2976 unsigned HOST_WIDE_INT max_size;
2977 unsigned HOST_WIDE_INT probable_max_size;
2978
2979 /* If DEST is not a pointer type, call the normal function. */
2980 if (dest_align == 0)
2981 return NULL_RTX;
2982
2983 /* If SRC is not a pointer type, don't do this
2984 operation in-line. */
2985 if (src_align == 0)
2986 return NULL_RTX;
2987
2988 if (currently_expanding_gimple_stmt)
2989 stringop_block_profile (currently_expanding_gimple_stmt,
2990 &expected_align, &expected_size);
2991
2992 if (expected_align < dest_align)
2993 expected_align = dest_align;
2994 dest_mem = get_memory_rtx (dest, len);
2995 set_mem_align (dest_mem, dest_align);
2996 len_rtx = expand_normal (len);
2997 determine_block_size (len, len_rtx, &min_size, &max_size,
2998 &probable_max_size);
2999 src_str = c_getstr (src);
3000
3001 /* If SRC is a string constant and block move would be done
3002 by pieces, we can avoid loading the string from memory
3003 and only store the computed constants. */
3004 if (src_str
3005 && CONST_INT_P (len_rtx)
3006 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3007 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3008 CONST_CAST (char *, src_str),
3009 dest_align, false))
3010 {
3011 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3012 builtin_memcpy_read_str,
3013 CONST_CAST (char *, src_str),
3014 dest_align, false, 0);
3015 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3016 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3017 return dest_mem;
3018 }
3019
3020 src_mem = get_memory_rtx (src, len);
3021 set_mem_align (src_mem, src_align);
3022
3023 /* Copy word part most expediently. */
3024 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3025 CALL_EXPR_TAILCALL (exp)
3026 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3027 expected_align, expected_size,
3028 min_size, max_size, probable_max_size);
3029
3030 if (dest_addr == 0)
3031 {
3032 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3033 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3034 }
3035
3036 return dest_addr;
3037 }
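
/* For illustration (editorial addition): memcpy (dst, "abc", 4) with a
   suitably aligned DST satisfies the can_store_by_pieces test above and is
   expanded into direct constant stores via builtin_memcpy_read_str; a copy
   with a variable length instead goes through emit_block_move_hints with
   the MIN/MAX size range computed by determine_block_size.  */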
3038
3039 /* Try to verify that the sizes and lengths of the arguments to a string
3040 manipulation function given by EXP are within valid bounds and that
3041 the operation does not lead to buffer overflow. Arguments other than
3042 EXP may be null. When non-null, the arguments have the following
3043 meaning:
3044 SIZE is the user-supplied size argument to the function (such as in
3045 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)). It specifies the exact
3046 number of bytes to write.
3047 MAXLEN is the user-supplied bound on the length of the source sequence
3048 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3049 of bytes to write.
3050 SRC is the source string (such as in strcpy(d, s)) when the expression
3051 EXP is a string function call (as opposed to a memory call like memcpy).
3052 As an exception, SRC can also be an integer denoting the precomputed
3053 size of the source string or object (for functions like memcpy).
3054 OBJSIZE is the size of the destination object specified by the last
3055 argument to the _chk builtins, typically resulting from the expansion
3056 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
3057 OBJSIZE).
3058
3059 When SIZE is null LEN is checked to verify that it doesn't exceed
3060 SIZE_MAX.
3061
3062 If the call is successfully verified as safe from buffer overflow
3063 the function returns true, otherwise false. */
3064
3065 static bool
3066 check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
3067 {
3068 /* The size of the largest object is half the address space, or
3069 SSIZE_MAX. (This is way too permissive.) */
3070 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3071
3072 tree slen = NULL_TREE;
3073
3074 tree range[2] = { NULL_TREE, NULL_TREE };
3075
3076 /* Set to true when the exact number of bytes written by a string
3077 function like strcpy is not known and the only thing that is
3078 known is that it must be at least one (for the terminating nul). */
3079 bool at_least_one = false;
3080 if (src)
3081 {
3082 /* SRC is normally a pointer to string but as a special case
3083 it can be an integer denoting the length of a string. */
3084 if (POINTER_TYPE_P (TREE_TYPE (src)))
3085 {
3086 /* Try to determine the range of lengths the source string
3087 refers to. If it can be determined and is less than
3088 the upper bound given by MAXLEN add one to it for
3089 the terminating nul. Otherwise, set it to one for
3090 the same reason, or to MAXLEN as appropriate. */
3091 get_range_strlen (src, range);
3092 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3093 {
3094 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3095 range[0] = range[1] = maxlen;
3096 else
3097 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3098 range[0], size_one_node);
3099
3100 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3101 range[1] = maxlen;
3102 else if (!integer_all_onesp (range[1]))
3103 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3104 range[1], size_one_node);
3105
3106 slen = range[0];
3107 }
3108 else
3109 {
3110 at_least_one = true;
3111 slen = size_one_node;
3112 }
3113 }
3114 else
3115 slen = src;
3116 }
3117
3118 if (!size && !maxlen)
3119 {
3120 /* When the only available piece of data is the object size
3121 there is nothing to do. */
3122 if (!slen)
3123 return true;
3124
3125 /* Otherwise, when the length of the source sequence is known
3126 (as with strlen), set SIZE to it. */
3127 if (!range[0])
3128 size = slen;
3129 }
3130
3131 if (!objsize)
3132 objsize = maxobjsize;
3133
3134 /* The SIZE is exact if it's non-null, constant, and in range of
3135 unsigned HOST_WIDE_INT. */
3136 bool exactsize = size && tree_fits_uhwi_p (size);
3137
3138 if (size)
3139 get_size_range (size, range);
3140
3141 /* First check the number of bytes to be written against the maximum
3142 object size. */
3143 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3144 {
3145 location_t loc = tree_nonartificial_location (exp);
3146 loc = expansion_point_location_if_in_system_header (loc);
3147
3148 if (range[0] == range[1])
3149 warning_at (loc, opt,
3150 "%K%qD specified size %E "
3151 "exceeds maximum object size %E",
3152 exp, get_callee_fndecl (exp), range[0], maxobjsize);
3153 else
3154 warning_at (loc, opt,
3155 "%K%qD specified size between %E and %E "
3156 "exceeds maximum object size %E",
3157 exp, get_callee_fndecl (exp),
3158 range[0], range[1], maxobjsize);
3159 return false;
3160 }
3161
3162 /* Next check the number of bytes to be written against the destination
3163 object size. */
3164 if (range[0] || !exactsize || integer_all_onesp (size))
3165 {
3166 if (range[0]
3167 && ((tree_fits_uhwi_p (objsize)
3168 && tree_int_cst_lt (objsize, range[0]))
3169 || (tree_fits_uhwi_p (size)
3170 && tree_int_cst_lt (size, range[0]))))
3171 {
3172 location_t loc = tree_nonartificial_location (exp);
3173 loc = expansion_point_location_if_in_system_header (loc);
3174
3175 if (size == slen && at_least_one)
3176 {
3177 /* This is a call to strcpy with a destination of 0 size
3178 and a source of unknown length. The call will write
3179 at least one byte past the end of the destination. */
3180 warning_at (loc, opt,
3181 "%K%qD writing %E or more bytes into a region "
3182 "of size %E overflows the destination",
3183 exp, get_callee_fndecl (exp), range[0], objsize);
3184 }
3185 else if (tree_int_cst_equal (range[0], range[1]))
3186 warning_at (loc, opt,
3187 (integer_onep (range[0])
3188 ? G_("%K%qD writing %E byte into a region "
3189 "of size %E overflows the destination")
3190 : G_("%K%qD writing %E bytes into a region "
3191 "of size %E overflows the destination")),
3192 exp, get_callee_fndecl (exp), range[0], objsize);
3193 else if (tree_int_cst_sign_bit (range[1]))
3194 {
3195 /* Avoid printing the upper bound if it's invalid. */
3196 warning_at (loc, opt,
3197 "%K%qD writing %E or more bytes into a region "
3198 "of size %E overflows the destination",
3199 exp, get_callee_fndecl (exp), range[0], objsize);
3200 }
3201 else
3202 warning_at (loc, opt,
3203 "%K%qD writing between %E and %E bytes into "
3204 "a region of size %E overflows the destination",
3205 exp, get_callee_fndecl (exp), range[0], range[1],
3206 objsize);
3207
3208 /* Return error when an overflow has been detected. */
3209 return false;
3210 }
3211 }
3212
3213 /* Check the maximum length of the source sequence against the size
3214 of the destination object if known, or against the maximum size
3215 of an object. */
3216 if (maxlen)
3217 {
3218 get_size_range (maxlen, range);
3219
3220 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3221 {
3222 location_t loc = tree_nonartificial_location (exp);
3223 loc = expansion_point_location_if_in_system_header (loc);
3224
3225 if (tree_int_cst_lt (maxobjsize, range[0]))
3226 {
3227 /* Warn about crazy big sizes first since that's more
3228 likely to be meaningful than saying that the bound
3229 is greater than the object size if both are big. */
3230 if (range[0] == range[1])
3231 warning_at (loc, opt,
3232 "%K%qD specified bound %E "
3233 "exceeds maximum object size %E",
3234 exp, get_callee_fndecl (exp),
3235 range[0], maxobjsize);
3236 else
3237 warning_at (loc, opt,
3238 "%K%qD specified bound between %E and %E "
3239 "exceeds maximum object size %E",
3240 exp, get_callee_fndecl (exp),
3241 range[0], range[1], maxobjsize);
3242
3243 return false;
3244 }
3245
3246 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3247 {
3248 if (tree_int_cst_equal (range[0], range[1]))
3249 warning_at (loc, opt,
3250 "%K%qD specified bound %E "
3251 "exceeds destination size %E",
3252 exp, get_callee_fndecl (exp),
3253 range[0], objsize);
3254 else
3255 warning_at (loc, opt,
3256 "%K%qD specified bound between %E and %E "
3257 "exceeds destination size %E",
3258 exp, get_callee_fndecl (exp),
3259 range[0], range[1], objsize);
3260 return false;
3261 }
3262 }
3263 }
3264
3265 if (slen
3266 && slen == src
3267 && size && range[0]
3268 && tree_int_cst_lt (slen, range[0]))
3269 {
3270 location_t loc = tree_nonartificial_location (exp);
3271
3272 if (tree_int_cst_equal (range[0], range[1]))
3273 warning_at (loc, opt,
3274 (tree_int_cst_equal (range[0], integer_one_node)
3275 ? G_("%K%qD reading %E byte from a region of size %E")
3276 : G_("%K%qD reading %E bytes from a region of size %E")),
3277 exp, get_callee_fndecl (exp), range[0], slen);
3278 else if (tree_int_cst_sign_bit (range[1]))
3279 {
3280 /* Avoid printing the upper bound if it's invalid. */
3281 warning_at (loc, opt,
3282 "%K%qD reading %E or more bytes from a region "
3283 "of size %E",
3284 exp, get_callee_fndecl (exp), range[0], slen);
3285 }
3286 else
3287 warning_at (loc, opt,
3288 "%K%qD reading between %E and %E bytes from a region "
3289 "of size %E",
3290 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3291 return false;
3292 }
3293
3294 return true;
3295 }
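
/* Diagnostic sketch (editorial addition): with -Wstringop-overflow enabled,
   a strcpy caller of this helper would, for a call like

     char d[3];
     strcpy (d, "abcdef");

   arrive here with OBJSIZE == 3, a source length range of [6, 6], and hence
   range[0] == range[1] == 7 after adding the terminating nul, so the
   "writing %E bytes into a region of size %E overflows the destination"
   warning above is issued.  */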
3296
3297 /* Helper to compute the size of the object referenced by the DEST
3298 expression, which must be of pointer type, using Object Size type
3299 OSTYPE (only the least significant 2 bits are used). Return
3300 the size of the object if successful or NULL when the size cannot
3301 be determined. */
3302
3303 static inline tree
3304 compute_objsize (tree dest, int ostype)
3305 {
3306 unsigned HOST_WIDE_INT size;
3307 if (compute_builtin_object_size (dest, ostype & 3, &size))
3308 return build_int_cst (sizetype, size);
3309
3310 return NULL_TREE;
3311 }
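
/* For illustration (editorial addition): for a destination that is the
   address of char buf[8], compute_objsize (dest, 0) returns the sizetype
   constant 8 via compute_builtin_object_size; for an unknown pointer it
   returns NULL_TREE and the callers fall back to the maximum object size.  */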
3312
3313 /* Helper to determine and check the sizes of the source and the destination
3314 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3315 call expression, DEST is the destination argument, SRC is the source
3316 argument or null, and LEN is the number of bytes. Use Object Size type-0
3317 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3318 (no overflow or invalid sizes), false otherwise. */
3319
3320 static bool
3321 check_memop_sizes (tree exp, tree dest, tree src, tree size)
3322 {
3323 if (!warn_stringop_overflow)
3324 return true;
3325
3326 /* For functions like memset and memcpy that operate on raw memory
3327 try to determine the size of the largest source and destination
3328 object using type-0 Object Size regardless of the object size
3329 type specified by the option. */
3330 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3331 tree dstsize = compute_objsize (dest, 0);
3332
3333 return check_sizes (OPT_Wstringop_overflow_, exp,
3334 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3335 }
3336
3337 /* Validate memchr arguments without performing any expansion.
3338 Return NULL_RTX. */
3339
3340 static rtx
3341 expand_builtin_memchr (tree exp, rtx)
3342 {
3343 if (!validate_arglist (exp,
3344 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3345 return NULL_RTX;
3346
3347 tree arg1 = CALL_EXPR_ARG (exp, 0);
3348 tree len = CALL_EXPR_ARG (exp, 2);
3349
3350 /* Diagnose calls where the specified length exceeds the size
3351 of the object. */
3352 if (warn_stringop_overflow)
3353 {
3354 tree size = compute_objsize (arg1, 0);
3355 check_sizes (OPT_Wstringop_overflow_,
3356 exp, len, /*maxlen=*/NULL_TREE,
3357 size, /*objsize=*/NULL_TREE);
3358 }
3359
3360 return NULL_RTX;
3361 }
3362
3363 /* Expand a call EXP to the memcpy builtin.
3364 Return NULL_RTX if we failed, the caller should emit a normal call,
3365 otherwise try to get the result in TARGET, if convenient (and in
3366 mode MODE if that's convenient). */
3367
3368 static rtx
3369 expand_builtin_memcpy (tree exp, rtx target)
3370 {
3371 if (!validate_arglist (exp,
3372 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3373 return NULL_RTX;
3374
3375 tree dest = CALL_EXPR_ARG (exp, 0);
3376 tree src = CALL_EXPR_ARG (exp, 1);
3377 tree len = CALL_EXPR_ARG (exp, 2);
3378
3379 check_memop_sizes (exp, dest, src, len);
3380
3381 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3382 }
3383
3384 /* Check a call EXP to the memmove built-in for validity.
3385 Return NULL_RTX on both success and failure. */
3386
3387 static rtx
3388 expand_builtin_memmove (tree exp, rtx)
3389 {
3390 if (!validate_arglist (exp,
3391 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3392 return NULL_RTX;
3393
3394 tree dest = CALL_EXPR_ARG (exp, 0);
3395 tree src = CALL_EXPR_ARG (exp, 1);
3396 tree len = CALL_EXPR_ARG (exp, 2);
3397
3398 check_memop_sizes (exp, dest, src, len);
3399
3400 return NULL_RTX;
3401 }
3402
3403 /* Expand an instrumented call EXP to the memcpy builtin.
3404 Return NULL_RTX if we failed, the caller should emit a normal call,
3405 otherwise try to get the result in TARGET, if convenient (and in
3406 mode MODE if that's convenient). */
3407
3408 static rtx
3409 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3410 {
3411 if (!validate_arglist (exp,
3412 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3413 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3414 INTEGER_TYPE, VOID_TYPE))
3415 return NULL_RTX;
3416 else
3417 {
3418 tree dest = CALL_EXPR_ARG (exp, 0);
3419 tree src = CALL_EXPR_ARG (exp, 2);
3420 tree len = CALL_EXPR_ARG (exp, 4);
3421 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3422
3423 /* Return src bounds with the result. */
3424 if (res)
3425 {
3426 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3427 expand_normal (CALL_EXPR_ARG (exp, 1)));
3428 res = chkp_join_splitted_slot (res, bnd);
3429 }
3430 return res;
3431 }
3432 }
3433
3434 /* Expand a call EXP to the mempcpy builtin.
3435 Return NULL_RTX if we failed; the caller should emit a normal call,
3436 otherwise try to get the result in TARGET, if convenient (and in
3437 mode MODE if that's convenient). If ENDP is 0 return the
3438 destination pointer, if ENDP is 1 return the end pointer ala
3439 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3440 stpcpy. */
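
/* As a concrete illustration (added for exposition, not in the original
   source), assume len == 3 bytes are copied from SRC to DST:

     ENDP == 0   returns dst        (memcpy-style result)
     ENDP == 1   returns dst + 3    (mempcpy-style, one past the copy)
     ENDP == 2   returns dst + 2    (stpcpy-style, pointing at the last
                                     byte written, i.e. the NUL when a
                                     string and its terminator are copied).  */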
3441
3442 static rtx
3443 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3444 {
3445 if (!validate_arglist (exp,
3446 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3447 return NULL_RTX;
3448
3449 tree dest = CALL_EXPR_ARG (exp, 0);
3450 tree src = CALL_EXPR_ARG (exp, 1);
3451 tree len = CALL_EXPR_ARG (exp, 2);
3452
3453 /* Avoid expanding mempcpy into memcpy when the call is determined
3454 to overflow the buffer. This also prevents the same overflow
3455 from being diagnosed again when expanding memcpy. */
3456 if (!check_memop_sizes (exp, dest, src, len))
3457 return NULL_RTX;
3458
3459 return expand_builtin_mempcpy_args (dest, src, len,
3460 target, mode, /*endp=*/ 1,
3461 exp);
3462 }
3463
3464 /* Expand an instrumented call EXP to the mempcpy builtin.
3465 Return NULL_RTX if we failed, the caller should emit a normal call,
3466 otherwise try to get the result in TARGET, if convenient (and in
3467 mode MODE if that's convenient). */
3468
3469 static rtx
3470 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3471 {
3472 if (!validate_arglist (exp,
3473 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3474 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3475 INTEGER_TYPE, VOID_TYPE))
3476 return NULL_RTX;
3477 else
3478 {
3479 tree dest = CALL_EXPR_ARG (exp, 0);
3480 tree src = CALL_EXPR_ARG (exp, 2);
3481 tree len = CALL_EXPR_ARG (exp, 4);
3482 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3483 mode, 1, exp);
3484
3485 /* Return src bounds with the result. */
3486 if (res)
3487 {
3488 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3489 expand_normal (CALL_EXPR_ARG (exp, 1)));
3490 res = chkp_join_splitted_slot (res, bnd);
3491 }
3492 return res;
3493 }
3494 }
3495
3496 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3497 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3498 so that this can also be called without constructing an actual CALL_EXPR.
3499 The other arguments and return value are the same as for
3500 expand_builtin_mempcpy. */
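
/* One transformation performed below, shown as a sketch (illustrative
   only): when the result of mempcpy is unused, e.g.

     (void) mempcpy (d, s, n);

   the call is rewritten as the equivalent

     memcpy (d, s, n);

   since the extra end-pointer return value is not needed.  */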
3501
3502 static rtx
3503 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3504 rtx target, machine_mode mode, int endp,
3505 tree orig_exp)
3506 {
3507 tree fndecl = get_callee_fndecl (orig_exp);
3508
3509 /* If return value is ignored, transform mempcpy into memcpy. */
3510 if (target == const0_rtx
3511 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3512 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3513 {
3514 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3515 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3516 dest, src, len);
3517 return expand_expr (result, target, mode, EXPAND_NORMAL);
3518 }
3519 else if (target == const0_rtx
3520 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3521 {
3522 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3523 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3524 dest, src, len);
3525 return expand_expr (result, target, mode, EXPAND_NORMAL);
3526 }
3527 else
3528 {
3529 const char *src_str;
3530 unsigned int src_align = get_pointer_alignment (src);
3531 unsigned int dest_align = get_pointer_alignment (dest);
3532 rtx dest_mem, src_mem, len_rtx;
3533
3534 /* If either SRC or DEST is not a pointer type, don't do this
3535 operation in-line. */
3536 if (dest_align == 0 || src_align == 0)
3537 return NULL_RTX;
3538
3539 /* If LEN is not constant, call the normal function. */
3540 if (! tree_fits_uhwi_p (len))
3541 return NULL_RTX;
3542
3543 len_rtx = expand_normal (len);
3544 src_str = c_getstr (src);
3545
3546 /* If SRC is a string constant and block move would be done
3547 by pieces, we can avoid loading the string from memory
3548 and only store the computed constants. */
3549 if (src_str
3550 && CONST_INT_P (len_rtx)
3551 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3552 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3553 CONST_CAST (char *, src_str),
3554 dest_align, false))
3555 {
3556 dest_mem = get_memory_rtx (dest, len);
3557 set_mem_align (dest_mem, dest_align);
3558 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3559 builtin_memcpy_read_str,
3560 CONST_CAST (char *, src_str),
3561 dest_align, false, endp);
3562 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3563 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3564 return dest_mem;
3565 }
3566
3567 if (CONST_INT_P (len_rtx)
3568 && can_move_by_pieces (INTVAL (len_rtx),
3569 MIN (dest_align, src_align)))
3570 {
3571 dest_mem = get_memory_rtx (dest, len);
3572 set_mem_align (dest_mem, dest_align);
3573 src_mem = get_memory_rtx (src, len);
3574 set_mem_align (src_mem, src_align);
3575 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3576 MIN (dest_align, src_align), endp);
3577 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3578 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3579 return dest_mem;
3580 }
3581
3582 return NULL_RTX;
3583 }
3584 }
3585
3586 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3587 we failed, the caller should emit a normal call, otherwise try to
3588 get the result in TARGET, if convenient. If ENDP is 0 return the
3589 destination pointer, if ENDP is 1 return the end pointer ala
3590 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3591 stpcpy. */
3592
3593 static rtx
3594 expand_movstr (tree dest, tree src, rtx target, int endp)
3595 {
3596 struct expand_operand ops[3];
3597 rtx dest_mem;
3598 rtx src_mem;
3599
3600 if (!targetm.have_movstr ())
3601 return NULL_RTX;
3602
3603 dest_mem = get_memory_rtx (dest, NULL);
3604 src_mem = get_memory_rtx (src, NULL);
3605 if (!endp)
3606 {
3607 target = force_reg (Pmode, XEXP (dest_mem, 0));
3608 dest_mem = replace_equiv_address (dest_mem, target);
3609 }
3610
3611 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3612 create_fixed_operand (&ops[1], dest_mem);
3613 create_fixed_operand (&ops[2], src_mem);
3614 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3615 return NULL_RTX;
3616
3617 if (endp && target != const0_rtx)
3618 {
3619 target = ops[0].value;
3620 /* movstr is supposed to set end to the address of the NUL
3621 terminator. If the caller requested a mempcpy-like return value,
3622 adjust it. */
3623 if (endp == 1)
3624 {
3625 rtx tem = plus_constant (GET_MODE (target),
3626 gen_lowpart (GET_MODE (target), target), 1);
3627 emit_move_insn (target, force_operand (tem, NULL_RTX));
3628 }
3629 }
3630 return target;
3631 }
3632
3633 /* Do some very basic size validation of a call to the strcat builtin
3634 given by EXP. Return NULL_RTX to have the built-in expand to a call
3635 to the library function. */
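
/* Hypothetical example of the case diagnosed below (not from the
   original source): with

     char d[4];
     strcat (d, "abcdef");

   the source string alone is longer than the destination object, so a
   -Wstringop-overflow warning is expected regardless of what D already
   contains.  */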
3636
3637 static rtx
3638 expand_builtin_strcat (tree exp, rtx)
3639 {
3640 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3641 || !warn_stringop_overflow)
3642 return NULL_RTX;
3643
3644 tree dest = CALL_EXPR_ARG (exp, 0);
3645 tree src = CALL_EXPR_ARG (exp, 1);
3646
3647 /* There is no way here to determine the length of the string in
3648 the destination to which the SRC string is being appended, so
3649 just diagnose cases when the source string is longer than
3650 the destination object. */
3651
3652 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3653
3654 check_sizes (OPT_Wstringop_overflow_,
3655 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3656
3657 return NULL_RTX;
3658 }
3659
3660 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3661 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3662 try to get the result in TARGET, if convenient (and in mode MODE if that's
3663 convenient). */
3664
3665 static rtx
3666 expand_builtin_strcpy (tree exp, rtx target)
3667 {
3668 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3669 return NULL_RTX;
3670
3671 tree dest = CALL_EXPR_ARG (exp, 0);
3672 tree src = CALL_EXPR_ARG (exp, 1);
3673
3674 if (warn_stringop_overflow)
3675 {
3676 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3677 check_sizes (OPT_Wstringop_overflow_,
3678 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3679 }
3680
3681 return expand_builtin_strcpy_args (dest, src, target);
3682 }
3683
3684 /* Helper function to do the actual work for expand_builtin_strcpy. The
3685 arguments to the builtin_strcpy call DEST and SRC are broken out
3686 so that this can also be called without constructing an actual CALL_EXPR.
3687 The other arguments and return value are the same as for
3688 expand_builtin_strcpy. */
3689
3690 static rtx
3691 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3692 {
3693 return expand_movstr (dest, src, target, /*endp=*/0);
3694 }
3695
3696 /* Expand a call EXP to the stpcpy builtin.
3697 Return NULL_RTX if we failed; the caller should emit a normal call,
3698 otherwise try to get the result in TARGET, if convenient (and in
3699 mode MODE if that's convenient). */
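
/* For reference (illustrative example, not part of the original
   source): a call such as

     char *p = stpcpy (d, "abc");

   copies the four bytes 'a', 'b', 'c', '\0' and leaves p == d + 3,
   i.e. pointing at the terminating NUL; when the return value is
   ignored, the expansion below simply degrades to strcpy.  */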
3700
3701 static rtx
3702 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3703 {
3704 tree dst, src;
3705 location_t loc = EXPR_LOCATION (exp);
3706
3707 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3708 return NULL_RTX;
3709
3710 dst = CALL_EXPR_ARG (exp, 0);
3711 src = CALL_EXPR_ARG (exp, 1);
3712
3713 if (warn_stringop_overflow)
3714 {
3715 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3716 check_sizes (OPT_Wstringop_overflow_,
3717 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3718 }
3719
3720 /* If return value is ignored, transform stpcpy into strcpy. */
3721 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3722 {
3723 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3724 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3725 return expand_expr (result, target, mode, EXPAND_NORMAL);
3726 }
3727 else
3728 {
3729 tree len, lenp1;
3730 rtx ret;
3731
3732 /* Ensure we get an actual string whose length can be evaluated at
3733 compile-time, not an expression containing a string. This is
3734 because the latter will potentially produce pessimized code
3735 when used to produce the return value. */
3736 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3737 return expand_movstr (dst, src, target, /*endp=*/2);
3738
3739 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3740 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3741 target, mode, /*endp=*/2,
3742 exp);
3743
3744 if (ret)
3745 return ret;
3746
3747 if (TREE_CODE (len) == INTEGER_CST)
3748 {
3749 rtx len_rtx = expand_normal (len);
3750
3751 if (CONST_INT_P (len_rtx))
3752 {
3753 ret = expand_builtin_strcpy_args (dst, src, target);
3754
3755 if (ret)
3756 {
3757 if (! target)
3758 {
3759 if (mode != VOIDmode)
3760 target = gen_reg_rtx (mode);
3761 else
3762 target = gen_reg_rtx (GET_MODE (ret));
3763 }
3764 if (GET_MODE (target) != GET_MODE (ret))
3765 ret = gen_lowpart (GET_MODE (target), ret);
3766
3767 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3768 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3769 gcc_assert (ret);
3770
3771 return target;
3772 }
3773 }
3774 }
3775
3776 return expand_movstr (dst, src, target, /*endp=*/2);
3777 }
3778 }
3779
3780 /* Check a call EXP to the stpncpy built-in for validity.
3781 Return NULL_RTX on both success and failure. */
3782
3783 static rtx
3784 expand_builtin_stpncpy (tree exp, rtx)
3785 {
3786 if (!validate_arglist (exp,
3787 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3788 || !warn_stringop_overflow)
3789 return NULL_RTX;
3790
3791 /* The source and destination of the call. */
3792 tree dest = CALL_EXPR_ARG (exp, 0);
3793 tree src = CALL_EXPR_ARG (exp, 1);
3794
3795 /* The exact number of bytes to write (not the maximum). */
3796 tree len = CALL_EXPR_ARG (exp, 2);
3797
3798 /* The size of the destination object. */
3799 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3800
3801 check_sizes (OPT_Wstringop_overflow_,
3802 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3803
3804 return NULL_RTX;
3805 }
3806
3807 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3808 bytes from constant string DATA + OFFSET and return it as target
3809 constant. */
3810
3811 rtx
3812 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3813 machine_mode mode)
3814 {
3815 const char *str = (const char *) data;
3816
3817 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3818 return const0_rtx;
3819
3820 return c_readstr (str + offset, mode);
3821 }
3822
3823 /* Helper to check the sizes of sequences and the destination of calls
3824 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3825 success (no overflow or invalid sizes), false otherwise. */
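
/* Hypothetical example of the diagnostic issued below (not from the
   original source):

     char d[8];
     strncat (d, s, sizeof d);

   is warned about because strncat appends up to the bound plus a
   terminating NUL, so a bound equal to the destination size leaves no
   room for the NUL whenever the source is at least that long.  */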
3826
3827 static bool
3828 check_strncat_sizes (tree exp, tree objsize)
3829 {
3830 tree dest = CALL_EXPR_ARG (exp, 0);
3831 tree src = CALL_EXPR_ARG (exp, 1);
3832 tree maxlen = CALL_EXPR_ARG (exp, 2);
3833
3834 /* Try to determine the range of lengths that the source expression
3835 refers to. */
3836 tree lenrange[2];
3837 get_range_strlen (src, lenrange);
3838
3839 /* Try to verify that the destination is big enough for the shortest
3840 string. */
3841
3842 if (!objsize && warn_stringop_overflow)
3843 {
3844 /* If it hasn't been provided by __strncat_chk, try to determine
3845 the size of the destination object into which the source is
3846 being copied. */
3847 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3848 }
3849
3850 /* Add one for the terminating nul. */
3851 tree srclen = (lenrange[0]
3852 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3853 size_one_node)
3854 : NULL_TREE);
3855
3856 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3857 nul so the specified upper bound should never be equal to (or greater
3858 than) the size of the destination. */
3859 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3860 && tree_int_cst_equal (objsize, maxlen))
3861 {
3862 location_t loc = tree_nonartificial_location (exp);
3863 loc = expansion_point_location_if_in_system_header (loc);
3864
3865 warning_at (loc, OPT_Wstringop_overflow_,
3866 "%K%qD specified bound %E equals destination size",
3867 exp, get_callee_fndecl (exp), maxlen);
3868
3869 return false;
3870 }
3871
3872 if (!srclen
3873 || (maxlen && tree_fits_uhwi_p (maxlen)
3874 && tree_fits_uhwi_p (srclen)
3875 && tree_int_cst_lt (maxlen, srclen)))
3876 srclen = maxlen;
3877
3878 /* The number of bytes to write is MAXLEN but check_sizes will also
3879 check SRCLEN if MAXLEN's value isn't known. */
3880 return check_sizes (OPT_Wstringop_overflow_,
3881 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3882 }
3883
3884 /* Similar to expand_builtin_strcat, do some very basic size validation
3885 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3886 the built-in expand to a call to the library function. */
3887
3888 static rtx
3889 expand_builtin_strncat (tree exp, rtx)
3890 {
3891 if (!validate_arglist (exp,
3892 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3893 || !warn_stringop_overflow)
3894 return NULL_RTX;
3895
3896 tree dest = CALL_EXPR_ARG (exp, 0);
3897 tree src = CALL_EXPR_ARG (exp, 1);
3898 /* The upper bound on the number of bytes to write. */
3899 tree maxlen = CALL_EXPR_ARG (exp, 2);
3900 /* The length of the source sequence. */
3901 tree slen = c_strlen (src, 1);
3902
3903 /* Try to determine the range of lengths that the source expression
3904 refers to. */
3905 tree lenrange[2];
3906 if (slen)
3907 lenrange[0] = lenrange[1] = slen;
3908 else
3909 get_range_strlen (src, lenrange);
3910
3911 /* Try to verify that the destination is big enough for the shortest
3912 string. First try to determine the size of the destination object
3913 into which the source is being copied. */
3914 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3915
3916 /* Add one for the terminating nul. */
3917 tree srclen = (lenrange[0]
3918 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3919 size_one_node)
3920 : NULL_TREE);
3921
3922 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3923 nul so the specified upper bound should never be equal to (or greater
3924 than) the size of the destination. */
3925 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3926 && tree_int_cst_equal (destsize, maxlen))
3927 {
3928 location_t loc = tree_nonartificial_location (exp);
3929 loc = expansion_point_location_if_in_system_header (loc);
3930
3931 warning_at (loc, OPT_Wstringop_overflow_,
3932 "%K%qD specified bound %E equals destination size",
3933 exp, get_callee_fndecl (exp), maxlen);
3934
3935 return NULL_RTX;
3936 }
3937
3938 if (!srclen
3939 || (maxlen && tree_fits_uhwi_p (maxlen)
3940 && tree_fits_uhwi_p (srclen)
3941 && tree_int_cst_lt (maxlen, srclen)))
3942 srclen = maxlen;
3943
3944 /* The number of bytes to write is MAXLEN but check_sizes will also
3945 check SRCLEN if MAXLEN's value isn't known. */
3946 check_sizes (OPT_Wstringop_overflow_,
3947 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3948
3949 return NULL_RTX;
3950 }
3951
3952 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3953 NULL_RTX if we failed; the caller should emit a normal call. */
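
/* Illustrative example of the padding rule handled below (not from the
   original source):

     char d[5];
     strncpy (d, "ab", 5);

   stores 'a', 'b' and then three NUL bytes, because strncpy must pad
   with trailing zeros up to the requested length when it exceeds the
   source length.  */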
3954
3955 static rtx
3956 expand_builtin_strncpy (tree exp, rtx target)
3957 {
3958 location_t loc = EXPR_LOCATION (exp);
3959
3960 if (validate_arglist (exp,
3961 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3962 {
3963 tree dest = CALL_EXPR_ARG (exp, 0);
3964 tree src = CALL_EXPR_ARG (exp, 1);
3965 /* The number of bytes to write (not the maximum). */
3966 tree len = CALL_EXPR_ARG (exp, 2);
3967 /* The length of the source sequence. */
3968 tree slen = c_strlen (src, 1);
3969
3970 if (warn_stringop_overflow)
3971 {
3972 tree destsize = compute_objsize (dest,
3973 warn_stringop_overflow - 1);
3974
3975 /* The number of bytes to write is LEN but check_sizes will also
3976 check SLEN if LEN's value isn't known. */
3977 check_sizes (OPT_Wstringop_overflow_,
3978 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3979 }
3980
3981 /* We must be passed a constant len and src parameter. */
3982 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3983 return NULL_RTX;
3984
3985 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3986
3987 /* We're required to pad with trailing zeros if the requested
3988 len is greater than strlen(s2)+1. In that case try to
3989 use store_by_pieces; if that fails, punt. */
3990 if (tree_int_cst_lt (slen, len))
3991 {
3992 unsigned int dest_align = get_pointer_alignment (dest);
3993 const char *p = c_getstr (src);
3994 rtx dest_mem;
3995
3996 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3997 || !can_store_by_pieces (tree_to_uhwi (len),
3998 builtin_strncpy_read_str,
3999 CONST_CAST (char *, p),
4000 dest_align, false))
4001 return NULL_RTX;
4002
4003 dest_mem = get_memory_rtx (dest, len);
4004 store_by_pieces (dest_mem, tree_to_uhwi (len),
4005 builtin_strncpy_read_str,
4006 CONST_CAST (char *, p), dest_align, false, 0);
4007 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4008 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4009 return dest_mem;
4010 }
4011 }
4012 return NULL_RTX;
4013 }
4014
4015 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4016 bytes from constant string DATA + OFFSET and return it as target
4017 constant. */
4018
4019 rtx
4020 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4021 machine_mode mode)
4022 {
4023 const char *c = (const char *) data;
4024 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4025
4026 memset (p, *c, GET_MODE_SIZE (mode));
4027
4028 return c_readstr (p, mode);
4029 }
4030
4031 /* Callback routine for store_by_pieces. Return the RTL of a register
4032 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4033 char value given in the RTL register data. For example, if mode is
4034 4 bytes wide, return the RTL for 0x01010101*data. */
4035
4036 static rtx
4037 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4038 machine_mode mode)
4039 {
4040 rtx target, coeff;
4041 size_t size;
4042 char *p;
4043
4044 size = GET_MODE_SIZE (mode);
4045 if (size == 1)
4046 return (rtx) data;
4047
4048 p = XALLOCAVEC (char, size);
4049 memset (p, 1, size);
4050 coeff = c_readstr (p, mode);
4051
4052 target = convert_to_mode (mode, (rtx) data, 1);
4053 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4054 return force_reg (mode, target);
4055 }
4056
4057 /* Expand expression EXP, which is a call to the memset builtin. Return
4058 NULL_RTX if we failed the caller should emit a normal call, otherwise
4059 try to get the result in TARGET, if convenient (and in mode MODE if that's
4060 convenient). */
4061
4062 static rtx
4063 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4064 {
4065 if (!validate_arglist (exp,
4066 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4067 return NULL_RTX;
4068
4069 tree dest = CALL_EXPR_ARG (exp, 0);
4070 tree val = CALL_EXPR_ARG (exp, 1);
4071 tree len = CALL_EXPR_ARG (exp, 2);
4072
4073 check_memop_sizes (exp, dest, NULL_TREE, len);
4074
4075 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4076 }
4077
4078 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4079 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4080 try to get the result in TARGET, if convenient (and in mode MODE if that's
4081 convenient). */
4082
4083 static rtx
4084 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4085 {
4086 if (!validate_arglist (exp,
4087 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4088 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4089 return NULL_RTX;
4090 else
4091 {
4092 tree dest = CALL_EXPR_ARG (exp, 0);
4093 tree val = CALL_EXPR_ARG (exp, 2);
4094 tree len = CALL_EXPR_ARG (exp, 3);
4095 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4096
4097 /* Return src bounds with the result. */
4098 if (res)
4099 {
4100 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4101 expand_normal (CALL_EXPR_ARG (exp, 1)));
4102 res = chkp_join_splitted_slot (res, bnd);
4103 }
4104 return res;
4105 }
4106 }
4107
4108 /* Helper function to do the actual work for expand_builtin_memset. The
4109 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4110 so that this can also be called without constructing an actual CALL_EXPR.
4111 The other arguments and return value are the same as for
4112 expand_builtin_memset. */
4113
4114 static rtx
4115 expand_builtin_memset_args (tree dest, tree val, tree len,
4116 rtx target, machine_mode mode, tree orig_exp)
4117 {
4118 tree fndecl, fn;
4119 enum built_in_function fcode;
4120 machine_mode val_mode;
4121 char c;
4122 unsigned int dest_align;
4123 rtx dest_mem, dest_addr, len_rtx;
4124 HOST_WIDE_INT expected_size = -1;
4125 unsigned int expected_align = 0;
4126 unsigned HOST_WIDE_INT min_size;
4127 unsigned HOST_WIDE_INT max_size;
4128 unsigned HOST_WIDE_INT probable_max_size;
4129
4130 dest_align = get_pointer_alignment (dest);
4131
4132 /* If DEST is not a pointer type, don't do this operation in-line. */
4133 if (dest_align == 0)
4134 return NULL_RTX;
4135
4136 if (currently_expanding_gimple_stmt)
4137 stringop_block_profile (currently_expanding_gimple_stmt,
4138 &expected_align, &expected_size);
4139
4140 if (expected_align < dest_align)
4141 expected_align = dest_align;
4142
4143 /* If the LEN parameter is zero, return DEST. */
4144 if (integer_zerop (len))
4145 {
4146 /* Evaluate and ignore VAL in case it has side-effects. */
4147 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4148 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4149 }
4150
4151 /* Stabilize the arguments in case we fail. */
4152 dest = builtin_save_expr (dest);
4153 val = builtin_save_expr (val);
4154 len = builtin_save_expr (len);
4155
4156 len_rtx = expand_normal (len);
4157 determine_block_size (len, len_rtx, &min_size, &max_size,
4158 &probable_max_size);
4159 dest_mem = get_memory_rtx (dest, len);
4160 val_mode = TYPE_MODE (unsigned_char_type_node);
4161
4162 if (TREE_CODE (val) != INTEGER_CST)
4163 {
4164 rtx val_rtx;
4165
4166 val_rtx = expand_normal (val);
4167 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4168
4169 /* Assume that we can memset by pieces if we can store
4170 the coefficients by pieces (in the required modes).
4171 We can't pass builtin_memset_gen_str as that emits RTL. */
4172 c = 1;
4173 if (tree_fits_uhwi_p (len)
4174 && can_store_by_pieces (tree_to_uhwi (len),
4175 builtin_memset_read_str, &c, dest_align,
4176 true))
4177 {
4178 val_rtx = force_reg (val_mode, val_rtx);
4179 store_by_pieces (dest_mem, tree_to_uhwi (len),
4180 builtin_memset_gen_str, val_rtx, dest_align,
4181 true, 0);
4182 }
4183 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4184 dest_align, expected_align,
4185 expected_size, min_size, max_size,
4186 probable_max_size))
4187 goto do_libcall;
4188
4189 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4190 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4191 return dest_mem;
4192 }
4193
4194 if (target_char_cast (val, &c))
4195 goto do_libcall;
4196
4197 if (c)
4198 {
4199 if (tree_fits_uhwi_p (len)
4200 && can_store_by_pieces (tree_to_uhwi (len),
4201 builtin_memset_read_str, &c, dest_align,
4202 true))
4203 store_by_pieces (dest_mem, tree_to_uhwi (len),
4204 builtin_memset_read_str, &c, dest_align, true, 0);
4205 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4206 gen_int_mode (c, val_mode),
4207 dest_align, expected_align,
4208 expected_size, min_size, max_size,
4209 probable_max_size))
4210 goto do_libcall;
4211
4212 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4213 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4214 return dest_mem;
4215 }
4216
4217 set_mem_align (dest_mem, dest_align);
4218 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4219 CALL_EXPR_TAILCALL (orig_exp)
4220 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4221 expected_align, expected_size,
4222 min_size, max_size,
4223 probable_max_size);
4224
4225 if (dest_addr == 0)
4226 {
4227 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4228 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4229 }
4230
4231 return dest_addr;
4232
4233 do_libcall:
4234 fndecl = get_callee_fndecl (orig_exp);
4235 fcode = DECL_FUNCTION_CODE (fndecl);
4236 if (fcode == BUILT_IN_MEMSET
4237 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4238 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4239 dest, val, len);
4240 else if (fcode == BUILT_IN_BZERO)
4241 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4242 dest, len);
4243 else
4244 gcc_unreachable ();
4245 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4246 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4247 return expand_call (fn, target, target == const0_rtx);
4248 }
4249
4250 /* Expand expression EXP, which is a call to the bzero builtin. Return
4251 NULL_RTX if we failed; the caller should emit a normal call. */
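
/* The transformation performed below, as a sketch (illustrative only):

     bzero (p, n);

   is expanded through the memset machinery as if it had been written

     memset (p, 0, (size_t) n);

   while still falling back to a real bzero call if inline expansion
   fails.  */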
4252
4253 static rtx
4254 expand_builtin_bzero (tree exp)
4255 {
4256 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4257 return NULL_RTX;
4258
4259 tree dest = CALL_EXPR_ARG (exp, 0);
4260 tree size = CALL_EXPR_ARG (exp, 1);
4261
4262 check_memop_sizes (exp, dest, NULL_TREE, size);
4263
4264 /* New argument list transforming bzero(ptr x, int y) to
4265 memset(ptr x, int 0, size_t y). This is done this way
4266 so that if it isn't expanded inline, we fall back to
4267 calling bzero instead of memset. */
4268
4269 location_t loc = EXPR_LOCATION (exp);
4270
4271 return expand_builtin_memset_args (dest, integer_zero_node,
4272 fold_convert_loc (loc,
4273 size_type_node, size),
4274 const0_rtx, VOIDmode, exp);
4275 }
4276
4277 /* Try to expand cmpstr operation ICODE with the given operands.
4278 Return the result rtx on success, otherwise return null. */
4279
4280 static rtx
4281 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4282 HOST_WIDE_INT align)
4283 {
4284 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4285
4286 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4287 target = NULL_RTX;
4288
4289 struct expand_operand ops[4];
4290 create_output_operand (&ops[0], target, insn_mode);
4291 create_fixed_operand (&ops[1], arg1_rtx);
4292 create_fixed_operand (&ops[2], arg2_rtx);
4293 create_integer_operand (&ops[3], align);
4294 if (maybe_expand_insn (icode, 4, ops))
4295 return ops[0].value;
4296 return NULL_RTX;
4297 }
4298
4299 /* Expand expression EXP, which is a call to the memcmp built-in function.
4300 Return NULL_RTX if we failed and the caller should emit a normal call,
4301 otherwise try to get the result in TARGET, if convenient.
4302 RESULT_EQ is true if we can relax the returned value to be either zero
4303 or nonzero, without caring about the sign. */
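
/* As an illustration (not part of the original source), RESULT_EQ is
   the equality-only case, e.g. when the caller only tests

     if (memcmp (a, b, n) == 0)
       ...

   so the expansion may produce any nonzero value on mismatch instead
   of the ordered negative/zero/positive result.  */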
4304
4305 static rtx
4306 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4307 {
4308 if (!validate_arglist (exp,
4309 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4310 return NULL_RTX;
4311
4312 tree arg1 = CALL_EXPR_ARG (exp, 0);
4313 tree arg2 = CALL_EXPR_ARG (exp, 1);
4314 tree len = CALL_EXPR_ARG (exp, 2);
4315
4316 /* Diagnose calls where the specified length exceeds the size of either
4317 object. */
4318 if (warn_stringop_overflow)
4319 {
4320 tree size = compute_objsize (arg1, 0);
4321 if (check_sizes (OPT_Wstringop_overflow_,
4322 exp, len, /*maxlen=*/NULL_TREE,
4323 size, /*objsize=*/NULL_TREE))
4324 {
4325 size = compute_objsize (arg2, 0);
4326 check_sizes (OPT_Wstringop_overflow_,
4327 exp, len, /*maxlen=*/NULL_TREE,
4328 size, /*objsize=*/NULL_TREE);
4329 }
4330 }
4331
4332 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4333 location_t loc = EXPR_LOCATION (exp);
4334
4335 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4336 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4337
4338 /* If we don't have POINTER_TYPE, call the function. */
4339 if (arg1_align == 0 || arg2_align == 0)
4340 return NULL_RTX;
4341
4342 rtx arg1_rtx = get_memory_rtx (arg1, len);
4343 rtx arg2_rtx = get_memory_rtx (arg2, len);
4344 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4345
4346 /* Set MEM_SIZE as appropriate. */
4347 if (CONST_INT_P (len_rtx))
4348 {
4349 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4350 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4351 }
4352
4353 by_pieces_constfn constfn = NULL;
4354
4355 const char *src_str = c_getstr (arg2);
4356 if (result_eq && src_str == NULL)
4357 {
4358 src_str = c_getstr (arg1);
4359 if (src_str != NULL)
4360 std::swap (arg1_rtx, arg2_rtx);
4361 }
4362
4363 /* If SRC is a string constant and block move would be done
4364 by pieces, we can avoid loading the string from memory
4365 and only store the computed constants. */
4366 if (src_str
4367 && CONST_INT_P (len_rtx)
4368 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4369 constfn = builtin_memcpy_read_str;
4370
4371 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4372 TREE_TYPE (len), target,
4373 result_eq, constfn,
4374 CONST_CAST (char *, src_str));
4375
4376 if (result)
4377 {
4378 /* Return the value in the proper mode for this function. */
4379 if (GET_MODE (result) == mode)
4380 return result;
4381
4382 if (target != 0)
4383 {
4384 convert_move (target, result, 0);
4385 return target;
4386 }
4387
4388 return convert_to_mode (mode, result, 0);
4389 }
4390
4391 return NULL_RTX;
4392 }
4393
4394 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4395 if we failed; the caller should emit a normal call, otherwise try to get
4396 the result in TARGET, if convenient. */
4397
4398 static rtx
4399 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4400 {
4401 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4402 return NULL_RTX;
4403
4404 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4405 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4406 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4407 {
4408 rtx arg1_rtx, arg2_rtx;
4409 tree fndecl, fn;
4410 tree arg1 = CALL_EXPR_ARG (exp, 0);
4411 tree arg2 = CALL_EXPR_ARG (exp, 1);
4412 rtx result = NULL_RTX;
4413
4414 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4415 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4416
4417 /* If we don't have POINTER_TYPE, call the function. */
4418 if (arg1_align == 0 || arg2_align == 0)
4419 return NULL_RTX;
4420
4421 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4422 arg1 = builtin_save_expr (arg1);
4423 arg2 = builtin_save_expr (arg2);
4424
4425 arg1_rtx = get_memory_rtx (arg1, NULL);
4426 arg2_rtx = get_memory_rtx (arg2, NULL);
4427
4428 /* Try to call cmpstrsi. */
4429 if (cmpstr_icode != CODE_FOR_nothing)
4430 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4431 MIN (arg1_align, arg2_align));
4432
4433 /* Try to determine at least one length and call cmpstrnsi. */
4434 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4435 {
4436 tree len;
4437 rtx arg3_rtx;
4438
4439 tree len1 = c_strlen (arg1, 1);
4440 tree len2 = c_strlen (arg2, 1);
4441
4442 if (len1)
4443 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4444 if (len2)
4445 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4446
4447 /* If we don't have a constant length for the first, use the length
4448 of the second, if we know it. We don't require a constant for
4449 this case; some cost analysis could be done if both are available
4450 but neither is constant. For now, assume they're equally cheap,
4451 unless one has side effects. If both strings have constant lengths,
4452 use the smaller. */
4453
4454 if (!len1)
4455 len = len2;
4456 else if (!len2)
4457 len = len1;
4458 else if (TREE_SIDE_EFFECTS (len1))
4459 len = len2;
4460 else if (TREE_SIDE_EFFECTS (len2))
4461 len = len1;
4462 else if (TREE_CODE (len1) != INTEGER_CST)
4463 len = len2;
4464 else if (TREE_CODE (len2) != INTEGER_CST)
4465 len = len1;
4466 else if (tree_int_cst_lt (len1, len2))
4467 len = len1;
4468 else
4469 len = len2;
4470
4471 /* If both arguments have side effects, we cannot optimize. */
4472 if (len && !TREE_SIDE_EFFECTS (len))
4473 {
4474 arg3_rtx = expand_normal (len);
4475 result = expand_cmpstrn_or_cmpmem
4476 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4477 arg3_rtx, MIN (arg1_align, arg2_align));
4478 }
4479 }
4480
4481 if (result)
4482 {
4483 /* Return the value in the proper mode for this function. */
4484 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4485 if (GET_MODE (result) == mode)
4486 return result;
4487 if (target == 0)
4488 return convert_to_mode (mode, result, 0);
4489 convert_move (target, result, 0);
4490 return target;
4491 }
4492
4493 /* Expand the library call ourselves using a stabilized argument
4494 list to avoid re-evaluating the function's arguments twice. */
4495 fndecl = get_callee_fndecl (exp);
4496 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4497 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4498 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4499 return expand_call (fn, target, target == const0_rtx);
4500 }
4501 return NULL_RTX;
4502 }
4503
4504 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4505 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4506 try to get the result in TARGET, if convenient. */
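
/* Sketch of the length selection used below (illustrative only): for a
   call such as

     strncmp ("ab", s, 100)

   one string has a known length, so the compare can use
   MIN (strlen ("ab") + 1, 100) == 3 bytes rather than the full bound
   of 100.  */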
4507
4508 static rtx
4509 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4510 ATTRIBUTE_UNUSED machine_mode mode)
4511 {
4512 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4513
4514 if (!validate_arglist (exp,
4515 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4516 return NULL_RTX;
4517
4518 /* If c_strlen can determine an expression for one of the string
4519 lengths, and it doesn't have side effects, then emit cmpstrnsi
4520 using length MIN(strlen(string)+1, arg3). */
4521 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4522 if (cmpstrn_icode != CODE_FOR_nothing)
4523 {
4524 tree len, len1, len2, len3;
4525 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4526 rtx result;
4527 tree fndecl, fn;
4528 tree arg1 = CALL_EXPR_ARG (exp, 0);
4529 tree arg2 = CALL_EXPR_ARG (exp, 1);
4530 tree arg3 = CALL_EXPR_ARG (exp, 2);
4531
4532 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4533 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4534
4535 len1 = c_strlen (arg1, 1);
4536 len2 = c_strlen (arg2, 1);
4537
4538 if (len1)
4539 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4540 if (len2)
4541 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4542
4543 len3 = fold_convert_loc (loc, sizetype, arg3);
4544
4545 /* If we don't have a constant length for the first, use the length
4546 of the second, if we know it. If neither string is constant length,
4547 use the given length argument. We don't require a constant for
4548 this case; some cost analysis could be done if both are available
4549 but neither is constant. For now, assume they're equally cheap,
4550 unless one has side effects. If both strings have constant lengths,
4551 use the smaller. */
4552
4553 if (!len1 && !len2)
4554 len = len3;
4555 else if (!len1)
4556 len = len2;
4557 else if (!len2)
4558 len = len1;
4559 else if (TREE_SIDE_EFFECTS (len1))
4560 len = len2;
4561 else if (TREE_SIDE_EFFECTS (len2))
4562 len = len1;
4563 else if (TREE_CODE (len1) != INTEGER_CST)
4564 len = len2;
4565 else if (TREE_CODE (len2) != INTEGER_CST)
4566 len = len1;
4567 else if (tree_int_cst_lt (len1, len2))
4568 len = len1;
4569 else
4570 len = len2;
4571
4572 /* If we are not using the given length, we must incorporate it here.
4573 The actual new length parameter will be MIN(len,arg3) in this case. */
4574 if (len != len3)
4575 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4576 arg1_rtx = get_memory_rtx (arg1, len);
4577 arg2_rtx = get_memory_rtx (arg2, len);
4578 arg3_rtx = expand_normal (len);
4579 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4580 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4581 MIN (arg1_align, arg2_align));
4582 if (result)
4583 {
4584 /* Return the value in the proper mode for this function. */
4585 mode = TYPE_MODE (TREE_TYPE (exp));
4586 if (GET_MODE (result) == mode)
4587 return result;
4588 if (target == 0)
4589 return convert_to_mode (mode, result, 0);
4590 convert_move (target, result, 0);
4591 return target;
4592 }
4593
4594 /* Expand the library call ourselves using a stabilized argument
4595 list to avoid re-evaluating the function's arguments twice. */
4596 fndecl = get_callee_fndecl (exp);
4597 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4598 arg1, arg2, len);
4599 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4600 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4601 return expand_call (fn, target, target == const0_rtx);
4602 }
4603 return NULL_RTX;
4604 }
4605
4606 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4607 if that's convenient. */
4608
4609 rtx
4610 expand_builtin_saveregs (void)
4611 {
4612 rtx val;
4613 rtx_insn *seq;
4614
4615 /* Don't do __builtin_saveregs more than once in a function.
4616 Save the result of the first call and reuse it. */
4617 if (saveregs_value != 0)
4618 return saveregs_value;
4619
4620 /* When this function is called, it means that registers must be
4621 saved on entry to this function. So we migrate the call to the
4622 first insn of this function. */
4623
4624 start_sequence ();
4625
4626 /* Do whatever the machine needs done in this case. */
4627 val = targetm.calls.expand_builtin_saveregs ();
4628
4629 seq = get_insns ();
4630 end_sequence ();
4631
4632 saveregs_value = val;
4633
4634 /* Put the insns after the NOTE that starts the function. If this
4635 is inside a start_sequence, make the outer-level insn chain current, so
4636 the code is placed at the start of the function. */
4637 push_topmost_sequence ();
4638 emit_insn_after (seq, entry_of_function ());
4639 pop_topmost_sequence ();
4640
4641 return val;
4642 }
4643
4644 /* Expand a call to __builtin_next_arg. */
4645
4646 static rtx
4647 expand_builtin_next_arg (void)
4648 {
4649 /* Checking arguments is already done in fold_builtin_next_arg,
4650 which must be called before this function. */
4651 return expand_binop (ptr_mode, add_optab,
4652 crtl->args.internal_arg_pointer,
4653 crtl->args.arg_offset_rtx,
4654 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4655 }
4656
4657 /* Make it easier for the backends by protecting the valist argument
4658 from multiple evaluations. */
4659
4660 static tree
4661 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4662 {
4663 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4664
4665 /* The current way of determining the type of valist is completely
4666 bogus. We should have the information on the va builtin instead. */
4667 if (!vatype)
4668 vatype = targetm.fn_abi_va_list (cfun->decl);
4669
4670 if (TREE_CODE (vatype) == ARRAY_TYPE)
4671 {
4672 if (TREE_SIDE_EFFECTS (valist))
4673 valist = save_expr (valist);
4674
4675 /* For this case, the backends will be expecting a pointer to
4676 vatype, but it's possible we've actually been given an array
4677 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4678 So fix it. */
4679 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4680 {
4681 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4682 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4683 }
4684 }
4685 else
4686 {
4687 tree pt = build_pointer_type (vatype);
4688
4689 if (! needs_lvalue)
4690 {
4691 if (! TREE_SIDE_EFFECTS (valist))
4692 return valist;
4693
4694 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4695 TREE_SIDE_EFFECTS (valist) = 1;
4696 }
4697
4698 if (TREE_SIDE_EFFECTS (valist))
4699 valist = save_expr (valist);
4700 valist = fold_build2_loc (loc, MEM_REF,
4701 vatype, valist, build_int_cst (pt, 0));
4702 }
4703
4704 return valist;
4705 }
4706
4707 /* The "standard" definition of va_list is void*. */
4708
4709 tree
4710 std_build_builtin_va_list (void)
4711 {
4712 return ptr_type_node;
4713 }
4714
4715 /* The "standard" abi va_list is va_list_type_node. */
4716
4717 tree
4718 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4719 {
4720 return va_list_type_node;
4721 }
4722
4723 /* The "standard" type of va_list is va_list_type_node. */
4724
4725 tree
4726 std_canonical_va_list_type (tree type)
4727 {
4728 tree wtype, htype;
4729
4730 wtype = va_list_type_node;
4731 htype = type;
4732
4733 if (TREE_CODE (wtype) == ARRAY_TYPE)
4734 {
4735 /* If va_list is an array type, the argument may have decayed
4736 to a pointer type, e.g. by being passed to another function.
4737 In that case, unwrap both types so that we can compare the
4738 underlying records. */
4739 if (TREE_CODE (htype) == ARRAY_TYPE
4740 || POINTER_TYPE_P (htype))
4741 {
4742 wtype = TREE_TYPE (wtype);
4743 htype = TREE_TYPE (htype);
4744 }
4745 }
4746 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4747 return va_list_type_node;
4748
4749 return NULL_TREE;
4750 }
4751
4752 /* The "standard" implementation of va_start: just assign `nextarg' to
4753 the variable. */
4754
4755 void
4756 std_expand_builtin_va_start (tree valist, rtx nextarg)
4757 {
4758 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4759 convert_move (va_r, nextarg, 0);
4760
4761 /* We do not have any valid bounds for the pointer, so
4762 just store zero bounds for it. */
4763 if (chkp_function_instrumented_p (current_function_decl))
4764 chkp_expand_bounds_reset_for_mem (valist,
4765 make_tree (TREE_TYPE (valist),
4766 nextarg));
4767 }
4768
4769 /* Expand EXP, a call to __builtin_va_start. */
4770
4771 static rtx
4772 expand_builtin_va_start (tree exp)
4773 {
4774 rtx nextarg;
4775 tree valist;
4776 location_t loc = EXPR_LOCATION (exp);
4777
4778 if (call_expr_nargs (exp) < 2)
4779 {
4780 error_at (loc, "too few arguments to function %<va_start%>");
4781 return const0_rtx;
4782 }
4783
4784 if (fold_builtin_next_arg (exp, true))
4785 return const0_rtx;
4786
4787 nextarg = expand_builtin_next_arg ();
4788 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4789
4790 if (targetm.expand_builtin_va_start)
4791 targetm.expand_builtin_va_start (valist, nextarg);
4792 else
4793 std_expand_builtin_va_start (valist, nextarg);
4794
4795 return const0_rtx;
4796 }
4797
4798 /* Expand EXP, a call to __builtin_va_end. */
4799
4800 static rtx
4801 expand_builtin_va_end (tree exp)
4802 {
4803 tree valist = CALL_EXPR_ARG (exp, 0);
4804
4805 /* Evaluate for side effects, if needed. I hate macros that don't
4806 do that. */
4807 if (TREE_SIDE_EFFECTS (valist))
4808 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4809
4810 return const0_rtx;
4811 }
4812
4813 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4814 builtin rather than just as an assignment in stdarg.h because of the
4815 nastiness of array-type va_list types. */
4816
4817 static rtx
4818 expand_builtin_va_copy (tree exp)
4819 {
4820 tree dst, src, t;
4821 location_t loc = EXPR_LOCATION (exp);
4822
4823 dst = CALL_EXPR_ARG (exp, 0);
4824 src = CALL_EXPR_ARG (exp, 1);
4825
4826 dst = stabilize_va_list_loc (loc, dst, 1);
4827 src = stabilize_va_list_loc (loc, src, 0);
4828
4829 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4830
4831 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4832 {
4833 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4834 TREE_SIDE_EFFECTS (t) = 1;
4835 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4836 }
4837 else
4838 {
4839 rtx dstb, srcb, size;
4840
4841 /* Evaluate to pointers. */
4842 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4843 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4844 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4845 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4846
4847 dstb = convert_memory_address (Pmode, dstb);
4848 srcb = convert_memory_address (Pmode, srcb);
4849
4850 /* "Dereference" to BLKmode memories. */
4851 dstb = gen_rtx_MEM (BLKmode, dstb);
4852 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4853 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4854 srcb = gen_rtx_MEM (BLKmode, srcb);
4855 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4856 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4857
4858 /* Copy. */
4859 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4860 }
4861
4862 return const0_rtx;
4863 }
4864
4865 /* Expand a call to one of the builtin functions __builtin_frame_address or
4866 __builtin_return_address. */
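
/* Typical uses, shown for illustration (not from the original source):

     void *ra = __builtin_return_address (0);   return address of the
                                                 current function
     void *fp = __builtin_frame_address (0);    current frame pointer

   A nonzero argument walks up the call stack and is diagnosed with
   -Wframe-address below, since outer frames may not be safely
   reachable.  */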
4867
4868 static rtx
4869 expand_builtin_frame_address (tree fndecl, tree exp)
4870 {
4871 /* The argument must be a nonnegative integer constant.
4872 It counts the number of frames to scan up the stack.
4873 The value is either the frame pointer value or the return
4874 address saved in that frame. */
4875 if (call_expr_nargs (exp) == 0)
4876 /* Warning about missing arg was already issued. */
4877 return const0_rtx;
4878 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4879 {
4880 error ("invalid argument to %qD", fndecl);
4881 return const0_rtx;
4882 }
4883 else
4884 {
4885 /* Number of frames to scan up the stack. */
4886 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4887
4888 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4889
4890 /* Some ports cannot access arbitrary stack frames. */
4891 if (tem == NULL)
4892 {
4893 warning (0, "unsupported argument to %qD", fndecl);
4894 return const0_rtx;
4895 }
4896
4897 if (count)
4898 {
4899 /* Warn since no effort is made to ensure that any frame
4900 beyond the current one exists or can be safely reached. */
4901 warning (OPT_Wframe_address, "calling %qD with "
4902 "a nonzero argument is unsafe", fndecl);
4903 }
4904
4905 /* For __builtin_frame_address, return what we've got. */
4906 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4907 return tem;
4908
4909 if (!REG_P (tem)
4910 && ! CONSTANT_P (tem))
4911 tem = copy_addr_to_reg (tem);
4912 return tem;
4913 }
4914 }
4915
4916 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4917 failed and the caller should emit a normal call. */
4918
4919 static rtx
4920 expand_builtin_alloca (tree exp)
4921 {
4922 rtx op0;
4923 rtx result;
4924 unsigned int align;
4925 tree fndecl = get_callee_fndecl (exp);
4926 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
4927 == BUILT_IN_ALLOCA_WITH_ALIGN);
4928 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4929 bool valid_arglist
4930 = (alloca_with_align
4931 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4932 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4933
4934 if (!valid_arglist)
4935 return NULL_RTX;
4936
4937 if ((alloca_with_align && !warn_vla_limit)
4938 || (!alloca_with_align && !warn_alloca_limit))
4939 {
4940 /* -Walloca-larger-than and -Wvla-larger-than settings override
4941 the more general -Walloc-size-larger-than so unless either of
4942 the former options is specified check the alloca arguments for
4943 overflow. */
4944 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4945 int idx[] = { 0, -1 };
4946 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4947 }
4948
4949 /* Compute the argument. */
4950 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4951
4952 /* Compute the alignment. */
4953 align = (alloca_with_align
4954 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4955 : BIGGEST_ALIGNMENT);
4956
4957 /* Allocate the desired space. If the allocation stems from the declaration
4958 of a variable-sized object, it cannot accumulate. */
4959 result = allocate_dynamic_stack_space (op0, 0, align, alloca_for_var);
4960 result = convert_memory_address (ptr_mode, result);
4961
4962 return result;
4963 }
4964
4965 /* Emit a call to __asan_allocas_unpoison for the call in EXP. Replace the
4966 second argument of the call with virtual_stack_dynamic_rtx because the asan
4967 pass emits a dummy value for the second parameter, relying on this function
4968 to perform the change. See the motivation for this in the comment for the
4969 handle_builtin_stack_restore function. */
4970
4971 static rtx
4972 expand_asan_emit_allocas_unpoison (tree exp)
4973 {
4974 tree arg0 = CALL_EXPR_ARG (exp, 0);
4975 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4976 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
4977 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4978 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2, top,
4979 ptr_mode, bot, ptr_mode);
4980 return ret;
4981 }
4982
4983 /* Expand a call to bswap builtin in EXP.
4984 Return NULL_RTX if a normal call should be emitted rather than expanding the
4985 function in-line. If convenient, the result should be placed in TARGET.
4986 SUBTARGET may be used as the target for computing one of EXP's operands. */
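
/* Example of the operation being expanded (illustrative only):

     __builtin_bswap32 (0x12345678) == 0x78563412

   i.e. the bytes of the operand are reversed end to end.  */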
4987
4988 static rtx
4989 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4990 rtx subtarget)
4991 {
4992 tree arg;
4993 rtx op0;
4994
4995 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4996 return NULL_RTX;
4997
4998 arg = CALL_EXPR_ARG (exp, 0);
4999 op0 = expand_expr (arg,
5000 subtarget && GET_MODE (subtarget) == target_mode
5001 ? subtarget : NULL_RTX,
5002 target_mode, EXPAND_NORMAL);
5003 if (GET_MODE (op0) != target_mode)
5004 op0 = convert_to_mode (target_mode, op0, 1);
5005
5006 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5007
5008 gcc_assert (target);
5009
5010 return convert_to_mode (target_mode, target, 1);
5011 }
5012
5013 /* Expand a call to a unary builtin in EXP.
5014 Return NULL_RTX if a normal call should be emitted rather than expanding the
5015 function in-line. If convenient, the result should be placed in TARGET.
5016 SUBTARGET may be used as the target for computing one of EXP's operands. */
5017
5018 static rtx
5019 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5020 rtx subtarget, optab op_optab)
5021 {
5022 rtx op0;
5023
5024 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5025 return NULL_RTX;
5026
5027 /* Compute the argument. */
5028 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5029 (subtarget
5030 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5031 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5032 VOIDmode, EXPAND_NORMAL);
5033 /* Compute op, into TARGET if possible.
5034 Set TARGET to wherever the result comes back. */
5035 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5036 op_optab, op0, target, op_optab != clrsb_optab);
5037 gcc_assert (target);
5038
5039 return convert_to_mode (target_mode, target, 0);
5040 }
5041
5042 /* Expand a call to __builtin_expect. We just return our argument
5043 as the builtin_expect semantics should already have been handled by
5044 the tree branch prediction pass. */
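/* Illustrative use at the source level (a hypothetical caller, not taken
   from this file):
     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_error ();
   marks the branch as unlikely; by the time we get here the hint has been
   consumed by the tree-level predictor, so only the first argument
   survives.  */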
5045
5046 static rtx
5047 expand_builtin_expect (tree exp, rtx target)
5048 {
5049 tree arg;
5050
5051 if (call_expr_nargs (exp) < 2)
5052 return const0_rtx;
5053 arg = CALL_EXPR_ARG (exp, 0);
5054
5055 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5056 /* When guessing was done, the hints should already have been stripped away. */
5057 gcc_assert (!flag_guess_branch_prob
5058 || optimize == 0 || seen_error ());
5059 return target;
5060 }
5061
5062 /* Expand a call to __builtin_assume_aligned. We just return our first
5063 argument as the builtin_assume_aligned semantics should already have been
5064 handled by CCP. */
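/* Illustrative use (a hypothetical caller, not taken from this file):
     void *q = __builtin_assume_aligned (p, 16);
   tells the optimizers that P is at least 16-byte aligned; CCP has already
   recorded that fact, so the expansion simply returns the first argument.  */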
5065
5066 static rtx
5067 expand_builtin_assume_aligned (tree exp, rtx target)
5068 {
5069 if (call_expr_nargs (exp) < 2)
5070 return const0_rtx;
5071 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5072 EXPAND_NORMAL);
5073 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5074 && (call_expr_nargs (exp) < 3
5075 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5076 return target;
5077 }
5078
5079 void
5080 expand_builtin_trap (void)
5081 {
5082 if (targetm.have_trap ())
5083 {
5084 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5085 /* For trap insns, when not accumulating outgoing args, force a
5086 REG_ARGS_SIZE note to prevent crossjumping of calls with
5087 different arg sizes. */
5088 if (!ACCUMULATE_OUTGOING_ARGS)
5089 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5090 }
5091 else
5092 {
5093 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5094 tree call_expr = build_call_expr (fn, 0);
5095 expand_call (call_expr, NULL_RTX, false);
5096 }
5097
5098 emit_barrier ();
5099 }
5100
5101 /* Expand a call to __builtin_unreachable. We do nothing except emit
5102 a barrier saying that control flow will not pass here.
5103
5104 It is the responsibility of the program being compiled to ensure
5105 that control flow never reaches __builtin_unreachable. */
5106 static void
5107 expand_builtin_unreachable (void)
5108 {
5109 emit_barrier ();
5110 }
5111
5112 /* Expand EXP, a call to fabs, fabsf or fabsl.
5113 Return NULL_RTX if a normal call should be emitted rather than expanding
5114 the function inline. If convenient, the result should be placed
5115 in TARGET. SUBTARGET may be used as the target for computing
5116 the operand. */
5117
5118 static rtx
5119 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5120 {
5121 machine_mode mode;
5122 tree arg;
5123 rtx op0;
5124
5125 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5126 return NULL_RTX;
5127
5128 arg = CALL_EXPR_ARG (exp, 0);
5129 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5130 mode = TYPE_MODE (TREE_TYPE (arg));
5131 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5132 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5133 }
5134
5135 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5136 Return NULL_RTX if a normal call should be emitted rather than expanding the
5137 function inline. If convenient, the result should be placed in TARGET.
5138 SUBTARGET may be used as the target for computing the operand. */
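/* For illustration only: copysign (3.0, -0.5) == -3.0 and
   copysign (-3.0, 0.0) == 3.0, i.e. the magnitude of the first argument
   combined with the sign of the second.  */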
5139
5140 static rtx
5141 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5142 {
5143 rtx op0, op1;
5144 tree arg;
5145
5146 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5147 return NULL_RTX;
5148
5149 arg = CALL_EXPR_ARG (exp, 0);
5150 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5151
5152 arg = CALL_EXPR_ARG (exp, 1);
5153 op1 = expand_normal (arg);
5154
5155 return expand_copysign (op0, op1, target);
5156 }
5157
5158 /* Expand a call to __builtin___clear_cache. */
5159
5160 static rtx
5161 expand_builtin___clear_cache (tree exp)
5162 {
5163 if (!targetm.code_for_clear_cache)
5164 {
5165 #ifdef CLEAR_INSN_CACHE
5166 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5167 does something. Just do the default expansion to a call to
5168 __clear_cache(). */
5169 return NULL_RTX;
5170 #else
5171 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5172 does nothing. There is no need to call it. Do nothing. */
5173 return const0_rtx;
5174 #endif /* CLEAR_INSN_CACHE */
5175 }
5176
5177 /* We have a "clear_cache" insn, and it will handle everything. */
5178 tree begin, end;
5179 rtx begin_rtx, end_rtx;
5180
5181 /* We must not expand to a library call. If we did, any
5182 fallback library function in libgcc that might contain a call to
5183 __builtin___clear_cache() would recurse infinitely. */
5184 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5185 {
5186 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5187 return const0_rtx;
5188 }
5189
5190 if (targetm.have_clear_cache ())
5191 {
5192 struct expand_operand ops[2];
5193
5194 begin = CALL_EXPR_ARG (exp, 0);
5195 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5196
5197 end = CALL_EXPR_ARG (exp, 1);
5198 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5199
5200 create_address_operand (&ops[0], begin_rtx);
5201 create_address_operand (&ops[1], end_rtx);
5202 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5203 return const0_rtx;
5204 }
5205 return const0_rtx;
5206 }
5207
5208 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
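/* Illustrative arithmetic (hypothetical values): with a TRAMPOLINE_ALIGNMENT
   of 64 bits the code below computes (tramp + 7) & -8, so an address of
   0x1003 is rounded up to 0x1008.  */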
5209
5210 static rtx
5211 round_trampoline_addr (rtx tramp)
5212 {
5213 rtx temp, addend, mask;
5214
5215 /* If we don't need too much alignment, we'll have been guaranteed
5216 proper alignment by get_trampoline_type. */
5217 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5218 return tramp;
5219
5220 /* Round address up to desired boundary. */
5221 temp = gen_reg_rtx (Pmode);
5222 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5223 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5224
5225 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5226 temp, 0, OPTAB_LIB_WIDEN);
5227 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5228 temp, 0, OPTAB_LIB_WIDEN);
5229
5230 return tramp;
5231 }
5232
5233 static rtx
5234 expand_builtin_init_trampoline (tree exp, bool onstack)
5235 {
5236 tree t_tramp, t_func, t_chain;
5237 rtx m_tramp, r_tramp, r_chain, tmp;
5238
5239 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5240 POINTER_TYPE, VOID_TYPE))
5241 return NULL_RTX;
5242
5243 t_tramp = CALL_EXPR_ARG (exp, 0);
5244 t_func = CALL_EXPR_ARG (exp, 1);
5245 t_chain = CALL_EXPR_ARG (exp, 2);
5246
5247 r_tramp = expand_normal (t_tramp);
5248 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5249 MEM_NOTRAP_P (m_tramp) = 1;
5250
5251 /* If ONSTACK, the TRAMP argument should be the address of a field
5252 within the local function's FRAME decl. Either way, let's see if
5253 we can fill in the MEM_ATTRs for this memory. */
5254 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5255 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5256
5257 /* Creator of a heap trampoline is responsible for making sure the
5258 address is aligned to at least STACK_BOUNDARY. Normally malloc
5259 will ensure this anyhow. */
5260 tmp = round_trampoline_addr (r_tramp);
5261 if (tmp != r_tramp)
5262 {
5263 m_tramp = change_address (m_tramp, BLKmode, tmp);
5264 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5265 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5266 }
5267
5268 /* The FUNC argument should be the address of the nested function.
5269 Extract the actual function decl to pass to the hook. */
5270 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5271 t_func = TREE_OPERAND (t_func, 0);
5272 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5273
5274 r_chain = expand_normal (t_chain);
5275
5276 /* Generate insns to initialize the trampoline. */
5277 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5278
5279 if (onstack)
5280 {
5281 trampolines_created = 1;
5282
5283 if (targetm.calls.custom_function_descriptors != 0)
5284 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5285 "trampoline generated for nested function %qD", t_func);
5286 }
5287
5288 return const0_rtx;
5289 }
5290
5291 static rtx
5292 expand_builtin_adjust_trampoline (tree exp)
5293 {
5294 rtx tramp;
5295
5296 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5297 return NULL_RTX;
5298
5299 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5300 tramp = round_trampoline_addr (tramp);
5301 if (targetm.calls.trampoline_adjust_address)
5302 tramp = targetm.calls.trampoline_adjust_address (tramp);
5303
5304 return tramp;
5305 }
5306
5307 /* Expand a call to the builtin descriptor initialization routine.
5308 A descriptor is made up of a couple of pointers to the static
5309 chain and the code entry in this order. */
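/* Illustrative layout only (the actual accesses are made via
   adjust_address_nv below): the descriptor behaves like
     struct descriptor { void *static_chain; void *entry_point; };
   with the static chain stored at offset 0 and the code entry at offset
   POINTER_SIZE / BITS_PER_UNIT.  */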
5310
5311 static rtx
5312 expand_builtin_init_descriptor (tree exp)
5313 {
5314 tree t_descr, t_func, t_chain;
5315 rtx m_descr, r_descr, r_func, r_chain;
5316
5317 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5318 VOID_TYPE))
5319 return NULL_RTX;
5320
5321 t_descr = CALL_EXPR_ARG (exp, 0);
5322 t_func = CALL_EXPR_ARG (exp, 1);
5323 t_chain = CALL_EXPR_ARG (exp, 2);
5324
5325 r_descr = expand_normal (t_descr);
5326 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5327 MEM_NOTRAP_P (m_descr) = 1;
5328
5329 r_func = expand_normal (t_func);
5330 r_chain = expand_normal (t_chain);
5331
5332 /* Generate insns to initialize the descriptor. */
5333 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5334 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5335 POINTER_SIZE / BITS_PER_UNIT), r_func);
5336
5337 return const0_rtx;
5338 }
5339
5340 /* Expand a call to the builtin descriptor adjustment routine. */
5341
5342 static rtx
5343 expand_builtin_adjust_descriptor (tree exp)
5344 {
5345 rtx tramp;
5346
5347 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5348 return NULL_RTX;
5349
5350 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5351
5352 /* Unalign the descriptor to allow runtime identification. */
5353 tramp = plus_constant (ptr_mode, tramp,
5354 targetm.calls.custom_function_descriptors);
5355
5356 return force_operand (tramp, NULL_RTX);
5357 }
5358
5359 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5360 function. The function first checks whether the back end provides
5361 an insn to implement signbit for the respective mode. If not, it
5362 checks whether the floating point format of the value is such that
5363 the sign bit can be extracted. If that is not the case, error out.
5364 EXP is the expression that is a call to the builtin function; if
5365 convenient, the result should be placed in TARGET. */
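/* Illustrative examples (assuming IEEE formats): for 'float' the sign is
   bit 31 and fits within the int result mode, so the AND path below yields
   WORD & 0x80000000; for 'double' on a 64-bit target the sign is bit 63,
   outside the int lowpart, so the shift path yields (WORD >> 63) & 1.  */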
5366 static rtx
5367 expand_builtin_signbit (tree exp, rtx target)
5368 {
5369 const struct real_format *fmt;
5370 machine_mode fmode, imode, rmode;
5371 tree arg;
5372 int word, bitpos;
5373 enum insn_code icode;
5374 rtx temp;
5375 location_t loc = EXPR_LOCATION (exp);
5376
5377 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5378 return NULL_RTX;
5379
5380 arg = CALL_EXPR_ARG (exp, 0);
5381 fmode = TYPE_MODE (TREE_TYPE (arg));
5382 rmode = TYPE_MODE (TREE_TYPE (exp));
5383 fmt = REAL_MODE_FORMAT (fmode);
5384
5385 arg = builtin_save_expr (arg);
5386
5387 /* Expand the argument yielding a RTX expression. */
5388 temp = expand_normal (arg);
5389
5390 /* Check if the back end provides an insn that handles signbit for the
5391 argument's mode. */
5392 icode = optab_handler (signbit_optab, fmode);
5393 if (icode != CODE_FOR_nothing)
5394 {
5395 rtx_insn *last = get_last_insn ();
5396 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5397 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5398 return target;
5399 delete_insns_since (last);
5400 }
5401
5402 /* For floating point formats without a sign bit, implement signbit
5403 as "ARG < 0.0". */
5404 bitpos = fmt->signbit_ro;
5405 if (bitpos < 0)
5406 {
5407 /* But we can't do this if the format supports signed zero. */
5408 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5409
5410 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5411 build_real (TREE_TYPE (arg), dconst0));
5412 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5413 }
5414
5415 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5416 {
5417 imode = int_mode_for_mode (fmode);
5418 gcc_assert (imode != BLKmode);
5419 temp = gen_lowpart (imode, temp);
5420 }
5421 else
5422 {
5423 imode = word_mode;
5424 /* Handle targets with different FP word orders. */
5425 if (FLOAT_WORDS_BIG_ENDIAN)
5426 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5427 else
5428 word = bitpos / BITS_PER_WORD;
5429 temp = operand_subword_force (temp, word, fmode);
5430 bitpos = bitpos % BITS_PER_WORD;
5431 }
5432
5433 /* Force the intermediate word_mode (or narrower) result into a
5434 register. This avoids attempting to create paradoxical SUBREGs
5435 of floating point modes below. */
5436 temp = force_reg (imode, temp);
5437
5438 /* If the bitpos is within the "result mode" lowpart, the operation
5439 can be implemented with a single bitwise AND. Otherwise, we need
5440 a right shift and an AND. */
5441
5442 if (bitpos < GET_MODE_BITSIZE (rmode))
5443 {
5444 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5445
5446 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5447 temp = gen_lowpart (rmode, temp);
5448 temp = expand_binop (rmode, and_optab, temp,
5449 immed_wide_int_const (mask, rmode),
5450 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5451 }
5452 else
5453 {
5454 /* Perform a logical right shift to place the signbit in the least
5455 significant bit, then truncate the result to the desired mode
5456 and mask just this bit. */
5457 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5458 temp = gen_lowpart (rmode, temp);
5459 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5460 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5461 }
5462
5463 return temp;
5464 }
5465
5466 /* Expand fork or exec calls. TARGET is the desired target of the
5467 call. EXP is the call. FN is the
5468 declaration of the actual function. IGNORE is nonzero if the
5469 value is to be ignored. */
5470
5471 static rtx
5472 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5473 {
5474 tree id, decl;
5475 tree call;
5476
5477 /* If we are not profiling, just call the function. */
5478 if (!profile_arc_flag)
5479 return NULL_RTX;
5480
5481 /* Otherwise call the wrapper. This should look equivalent to the rest of
5482 the compiler, so the generated code does not diverge, and the wrapper can
5483 run the code necessary for keeping the profiling sane. */
5484
5485 switch (DECL_FUNCTION_CODE (fn))
5486 {
5487 case BUILT_IN_FORK:
5488 id = get_identifier ("__gcov_fork");
5489 break;
5490
5491 case BUILT_IN_EXECL:
5492 id = get_identifier ("__gcov_execl");
5493 break;
5494
5495 case BUILT_IN_EXECV:
5496 id = get_identifier ("__gcov_execv");
5497 break;
5498
5499 case BUILT_IN_EXECLP:
5500 id = get_identifier ("__gcov_execlp");
5501 break;
5502
5503 case BUILT_IN_EXECLE:
5504 id = get_identifier ("__gcov_execle");
5505 break;
5506
5507 case BUILT_IN_EXECVP:
5508 id = get_identifier ("__gcov_execvp");
5509 break;
5510
5511 case BUILT_IN_EXECVE:
5512 id = get_identifier ("__gcov_execve");
5513 break;
5514
5515 default:
5516 gcc_unreachable ();
5517 }
5518
5519 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5520 FUNCTION_DECL, id, TREE_TYPE (fn));
5521 DECL_EXTERNAL (decl) = 1;
5522 TREE_PUBLIC (decl) = 1;
5523 DECL_ARTIFICIAL (decl) = 1;
5524 TREE_NOTHROW (decl) = 1;
5525 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5526 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5527 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5528 return expand_call (call, target, ignore);
5529 }
5530
5531
5532 \f
5533 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5534 the pointer in these functions is void*, the tree optimizers may remove
5535 casts. The mode computed in expand_builtin isn't reliable either, due
5536 to __sync_bool_compare_and_swap.
5537
5538 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5539 group of builtins. This gives us log2 of the mode size. */
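/* For example, for the __sync_fetch_and_add family on a typical target,
   a FCODE_DIFF of 0 selects the 1-byte QImode variant, 1 the 2-byte HImode
   variant, 2 the 4-byte SImode variant and 3 the 8-byte DImode variant
   (BITS_PER_UNIT << FCODE_DIFF bits).  */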
5540
5541 static inline machine_mode
5542 get_builtin_sync_mode (int fcode_diff)
5543 {
5544 /* The size is not negotiable, so ask not to get BLKmode in return
5545 if the target indicates that a smaller size would be better. */
5546 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5547 }
5548
5549 /* Expand the memory expression LOC and return the appropriate memory operand
5550 for the builtin_sync operations. */
5551
5552 static rtx
5553 get_builtin_sync_mem (tree loc, machine_mode mode)
5554 {
5555 rtx addr, mem;
5556
5557 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5558 addr = convert_memory_address (Pmode, addr);
5559
5560 /* Note that we explicitly do not want any alias information for this
5561 memory, so that we kill all other live memories. Otherwise we don't
5562 satisfy the full barrier semantics of the intrinsic. */
5563 mem = validize_mem (gen_rtx_MEM (mode, addr));
5564
5565 /* The alignment needs to be at least that of the mode. */
5566 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5567 get_pointer_alignment (loc)));
5568 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5569 MEM_VOLATILE_P (mem) = 1;
5570
5571 return mem;
5572 }
5573
5574 /* Make sure an argument is in the right mode.
5575 EXP is the tree argument.
5576 MODE is the mode it should be in. */
5577
5578 static rtx
5579 expand_expr_force_mode (tree exp, machine_mode mode)
5580 {
5581 rtx val;
5582 machine_mode old_mode;
5583
5584 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5585 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5586 of CONST_INTs, where we know the old_mode only from the call argument. */
5587
5588 old_mode = GET_MODE (val);
5589 if (old_mode == VOIDmode)
5590 old_mode = TYPE_MODE (TREE_TYPE (exp));
5591 val = convert_modes (mode, old_mode, val, 1);
5592 return val;
5593 }
5594
5595
5596 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5597 EXP is the CALL_EXPR. CODE is the rtx code
5598 that corresponds to the arithmetic or logical operation from the name;
5599 an exception here is that NOT actually means NAND. TARGET is an optional
5600 place for us to store the results; AFTER is true if this is the
5601 xxx_and_fetch form, i.e. the value after the operation is returned. */
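/* Illustrative semantics (GCC 4.4 and later), sketched in C: for the NAND
   forms the operation performed atomically is
     old = *ptr; *ptr = ~(old & val);
   __sync_fetch_and_nand returns OLD while __sync_nand_and_fetch returns the
   new value; the AFTER parameter selects between the two.  */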
5602
5603 static rtx
5604 expand_builtin_sync_operation (machine_mode mode, tree exp,
5605 enum rtx_code code, bool after,
5606 rtx target)
5607 {
5608 rtx val, mem;
5609 location_t loc = EXPR_LOCATION (exp);
5610
5611 if (code == NOT && warn_sync_nand)
5612 {
5613 tree fndecl = get_callee_fndecl (exp);
5614 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5615
5616 static bool warned_f_a_n, warned_n_a_f;
5617
5618 switch (fcode)
5619 {
5620 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5621 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5622 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5623 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5624 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5625 if (warned_f_a_n)
5626 break;
5627
5628 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5629 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5630 warned_f_a_n = true;
5631 break;
5632
5633 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5634 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5635 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5636 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5637 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5638 if (warned_n_a_f)
5639 break;
5640
5641 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5642 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5643 warned_n_a_f = true;
5644 break;
5645
5646 default:
5647 gcc_unreachable ();
5648 }
5649 }
5650
5651 /* Expand the operands. */
5652 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5653 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5654
5655 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5656 after);
5657 }
5658
5659 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5660 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5661 true if this is the boolean form. TARGET is a place for us to store the
5662 results; this is NOT optional if IS_BOOL is true. */
5663
5664 static rtx
5665 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5666 bool is_bool, rtx target)
5667 {
5668 rtx old_val, new_val, mem;
5669 rtx *pbool, *poval;
5670
5671 /* Expand the operands. */
5672 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5673 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5674 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5675
5676 pbool = poval = NULL;
5677 if (target != const0_rtx)
5678 {
5679 if (is_bool)
5680 pbool = &target;
5681 else
5682 poval = &target;
5683 }
5684 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5685 false, MEMMODEL_SYNC_SEQ_CST,
5686 MEMMODEL_SYNC_SEQ_CST))
5687 return NULL_RTX;
5688
5689 return target;
5690 }
5691
5692 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5693 general form is actually an atomic exchange, and some targets only
5694 support a reduced form with the second argument being a constant 1.
5695 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5696 the results. */
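/* Illustrative use (a hypothetical spinlock, not taken from this file):
   a caller acquires with  while (__sync_lock_test_and_set (&flag, 1));
   and later releases with  __sync_lock_release (&flag);  the test-and-set
   form has acquire semantics only.  */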
5697
5698 static rtx
5699 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5700 rtx target)
5701 {
5702 rtx val, mem;
5703
5704 /* Expand the operands. */
5705 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5706 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5707
5708 return expand_sync_lock_test_and_set (target, mem, val);
5709 }
5710
5711 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5712
5713 static void
5714 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5715 {
5716 rtx mem;
5717
5718 /* Expand the operands. */
5719 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5720
5721 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5722 }
5723
5724 /* Given an integer representing an ``enum memmodel'', verify its
5725 correctness and return the memory model enum. */
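/* For reference, the source-level constants map as follows:
   __ATOMIC_RELAXED == 0, __ATOMIC_CONSUME == 1, __ATOMIC_ACQUIRE == 2,
   __ATOMIC_RELEASE == 3, __ATOMIC_ACQ_REL == 4, __ATOMIC_SEQ_CST == 5.
   A non-constant argument is conservatively treated as MEMMODEL_SEQ_CST.  */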
5726
5727 static enum memmodel
5728 get_memmodel (tree exp)
5729 {
5730 rtx op;
5731 unsigned HOST_WIDE_INT val;
5732 source_location loc
5733 = expansion_point_location_if_in_system_header (input_location);
5734
5735 /* If the parameter is not a constant, it's a run time value so we'll just
5736 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5737 if (TREE_CODE (exp) != INTEGER_CST)
5738 return MEMMODEL_SEQ_CST;
5739
5740 op = expand_normal (exp);
5741
5742 val = INTVAL (op);
5743 if (targetm.memmodel_check)
5744 val = targetm.memmodel_check (val);
5745 else if (val & ~MEMMODEL_MASK)
5746 {
5747 warning_at (loc, OPT_Winvalid_memory_model,
5748 "unknown architecture specifier in memory model to builtin");
5749 return MEMMODEL_SEQ_CST;
5750 }
5751
5752 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5753 if (memmodel_base (val) >= MEMMODEL_LAST)
5754 {
5755 warning_at (loc, OPT_Winvalid_memory_model,
5756 "invalid memory model argument to builtin");
5757 return MEMMODEL_SEQ_CST;
5758 }
5759
5760 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5761 be conservative and promote consume to acquire. */
5762 if (val == MEMMODEL_CONSUME)
5763 val = MEMMODEL_ACQUIRE;
5764
5765 return (enum memmodel) val;
5766 }
5767
5768 /* Expand the __atomic_exchange intrinsic:
5769 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5770 EXP is the CALL_EXPR.
5771 TARGET is an optional place for us to store the results. */
5772
5773 static rtx
5774 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5775 {
5776 rtx val, mem;
5777 enum memmodel model;
5778
5779 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5780
5781 if (!flag_inline_atomics)
5782 return NULL_RTX;
5783
5784 /* Expand the operands. */
5785 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5786 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5787
5788 return expand_atomic_exchange (target, mem, val, model);
5789 }
5790
5791 /* Expand the __atomic_compare_exchange intrinsic:
5792 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5793 TYPE desired, BOOL weak,
5794 enum memmodel success,
5795 enum memmodel failure)
5796 EXP is the CALL_EXPR.
5797 TARGET is an optional place for us to store the results. */
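/* Illustrative use at the source level (a hypothetical caller):
     bool ok = __atomic_compare_exchange_n (&x, &expected, desired, 0,
                                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
   On failure EXPECTED is updated with the value actually found in *OBJECT,
   which is why the expansion below conditionally stores OLDVAL back.  */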
5798
5799 static rtx
5800 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5801 rtx target)
5802 {
5803 rtx expect, desired, mem, oldval;
5804 rtx_code_label *label;
5805 enum memmodel success, failure;
5806 tree weak;
5807 bool is_weak;
5808 source_location loc
5809 = expansion_point_location_if_in_system_header (input_location);
5810
5811 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5812 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5813
5814 if (failure > success)
5815 {
5816 warning_at (loc, OPT_Winvalid_memory_model,
5817 "failure memory model cannot be stronger than success "
5818 "memory model for %<__atomic_compare_exchange%>");
5819 success = MEMMODEL_SEQ_CST;
5820 }
5821
5822 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5823 {
5824 warning_at (loc, OPT_Winvalid_memory_model,
5825 "invalid failure memory model for "
5826 "%<__atomic_compare_exchange%>");
5827 failure = MEMMODEL_SEQ_CST;
5828 success = MEMMODEL_SEQ_CST;
5829 }
5830
5831
5832 if (!flag_inline_atomics)
5833 return NULL_RTX;
5834
5835 /* Expand the operands. */
5836 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5837
5838 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5839 expect = convert_memory_address (Pmode, expect);
5840 expect = gen_rtx_MEM (mode, expect);
5841 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5842
5843 weak = CALL_EXPR_ARG (exp, 3);
5844 is_weak = false;
5845 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5846 is_weak = true;
5847
5848 if (target == const0_rtx)
5849 target = NULL;
5850
5851 /* Lest the rtl backend create a race condition with an improper store
5852 to memory, always create a new pseudo for OLDVAL. */
5853 oldval = NULL;
5854
5855 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5856 is_weak, success, failure))
5857 return NULL_RTX;
5858
5859 /* Conditionally store back to EXPECT, lest we create a race condition
5860 with an improper store to memory. */
5861 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5862 the normal case where EXPECT is totally private, i.e. a register. At
5863 which point the store can be unconditional. */
5864 label = gen_label_rtx ();
5865 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5866 GET_MODE (target), 1, label);
5867 emit_move_insn (expect, oldval);
5868 emit_label (label);
5869
5870 return target;
5871 }
5872
5873 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5874 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5875 call. The weak parameter must be dropped to match the expected parameter
5876 list and the expected argument changed from value to pointer to memory
5877 slot. */
5878
5879 static void
5880 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5881 {
5882 unsigned int z;
5883 vec<tree, va_gc> *vec;
5884
5885 vec_alloc (vec, 5);
5886 vec->quick_push (gimple_call_arg (call, 0));
5887 tree expected = gimple_call_arg (call, 1);
5888 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5889 TREE_TYPE (expected));
5890 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5891 if (expd != x)
5892 emit_move_insn (x, expd);
5893 tree v = make_tree (TREE_TYPE (expected), x);
5894 vec->quick_push (build1 (ADDR_EXPR,
5895 build_pointer_type (TREE_TYPE (expected)), v));
5896 vec->quick_push (gimple_call_arg (call, 2));
5897 /* Skip the boolean weak parameter. */
5898 for (z = 4; z < 6; z++)
5899 vec->quick_push (gimple_call_arg (call, z));
5900 built_in_function fncode
5901 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5902 + exact_log2 (GET_MODE_SIZE (mode)));
5903 tree fndecl = builtin_decl_explicit (fncode);
5904 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5905 fndecl);
5906 tree exp = build_call_vec (boolean_type_node, fn, vec);
5907 tree lhs = gimple_call_lhs (call);
5908 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5909 if (lhs)
5910 {
5911 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5912 if (GET_MODE (boolret) != mode)
5913 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5914 x = force_reg (mode, x);
5915 write_complex_part (target, boolret, true);
5916 write_complex_part (target, x, false);
5917 }
5918 }
5919
5920 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5921
5922 void
5923 expand_ifn_atomic_compare_exchange (gcall *call)
5924 {
5925 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5926 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5927 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5928 rtx expect, desired, mem, oldval, boolret;
5929 enum memmodel success, failure;
5930 tree lhs;
5931 bool is_weak;
5932 source_location loc
5933 = expansion_point_location_if_in_system_header (gimple_location (call));
5934
5935 success = get_memmodel (gimple_call_arg (call, 4));
5936 failure = get_memmodel (gimple_call_arg (call, 5));
5937
5938 if (failure > success)
5939 {
5940 warning_at (loc, OPT_Winvalid_memory_model,
5941 "failure memory model cannot be stronger than success "
5942 "memory model for %<__atomic_compare_exchange%>");
5943 success = MEMMODEL_SEQ_CST;
5944 }
5945
5946 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5947 {
5948 warning_at (loc, OPT_Winvalid_memory_model,
5949 "invalid failure memory model for "
5950 "%<__atomic_compare_exchange%>");
5951 failure = MEMMODEL_SEQ_CST;
5952 success = MEMMODEL_SEQ_CST;
5953 }
5954
5955 if (!flag_inline_atomics)
5956 {
5957 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5958 return;
5959 }
5960
5961 /* Expand the operands. */
5962 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5963
5964 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5965 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5966
5967 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5968
5969 boolret = NULL;
5970 oldval = NULL;
5971
5972 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5973 is_weak, success, failure))
5974 {
5975 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5976 return;
5977 }
5978
5979 lhs = gimple_call_lhs (call);
5980 if (lhs)
5981 {
5982 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5983 if (GET_MODE (boolret) != mode)
5984 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5985 write_complex_part (target, boolret, true);
5986 write_complex_part (target, oldval, false);
5987 }
5988 }
5989
5990 /* Expand the __atomic_load intrinsic:
5991 TYPE __atomic_load (TYPE *object, enum memmodel)
5992 EXP is the CALL_EXPR.
5993 TARGET is an optional place for us to store the results. */
5994
5995 static rtx
5996 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5997 {
5998 rtx mem;
5999 enum memmodel model;
6000
6001 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6002 if (is_mm_release (model) || is_mm_acq_rel (model))
6003 {
6004 source_location loc
6005 = expansion_point_location_if_in_system_header (input_location);
6006 warning_at (loc, OPT_Winvalid_memory_model,
6007 "invalid memory model for %<__atomic_load%>");
6008 model = MEMMODEL_SEQ_CST;
6009 }
6010
6011 if (!flag_inline_atomics)
6012 return NULL_RTX;
6013
6014 /* Expand the operand. */
6015 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6016
6017 return expand_atomic_load (target, mem, model);
6018 }
6019
6020
6021 /* Expand the __atomic_store intrinsic:
6022 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6023 EXP is the CALL_EXPR.
6024 TARGET is an optional place for us to store the results. */
6025
6026 static rtx
6027 expand_builtin_atomic_store (machine_mode mode, tree exp)
6028 {
6029 rtx mem, val;
6030 enum memmodel model;
6031
6032 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6033 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6034 || is_mm_release (model)))
6035 {
6036 source_location loc
6037 = expansion_point_location_if_in_system_header (input_location);
6038 warning_at (loc, OPT_Winvalid_memory_model,
6039 "invalid memory model for %<__atomic_store%>");
6040 model = MEMMODEL_SEQ_CST;
6041 }
6042
6043 if (!flag_inline_atomics)
6044 return NULL_RTX;
6045
6046 /* Expand the operands. */
6047 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6048 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6049
6050 return expand_atomic_store (mem, val, model, false);
6051 }
6052
6053 /* Expand the __atomic_fetch_XXX intrinsic:
6054 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6055 EXP is the CALL_EXPR.
6056 TARGET is an optional place for us to store the results.
6057 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6058 FETCH_AFTER is true if returning the result of the operation.
6059 FETCH_AFTER is false if returning the value before the operation.
6060 IGNORE is true if the result is not used.
6061 EXT_CALL is the correct builtin for an external call if this cannot be
6062 resolved to an instruction sequence. */
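/* Illustrative semantics: __atomic_fetch_add (p, v, m) returns the value of
   *P before the addition and __atomic_add_fetch (p, v, m) the value after;
   they reach this function with FETCH_AFTER false and true respectively.
   When only the fetch-before library routine is available, the code below
   re-applies the operation to its result to recover the fetch-after value.  */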
6063
6064 static rtx
6065 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6066 enum rtx_code code, bool fetch_after,
6067 bool ignore, enum built_in_function ext_call)
6068 {
6069 rtx val, mem, ret;
6070 enum memmodel model;
6071 tree fndecl;
6072 tree addr;
6073
6074 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6075
6076 /* Expand the operands. */
6077 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6078 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6079
6080 /* Only try generating instructions if inlining is turned on. */
6081 if (flag_inline_atomics)
6082 {
6083 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6084 if (ret)
6085 return ret;
6086 }
6087
6088 /* Return if a different routine isn't needed for the library call. */
6089 if (ext_call == BUILT_IN_NONE)
6090 return NULL_RTX;
6091
6092 /* Change the call to the specified function. */
6093 fndecl = get_callee_fndecl (exp);
6094 addr = CALL_EXPR_FN (exp);
6095 STRIP_NOPS (addr);
6096
6097 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6098 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6099
6100 /* If we will emit code after the call, the call cannot be a tail call.
6101 If it is emitted as a tail call, a barrier is emitted after it, and
6102 then all trailing code is removed. */
6103 if (!ignore)
6104 CALL_EXPR_TAILCALL (exp) = 0;
6105
6106 /* Expand the call here so we can emit trailing code. */
6107 ret = expand_call (exp, target, ignore);
6108
6109 /* Replace the original function just in case it matters. */
6110 TREE_OPERAND (addr, 0) = fndecl;
6111
6112 /* Then issue the arithmetic correction to return the right result. */
6113 if (!ignore)
6114 {
6115 if (code == NOT)
6116 {
6117 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6118 OPTAB_LIB_WIDEN);
6119 ret = expand_simple_unop (mode, NOT, ret, target, true);
6120 }
6121 else
6122 ret = expand_simple_binop (mode, code, ret, val, target, true,
6123 OPTAB_LIB_WIDEN);
6124 }
6125 return ret;
6126 }
6127
6128 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
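/* These internal functions are created by the tree optimizers for idioms
   such as (a hypothetical source-level example)
     if (__atomic_fetch_or (&word, 1 << bit, __ATOMIC_SEQ_CST) & (1 << bit))
   so that targets providing atomic_bit_test_and_* patterns can use them
   directly; otherwise we fall back to a plain atomic fetch op below.  */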
6129
6130 void
6131 expand_ifn_atomic_bit_test_and (gcall *call)
6132 {
6133 tree ptr = gimple_call_arg (call, 0);
6134 tree bit = gimple_call_arg (call, 1);
6135 tree flag = gimple_call_arg (call, 2);
6136 tree lhs = gimple_call_lhs (call);
6137 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6138 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6139 enum rtx_code code;
6140 optab optab;
6141 struct expand_operand ops[5];
6142
6143 gcc_assert (flag_inline_atomics);
6144
6145 if (gimple_call_num_args (call) == 4)
6146 model = get_memmodel (gimple_call_arg (call, 3));
6147
6148 rtx mem = get_builtin_sync_mem (ptr, mode);
6149 rtx val = expand_expr_force_mode (bit, mode);
6150
6151 switch (gimple_call_internal_fn (call))
6152 {
6153 case IFN_ATOMIC_BIT_TEST_AND_SET:
6154 code = IOR;
6155 optab = atomic_bit_test_and_set_optab;
6156 break;
6157 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6158 code = XOR;
6159 optab = atomic_bit_test_and_complement_optab;
6160 break;
6161 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6162 code = AND;
6163 optab = atomic_bit_test_and_reset_optab;
6164 break;
6165 default:
6166 gcc_unreachable ();
6167 }
6168
6169 if (lhs == NULL_TREE)
6170 {
6171 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6172 val, NULL_RTX, true, OPTAB_DIRECT);
6173 if (code == AND)
6174 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6175 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6176 return;
6177 }
6178
6179 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6180 enum insn_code icode = direct_optab_handler (optab, mode);
6181 gcc_assert (icode != CODE_FOR_nothing);
6182 create_output_operand (&ops[0], target, mode);
6183 create_fixed_operand (&ops[1], mem);
6184 create_convert_operand_to (&ops[2], val, mode, true);
6185 create_integer_operand (&ops[3], model);
6186 create_integer_operand (&ops[4], integer_onep (flag));
6187 if (maybe_expand_insn (icode, 5, ops))
6188 return;
6189
6190 rtx bitval = val;
6191 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6192 val, NULL_RTX, true, OPTAB_DIRECT);
6193 rtx maskval = val;
6194 if (code == AND)
6195 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6196 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6197 code, model, false);
6198 if (integer_onep (flag))
6199 {
6200 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6201 NULL_RTX, true, OPTAB_DIRECT);
6202 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6203 true, OPTAB_DIRECT);
6204 }
6205 else
6206 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6207 OPTAB_DIRECT);
6208 if (result != target)
6209 emit_move_insn (target, result);
6210 }
6211
6212 /* Expand an atomic clear operation.
6213 void _atomic_clear (BOOL *obj, enum memmodel)
6214 EXP is the call expression. */
6215
6216 static rtx
6217 expand_builtin_atomic_clear (tree exp)
6218 {
6219 machine_mode mode;
6220 rtx mem, ret;
6221 enum memmodel model;
6222
6223 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6224 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6225 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6226
6227 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6228 {
6229 source_location loc
6230 = expansion_point_location_if_in_system_header (input_location);
6231 warning_at (loc, OPT_Winvalid_memory_model,
6232 "invalid memory model for %<__atomic_store%>");
6233 model = MEMMODEL_SEQ_CST;
6234 }
6235
6236 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6237 Failing that, issue a plain store. The only way this can
6238 fail is if the bool type is larger than a word size. Unlikely, but
6239 handle it anyway for completeness. Assume a single threaded model since
6240 there is no atomic support in this case, and no barriers are required. */
6241 ret = expand_atomic_store (mem, const0_rtx, model, true);
6242 if (!ret)
6243 emit_move_insn (mem, const0_rtx);
6244 return const0_rtx;
6245 }
6246
6247 /* Expand an atomic test_and_set operation.
6248 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6249 EXP is the call expression. */
6250
6251 static rtx
6252 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6253 {
6254 rtx mem;
6255 enum memmodel model;
6256 machine_mode mode;
6257
6258 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6259 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6260 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6261
6262 return expand_atomic_test_and_set (target, mem, model);
6263 }
6264
6265
6266 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6267 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
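/* For illustration (assuming a typical 64-bit target):
   __atomic_always_lock_free (4, 0) folds to true when a 4-byte
   compare-and-swap and atomic load exist, while a pointer argument whose
   known alignment is below the mode alignment makes the answer false.  */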
6268
6269 static tree
6270 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6271 {
6272 int size;
6273 machine_mode mode;
6274 unsigned int mode_align, type_align;
6275
6276 if (TREE_CODE (arg0) != INTEGER_CST)
6277 return NULL_TREE;
6278
6279 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6280 mode = mode_for_size (size, MODE_INT, 0);
6281 mode_align = GET_MODE_ALIGNMENT (mode);
6282
6283 if (TREE_CODE (arg1) == INTEGER_CST)
6284 {
6285 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6286
6287 /* Either this argument is null, or it's a fake pointer encoding
6288 the alignment of the object. */
6289 val = least_bit_hwi (val);
6290 val *= BITS_PER_UNIT;
6291
6292 if (val == 0 || mode_align < val)
6293 type_align = mode_align;
6294 else
6295 type_align = val;
6296 }
6297 else
6298 {
6299 tree ttype = TREE_TYPE (arg1);
6300
6301 /* This function is usually invoked and folded immediately by the front
6302 end before anything else has a chance to look at it. The pointer
6303 parameter at this point is usually cast to a void *, so check for that
6304 and look past the cast. */
6305 if (CONVERT_EXPR_P (arg1)
6306 && POINTER_TYPE_P (ttype)
6307 && VOID_TYPE_P (TREE_TYPE (ttype))
6308 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6309 arg1 = TREE_OPERAND (arg1, 0);
6310
6311 ttype = TREE_TYPE (arg1);
6312 gcc_assert (POINTER_TYPE_P (ttype));
6313
6314 /* Get the underlying type of the object. */
6315 ttype = TREE_TYPE (ttype);
6316 type_align = TYPE_ALIGN (ttype);
6317 }
6318
6319 /* If the object has smaller alignment, the lock free routines cannot
6320 be used. */
6321 if (type_align < mode_align)
6322 return boolean_false_node;
6323
6324 /* Check if a compare_and_swap pattern exists for the mode which represents
6325 the required size. The pattern is not allowed to fail, so the existence
6326 of the pattern indicates support is present. Also require that an
6327 atomic load exists for the required size. */
6328 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6329 return boolean_true_node;
6330 else
6331 return boolean_false_node;
6332 }
6333
6334 /* Return true if the parameters to call EXP represent an object which will
6335 always generate lock free instructions. The first argument represents the
6336 size of the object, and the second parameter is a pointer to the object
6337 itself. If NULL is passed for the object, then the result is based on
6338 typical alignment for an object of the specified size. Otherwise return
6339 false. */
6340
6341 static rtx
6342 expand_builtin_atomic_always_lock_free (tree exp)
6343 {
6344 tree size;
6345 tree arg0 = CALL_EXPR_ARG (exp, 0);
6346 tree arg1 = CALL_EXPR_ARG (exp, 1);
6347
6348 if (TREE_CODE (arg0) != INTEGER_CST)
6349 {
6350 error ("non-constant argument 1 to __atomic_always_lock_free");
6351 return const0_rtx;
6352 }
6353
6354 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6355 if (size == boolean_true_node)
6356 return const1_rtx;
6357 return const0_rtx;
6358 }
6359
6360 /* Return boolean_true_node if it can be determined that the object ARG1 of
6361 size ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
6362
6363 static tree
6364 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6365 {
6366 if (!flag_inline_atomics)
6367 return NULL_TREE;
6368
6369 /* If it isn't always lock free, don't generate a result. */
6370 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6371 return boolean_true_node;
6372
6373 return NULL_TREE;
6374 }
6375
6376 /* Return const1_rtx if the parameters to call EXP represent an object which
6377 is known at compile time to be lock free. The first argument represents the
6378 size of the object, and the second parameter is a pointer to the object
6379 itself. If NULL is passed for the object, then the result is based on
6380 typical alignment for an object of the specified size. Otherwise return
6381 NULL_RTX. */
6382
6383 static rtx
6384 expand_builtin_atomic_is_lock_free (tree exp)
6385 {
6386 tree size;
6387 tree arg0 = CALL_EXPR_ARG (exp, 0);
6388 tree arg1 = CALL_EXPR_ARG (exp, 1);
6389
6390 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6391 {
6392 error ("non-integer argument 1 to __atomic_is_lock_free");
6393 return NULL_RTX;
6394 }
6395
6396 if (!flag_inline_atomics)
6397 return NULL_RTX;
6398
6399 /* If the value is known at compile time, return the RTX for it. */
6400 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6401 if (size == boolean_true_node)
6402 return const1_rtx;
6403
6404 return NULL_RTX;
6405 }
6406
6407 /* Expand the __atomic_thread_fence intrinsic:
6408 void __atomic_thread_fence (enum memmodel)
6409 EXP is the CALL_EXPR. */
6410
6411 static void
6412 expand_builtin_atomic_thread_fence (tree exp)
6413 {
6414 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6415 expand_mem_thread_fence (model);
6416 }
6417
6418 /* Expand the __atomic_signal_fence intrinsic:
6419 void __atomic_signal_fence (enum memmodel)
6420 EXP is the CALL_EXPR. */
6421
6422 static void
6423 expand_builtin_atomic_signal_fence (tree exp)
6424 {
6425 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6426 expand_mem_signal_fence (model);
6427 }
6428
6429 /* Expand the __sync_synchronize intrinsic. */
6430
6431 static void
6432 expand_builtin_sync_synchronize (void)
6433 {
6434 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6435 }
6436
6437 static rtx
6438 expand_builtin_thread_pointer (tree exp, rtx target)
6439 {
6440 enum insn_code icode;
6441 if (!validate_arglist (exp, VOID_TYPE))
6442 return const0_rtx;
6443 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6444 if (icode != CODE_FOR_nothing)
6445 {
6446 struct expand_operand op;
6447 /* If the target is not suitable then create a new target. */
6448 if (target == NULL_RTX
6449 || !REG_P (target)
6450 || GET_MODE (target) != Pmode)
6451 target = gen_reg_rtx (Pmode);
6452 create_output_operand (&op, target, Pmode);
6453 expand_insn (icode, 1, &op);
6454 return target;
6455 }
6456 error ("__builtin_thread_pointer is not supported on this target");
6457 return const0_rtx;
6458 }
6459
6460 static void
6461 expand_builtin_set_thread_pointer (tree exp)
6462 {
6463 enum insn_code icode;
6464 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6465 return;
6466 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6467 if (icode != CODE_FOR_nothing)
6468 {
6469 struct expand_operand op;
6470 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6471 Pmode, EXPAND_NORMAL);
6472 create_input_operand (&op, val, Pmode);
6473 expand_insn (icode, 1, &op);
6474 return;
6475 }
6476 error ("__builtin_set_thread_pointer is not supported on this target");
6477 }
6478
6479 \f
6480 /* Emit code to restore the current value of stack. */
6481
6482 static void
6483 expand_stack_restore (tree var)
6484 {
6485 rtx_insn *prev;
6486 rtx sa = expand_normal (var);
6487
6488 sa = convert_memory_address (Pmode, sa);
6489
6490 prev = get_last_insn ();
6491 emit_stack_restore (SAVE_BLOCK, sa);
6492
6493 record_new_stack_level ();
6494
6495 fixup_args_size_notes (prev, get_last_insn (), 0);
6496 }
6497
6498 /* Emit code to save the current value of stack. */
6499
6500 static rtx
6501 expand_stack_save (void)
6502 {
6503 rtx ret = NULL_RTX;
6504
6505 emit_stack_save (SAVE_BLOCK, &ret);
6506 return ret;
6507 }
6508
6509
6510 /* Expand an expression EXP that calls a built-in function,
6511 with result going to TARGET if that's convenient
6512 (and in mode MODE if that's convenient).
6513 SUBTARGET may be used as the target for computing one of EXP's operands.
6514 IGNORE is nonzero if the value is to be ignored. */
6515
6516 rtx
6517 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6518 int ignore)
6519 {
6520 tree fndecl = get_callee_fndecl (exp);
6521 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6522 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6523 int flags;
6524
6525 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6526 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6527
6528 /* When ASan is enabled, we don't want to expand some memory/string
6529 builtins and instead rely on libsanitizer's hooks. This allows us to avoid
6530 redundant checks and be sure that a possible overflow will be detected
6531 by ASan. */
6532
6533 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6534 return expand_call (exp, target, ignore);
6535
6536 /* When not optimizing, generate calls to library functions for a certain
6537 set of builtins. */
6538 if (!optimize
6539 && !called_as_built_in (fndecl)
6540 && fcode != BUILT_IN_FORK
6541 && fcode != BUILT_IN_EXECL
6542 && fcode != BUILT_IN_EXECV
6543 && fcode != BUILT_IN_EXECLP
6544 && fcode != BUILT_IN_EXECLE
6545 && fcode != BUILT_IN_EXECVP
6546 && fcode != BUILT_IN_EXECVE
6547 && fcode != BUILT_IN_ALLOCA
6548 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6549 && fcode != BUILT_IN_FREE
6550 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6551 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6552 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6553 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6554 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6555 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6556 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6557 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6558 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6559 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6560 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6561 && fcode != BUILT_IN_CHKP_BNDRET)
6562 return expand_call (exp, target, ignore);
6563
6564 /* The built-in function expanders test for target == const0_rtx
6565 to determine whether the function's result will be ignored. */
6566 if (ignore)
6567 target = const0_rtx;
6568
6569 /* If the result of a pure or const built-in function is ignored, and
6570 none of its arguments are volatile, we can avoid expanding the
6571 built-in call and just evaluate the arguments for side-effects. */
6572 if (target == const0_rtx
6573 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6574 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6575 {
6576 bool volatilep = false;
6577 tree arg;
6578 call_expr_arg_iterator iter;
6579
6580 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6581 if (TREE_THIS_VOLATILE (arg))
6582 {
6583 volatilep = true;
6584 break;
6585 }
6586
6587 if (! volatilep)
6588 {
6589 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6590 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6591 return const0_rtx;
6592 }
6593 }
6594
6595 /* expand_builtin_with_bounds is supposed to be used for
6596 instrumented builtin calls. */
6597 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6598
6599 switch (fcode)
6600 {
6601 CASE_FLT_FN (BUILT_IN_FABS):
6602 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6603 case BUILT_IN_FABSD32:
6604 case BUILT_IN_FABSD64:
6605 case BUILT_IN_FABSD128:
6606 target = expand_builtin_fabs (exp, target, subtarget);
6607 if (target)
6608 return target;
6609 break;
6610
6611 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6612 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6613 target = expand_builtin_copysign (exp, target, subtarget);
6614 if (target)
6615 return target;
6616 break;
6617
6618 /* Just do a normal library call if we were unable to fold
6619 the values. */
6620 CASE_FLT_FN (BUILT_IN_CABS):
6621 break;
6622
6623 CASE_FLT_FN (BUILT_IN_FMA):
6624 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6625 if (target)
6626 return target;
6627 break;
6628
6629 CASE_FLT_FN (BUILT_IN_ILOGB):
6630 if (! flag_unsafe_math_optimizations)
6631 break;
6632 gcc_fallthrough ();
6633 CASE_FLT_FN (BUILT_IN_ISINF):
6634 CASE_FLT_FN (BUILT_IN_FINITE):
6635 case BUILT_IN_ISFINITE:
6636 case BUILT_IN_ISNORMAL:
6637 target = expand_builtin_interclass_mathfn (exp, target);
6638 if (target)
6639 return target;
6640 break;
6641
6642 CASE_FLT_FN (BUILT_IN_ICEIL):
6643 CASE_FLT_FN (BUILT_IN_LCEIL):
6644 CASE_FLT_FN (BUILT_IN_LLCEIL):
6645 CASE_FLT_FN (BUILT_IN_LFLOOR):
6646 CASE_FLT_FN (BUILT_IN_IFLOOR):
6647 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6648 target = expand_builtin_int_roundingfn (exp, target);
6649 if (target)
6650 return target;
6651 break;
6652
6653 CASE_FLT_FN (BUILT_IN_IRINT):
6654 CASE_FLT_FN (BUILT_IN_LRINT):
6655 CASE_FLT_FN (BUILT_IN_LLRINT):
6656 CASE_FLT_FN (BUILT_IN_IROUND):
6657 CASE_FLT_FN (BUILT_IN_LROUND):
6658 CASE_FLT_FN (BUILT_IN_LLROUND):
6659 target = expand_builtin_int_roundingfn_2 (exp, target);
6660 if (target)
6661 return target;
6662 break;
6663
6664 CASE_FLT_FN (BUILT_IN_POWI):
6665 target = expand_builtin_powi (exp, target);
6666 if (target)
6667 return target;
6668 break;
6669
6670 CASE_FLT_FN (BUILT_IN_CEXPI):
6671 target = expand_builtin_cexpi (exp, target);
6672 gcc_assert (target);
6673 return target;
6674
6675 CASE_FLT_FN (BUILT_IN_SIN):
6676 CASE_FLT_FN (BUILT_IN_COS):
6677 if (! flag_unsafe_math_optimizations)
6678 break;
6679 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6680 if (target)
6681 return target;
6682 break;
6683
6684 CASE_FLT_FN (BUILT_IN_SINCOS):
6685 if (! flag_unsafe_math_optimizations)
6686 break;
6687 target = expand_builtin_sincos (exp);
6688 if (target)
6689 return target;
6690 break;
6691
6692 case BUILT_IN_APPLY_ARGS:
6693 return expand_builtin_apply_args ();
6694
6695 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6696 FUNCTION with a copy of the parameters described by
6697 ARGUMENTS, and ARGSIZE. It returns a block of memory
6698 allocated on the stack into which is stored all the registers
6699 that might possibly be used for returning the result of a
6700 function. ARGUMENTS is the value returned by
6701 __builtin_apply_args. ARGSIZE is the number of bytes of
6702 arguments that must be copied. ??? How should this value be
6703 computed? We'll also need a safe worst case value for varargs
6704 functions. */
6705 case BUILT_IN_APPLY:
6706 if (!validate_arglist (exp, POINTER_TYPE,
6707 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6708 && !validate_arglist (exp, REFERENCE_TYPE,
6709 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6710 return const0_rtx;
6711 else
6712 {
6713 rtx ops[3];
6714
6715 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6716 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6717 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6718
6719 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6720 }
6721
6722 /* __builtin_return (RESULT) causes the function to return the
6723 value described by RESULT. RESULT is address of the block of
6724 memory returned by __builtin_apply. */
6725 case BUILT_IN_RETURN:
6726 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6727 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6728 return const0_rtx;
6729
6730 case BUILT_IN_SAVEREGS:
6731 return expand_builtin_saveregs ();
6732
6733 case BUILT_IN_VA_ARG_PACK:
6734 /* All valid uses of __builtin_va_arg_pack () are removed during
6735 inlining. */
6736 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6737 return const0_rtx;
6738
6739 case BUILT_IN_VA_ARG_PACK_LEN:
6740 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6741 inlining. */
6742 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6743 return const0_rtx;
6744
6745 /* Return the address of the first anonymous stack arg. */
6746 case BUILT_IN_NEXT_ARG:
6747 if (fold_builtin_next_arg (exp, false))
6748 return const0_rtx;
6749 return expand_builtin_next_arg ();
6750
6751 case BUILT_IN_CLEAR_CACHE:
6752 target = expand_builtin___clear_cache (exp);
6753 if (target)
6754 return target;
6755 break;
6756
6757 case BUILT_IN_CLASSIFY_TYPE:
6758 return expand_builtin_classify_type (exp);
6759
6760 case BUILT_IN_CONSTANT_P:
6761 return const0_rtx;
6762
6763 case BUILT_IN_FRAME_ADDRESS:
6764 case BUILT_IN_RETURN_ADDRESS:
6765 return expand_builtin_frame_address (fndecl, exp);
6766
6767 /* Returns the address of the area where the structure is returned.
6768 0 otherwise. */
6769 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6770 if (call_expr_nargs (exp) != 0
6771 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6772 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6773 return const0_rtx;
6774 else
6775 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6776
6777 case BUILT_IN_ALLOCA:
6778 case BUILT_IN_ALLOCA_WITH_ALIGN:
6779 target = expand_builtin_alloca (exp);
6780 if (target)
6781 return target;
6782 break;
6783
6784 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6785 return expand_asan_emit_allocas_unpoison (exp);
6786
6787 case BUILT_IN_STACK_SAVE:
6788 return expand_stack_save ();
6789
6790 case BUILT_IN_STACK_RESTORE:
6791 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6792 return const0_rtx;
6793
6794 case BUILT_IN_BSWAP16:
6795 case BUILT_IN_BSWAP32:
6796 case BUILT_IN_BSWAP64:
6797 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6798 if (target)
6799 return target;
6800 break;
6801
6802 CASE_INT_FN (BUILT_IN_FFS):
6803 target = expand_builtin_unop (target_mode, exp, target,
6804 subtarget, ffs_optab);
6805 if (target)
6806 return target;
6807 break;
6808
6809 CASE_INT_FN (BUILT_IN_CLZ):
6810 target = expand_builtin_unop (target_mode, exp, target,
6811 subtarget, clz_optab);
6812 if (target)
6813 return target;
6814 break;
6815
6816 CASE_INT_FN (BUILT_IN_CTZ):
6817 target = expand_builtin_unop (target_mode, exp, target,
6818 subtarget, ctz_optab);
6819 if (target)
6820 return target;
6821 break;
6822
6823 CASE_INT_FN (BUILT_IN_CLRSB):
6824 target = expand_builtin_unop (target_mode, exp, target,
6825 subtarget, clrsb_optab);
6826 if (target)
6827 return target;
6828 break;
6829
6830 CASE_INT_FN (BUILT_IN_POPCOUNT):
6831 target = expand_builtin_unop (target_mode, exp, target,
6832 subtarget, popcount_optab);
6833 if (target)
6834 return target;
6835 break;
6836
6837 CASE_INT_FN (BUILT_IN_PARITY):
6838 target = expand_builtin_unop (target_mode, exp, target,
6839 subtarget, parity_optab);
6840 if (target)
6841 return target;
6842 break;
6843
6844 case BUILT_IN_STRLEN:
6845 target = expand_builtin_strlen (exp, target, target_mode);
6846 if (target)
6847 return target;
6848 break;
6849
6850 case BUILT_IN_STRCAT:
6851 target = expand_builtin_strcat (exp, target);
6852 if (target)
6853 return target;
6854 break;
6855
6856 case BUILT_IN_STRCPY:
6857 target = expand_builtin_strcpy (exp, target);
6858 if (target)
6859 return target;
6860 break;
6861
6862 case BUILT_IN_STRNCAT:
6863 target = expand_builtin_strncat (exp, target);
6864 if (target)
6865 return target;
6866 break;
6867
6868 case BUILT_IN_STRNCPY:
6869 target = expand_builtin_strncpy (exp, target);
6870 if (target)
6871 return target;
6872 break;
6873
6874 case BUILT_IN_STPCPY:
6875 target = expand_builtin_stpcpy (exp, target, mode);
6876 if (target)
6877 return target;
6878 break;
6879
6880 case BUILT_IN_STPNCPY:
6881 target = expand_builtin_stpncpy (exp, target);
6882 if (target)
6883 return target;
6884 break;
6885
6886 case BUILT_IN_MEMCHR:
6887 target = expand_builtin_memchr (exp, target);
6888 if (target)
6889 return target;
6890 break;
6891
6892 case BUILT_IN_MEMCPY:
6893 target = expand_builtin_memcpy (exp, target);
6894 if (target)
6895 return target;
6896 break;
6897
6898 case BUILT_IN_MEMMOVE:
6899 target = expand_builtin_memmove (exp, target);
6900 if (target)
6901 return target;
6902 break;
6903
6904 case BUILT_IN_MEMPCPY:
6905 target = expand_builtin_mempcpy (exp, target, mode);
6906 if (target)
6907 return target;
6908 break;
6909
6910 case BUILT_IN_MEMSET:
6911 target = expand_builtin_memset (exp, target, mode);
6912 if (target)
6913 return target;
6914 break;
6915
6916 case BUILT_IN_BZERO:
6917 target = expand_builtin_bzero (exp);
6918 if (target)
6919 return target;
6920 break;
6921
6922 case BUILT_IN_STRCMP:
6923 target = expand_builtin_strcmp (exp, target);
6924 if (target)
6925 return target;
6926 break;
6927
6928 case BUILT_IN_STRNCMP:
6929 target = expand_builtin_strncmp (exp, target, mode);
6930 if (target)
6931 return target;
6932 break;
6933
6934 case BUILT_IN_BCMP:
6935 case BUILT_IN_MEMCMP:
6936 case BUILT_IN_MEMCMP_EQ:
6937 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6938 if (target)
6939 return target;
6940 if (fcode == BUILT_IN_MEMCMP_EQ)
6941 {
6942 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6943 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6944 }
6945 break;
6946
6947 case BUILT_IN_SETJMP:
6948 /* This should have been lowered to the builtins below. */
6949 gcc_unreachable ();
6950
6951 case BUILT_IN_SETJMP_SETUP:
6952 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6953 and the receiver label. */
6954 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6955 {
6956 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6957 VOIDmode, EXPAND_NORMAL);
6958 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6959 rtx_insn *label_r = label_rtx (label);
6960
6961 /* This is copied from the handling of non-local gotos. */
6962 expand_builtin_setjmp_setup (buf_addr, label_r);
6963 nonlocal_goto_handler_labels
6964 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6965 nonlocal_goto_handler_labels);
6966 /* ??? Do not let expand_label treat us as such since we would
6967 not want to be both on the list of non-local labels and on
6968 the list of forced labels. */
6969 FORCED_LABEL (label) = 0;
6970 return const0_rtx;
6971 }
6972 break;
6973
6974 case BUILT_IN_SETJMP_RECEIVER:
6975 /* __builtin_setjmp_receiver is passed the receiver label. */
6976 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6977 {
6978 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6979 rtx_insn *label_r = label_rtx (label);
6980
6981 expand_builtin_setjmp_receiver (label_r);
6982 return const0_rtx;
6983 }
6984 break;
6985
6986 /* __builtin_longjmp is passed a pointer to an array of five words.
6987 It's similar to the C library longjmp function but works with
6988 __builtin_setjmp above. */
6989 case BUILT_IN_LONGJMP:
6990 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6991 {
6992 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6993 VOIDmode, EXPAND_NORMAL);
6994 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6995
6996 if (value != const1_rtx)
6997 {
6998 error ("%<__builtin_longjmp%> second argument must be 1");
6999 return const0_rtx;
7000 }
7001
7002 expand_builtin_longjmp (buf_addr, value);
7003 return const0_rtx;
7004 }
7005 break;
7006
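/* As a rough illustration (a user-level sketch; the buffer is assumed to be
   declared as five words, matching the comments above):

     void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   The second argument of __builtin_longjmp must be the literal 1, which is
   why any other value is diagnosed above.  */
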
7007 case BUILT_IN_NONLOCAL_GOTO:
7008 target = expand_builtin_nonlocal_goto (exp);
7009 if (target)
7010 return target;
7011 break;
7012
7013 /* This updates the setjmp buffer that is its argument with the value
7014 of the current stack pointer. */
7015 case BUILT_IN_UPDATE_SETJMP_BUF:
7016 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7017 {
7018 rtx buf_addr
7019 = expand_normal (CALL_EXPR_ARG (exp, 0));
7020
7021 expand_builtin_update_setjmp_buf (buf_addr);
7022 return const0_rtx;
7023 }
7024 break;
7025
7026 case BUILT_IN_TRAP:
7027 expand_builtin_trap ();
7028 return const0_rtx;
7029
7030 case BUILT_IN_UNREACHABLE:
7031 expand_builtin_unreachable ();
7032 return const0_rtx;
7033
7034 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7035 case BUILT_IN_SIGNBITD32:
7036 case BUILT_IN_SIGNBITD64:
7037 case BUILT_IN_SIGNBITD128:
7038 target = expand_builtin_signbit (exp, target);
7039 if (target)
7040 return target;
7041 break;
7042
7043 /* Various hooks for the DWARF 2 __throw routine. */
7044 case BUILT_IN_UNWIND_INIT:
7045 expand_builtin_unwind_init ();
7046 return const0_rtx;
7047 case BUILT_IN_DWARF_CFA:
7048 return virtual_cfa_rtx;
7049 #ifdef DWARF2_UNWIND_INFO
7050 case BUILT_IN_DWARF_SP_COLUMN:
7051 return expand_builtin_dwarf_sp_column ();
7052 case BUILT_IN_INIT_DWARF_REG_SIZES:
7053 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7054 return const0_rtx;
7055 #endif
7056 case BUILT_IN_FROB_RETURN_ADDR:
7057 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7058 case BUILT_IN_EXTRACT_RETURN_ADDR:
7059 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7060 case BUILT_IN_EH_RETURN:
7061 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7062 CALL_EXPR_ARG (exp, 1));
7063 return const0_rtx;
7064 case BUILT_IN_EH_RETURN_DATA_REGNO:
7065 return expand_builtin_eh_return_data_regno (exp);
7066 case BUILT_IN_EXTEND_POINTER:
7067 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7068 case BUILT_IN_EH_POINTER:
7069 return expand_builtin_eh_pointer (exp);
7070 case BUILT_IN_EH_FILTER:
7071 return expand_builtin_eh_filter (exp);
7072 case BUILT_IN_EH_COPY_VALUES:
7073 return expand_builtin_eh_copy_values (exp);
7074
7075 case BUILT_IN_VA_START:
7076 return expand_builtin_va_start (exp);
7077 case BUILT_IN_VA_END:
7078 return expand_builtin_va_end (exp);
7079 case BUILT_IN_VA_COPY:
7080 return expand_builtin_va_copy (exp);
7081 case BUILT_IN_EXPECT:
7082 return expand_builtin_expect (exp, target);
7083 case BUILT_IN_ASSUME_ALIGNED:
7084 return expand_builtin_assume_aligned (exp, target);
7085 case BUILT_IN_PREFETCH:
7086 expand_builtin_prefetch (exp);
7087 return const0_rtx;
7088
7089 case BUILT_IN_INIT_TRAMPOLINE:
7090 return expand_builtin_init_trampoline (exp, true);
7091 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7092 return expand_builtin_init_trampoline (exp, false);
7093 case BUILT_IN_ADJUST_TRAMPOLINE:
7094 return expand_builtin_adjust_trampoline (exp);
7095
7096 case BUILT_IN_INIT_DESCRIPTOR:
7097 return expand_builtin_init_descriptor (exp);
7098 case BUILT_IN_ADJUST_DESCRIPTOR:
7099 return expand_builtin_adjust_descriptor (exp);
7100
7101 case BUILT_IN_FORK:
7102 case BUILT_IN_EXECL:
7103 case BUILT_IN_EXECV:
7104 case BUILT_IN_EXECLP:
7105 case BUILT_IN_EXECLE:
7106 case BUILT_IN_EXECVP:
7107 case BUILT_IN_EXECVE:
7108 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7109 if (target)
7110 return target;
7111 break;
7112
7113 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7114 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7115 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7116 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7117 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7118 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7119 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7120 if (target)
7121 return target;
7122 break;
7123
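/* As a rough illustration (a user-level sketch): the _1/_2/_4/_8/_16
   suffixes encode the access size in bytes, which get_builtin_sync_mode
   turns back into a machine mode.  For example

     int counter;
     int old = __sync_fetch_and_add (&counter, 1);
     int cur = __sync_add_and_fetch (&counter, 1);

   OLD receives the value before the addition and CUR the value after it;
   the boolean passed to expand_builtin_sync_operation (false here, true in
   the *_AND_FETCH cases below) selects between those two behaviours.  */
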
7124 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7125 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7126 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7127 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7128 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7129 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7130 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7131 if (target)
7132 return target;
7133 break;
7134
7135 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7136 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7137 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7138 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7139 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7140 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7141 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7142 if (target)
7143 return target;
7144 break;
7145
7146 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7147 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7148 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7149 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7150 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7151 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7152 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7153 if (target)
7154 return target;
7155 break;
7156
7157 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7158 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7159 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7160 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7161 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7162 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7163 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7164 if (target)
7165 return target;
7166 break;
7167
7168 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7169 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7170 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7171 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7172 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7173 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7174 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7175 if (target)
7176 return target;
7177 break;
7178
7179 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7180 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7181 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7182 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7183 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7184 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7185 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7186 if (target)
7187 return target;
7188 break;
7189
7190 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7191 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7192 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7193 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7194 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7195 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7196 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7197 if (target)
7198 return target;
7199 break;
7200
7201 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7202 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7203 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7204 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7205 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7206 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7207 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7208 if (target)
7209 return target;
7210 break;
7211
7212 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7213 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7214 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7215 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7216 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7217 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7218 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7219 if (target)
7220 return target;
7221 break;
7222
7223 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7224 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7225 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7226 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7227 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7228 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7229 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7230 if (target)
7231 return target;
7232 break;
7233
7234 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7235 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7236 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7237 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7238 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7239 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7240 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7241 if (target)
7242 return target;
7243 break;
7244
7245 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7246 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7247 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7248 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7249 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7250 if (mode == VOIDmode)
7251 mode = TYPE_MODE (boolean_type_node);
7252 if (!target || !register_operand (target, mode))
7253 target = gen_reg_rtx (mode);
7254
7255 mode = get_builtin_sync_mode
7256 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7257 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7258 if (target)
7259 return target;
7260 break;
7261
7262 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7263 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7264 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7265 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7266 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7267 mode = get_builtin_sync_mode
7268 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7269 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7270 if (target)
7271 return target;
7272 break;
7273
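/* As a rough illustration (a user-level sketch): the two flavours differ
   only in what they return,

     int v = 0;
     _Bool swapped  = __sync_bool_compare_and_swap (&v, 0, 1);
     int   previous = __sync_val_compare_and_swap (&v, 1, 2);

   which is why the BOOL variant above is expanded with a boolean-mode
   target register while the VAL variant uses the operand's own mode.  */
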
7274 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7275 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7276 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7277 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7278 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7279 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7280 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7281 if (target)
7282 return target;
7283 break;
7284
7285 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7286 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7287 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7288 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7289 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7290 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7291 expand_builtin_sync_lock_release (mode, exp);
7292 return const0_rtx;
7293
7294 case BUILT_IN_SYNC_SYNCHRONIZE:
7295 expand_builtin_sync_synchronize ();
7296 return const0_rtx;
7297
7298 case BUILT_IN_ATOMIC_EXCHANGE_1:
7299 case BUILT_IN_ATOMIC_EXCHANGE_2:
7300 case BUILT_IN_ATOMIC_EXCHANGE_4:
7301 case BUILT_IN_ATOMIC_EXCHANGE_8:
7302 case BUILT_IN_ATOMIC_EXCHANGE_16:
7303 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7304 target = expand_builtin_atomic_exchange (mode, exp, target);
7305 if (target)
7306 return target;
7307 break;
7308
7309 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7310 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7311 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7312 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7313 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7314 {
7315 unsigned int nargs, z;
7316 vec<tree, va_gc> *vec;
7317
7318 mode =
7319 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7320 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7321 if (target)
7322 return target;
7323
7324 /* If this is turned into an external library call, the weak parameter
7325 must be dropped to match the expected parameter list. */
7326 nargs = call_expr_nargs (exp);
7327 vec_alloc (vec, nargs - 1);
7328 for (z = 0; z < 3; z++)
7329 vec->quick_push (CALL_EXPR_ARG (exp, z));
7330 /* Skip the boolean weak parameter. */
7331 for (z = 4; z < 6; z++)
7332 vec->quick_push (CALL_EXPR_ARG (exp, z));
7333 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7334 break;
7335 }
7336
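/* As a rough illustration (a user-level sketch): the builtin form carries a
   WEAK flag,

     __atomic_compare_exchange_n (ptr, expected, desired,
                                  weak, success_order, failure_order);

   whereas the external library routine takes no such flag, so argument 3 is
   the one skipped above when the call is rebuilt for the library fallback.  */
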
7337 case BUILT_IN_ATOMIC_LOAD_1:
7338 case BUILT_IN_ATOMIC_LOAD_2:
7339 case BUILT_IN_ATOMIC_LOAD_4:
7340 case BUILT_IN_ATOMIC_LOAD_8:
7341 case BUILT_IN_ATOMIC_LOAD_16:
7342 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7343 target = expand_builtin_atomic_load (mode, exp, target);
7344 if (target)
7345 return target;
7346 break;
7347
7348 case BUILT_IN_ATOMIC_STORE_1:
7349 case BUILT_IN_ATOMIC_STORE_2:
7350 case BUILT_IN_ATOMIC_STORE_4:
7351 case BUILT_IN_ATOMIC_STORE_8:
7352 case BUILT_IN_ATOMIC_STORE_16:
7353 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7354 target = expand_builtin_atomic_store (mode, exp);
7355 if (target)
7356 return const0_rtx;
7357 break;
7358
7359 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7360 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7361 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7362 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7363 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7364 {
7365 enum built_in_function lib;
7366 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7367 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7368 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7369 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7370 ignore, lib);
7371 if (target)
7372 return target;
7373 break;
7374 }
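/* As a rough illustration (a sketch of the fallback, not the exact RTL):
   when no op-and-fetch pattern is available, the result can be
   reconstructed from the fetch-and-op form,

     new_value = __atomic_fetch_add (ptr, val, order) + val;

   which is why each *_FETCH case here passes the corresponding FETCH_* code
   as the library fallback to expand_builtin_atomic_fetch_op.  */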
7375 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7376 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7377 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7378 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7379 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7380 {
7381 enum built_in_function lib;
7382 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7383 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7384 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7385 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7386 ignore, lib);
7387 if (target)
7388 return target;
7389 break;
7390 }
7391 case BUILT_IN_ATOMIC_AND_FETCH_1:
7392 case BUILT_IN_ATOMIC_AND_FETCH_2:
7393 case BUILT_IN_ATOMIC_AND_FETCH_4:
7394 case BUILT_IN_ATOMIC_AND_FETCH_8:
7395 case BUILT_IN_ATOMIC_AND_FETCH_16:
7396 {
7397 enum built_in_function lib;
7398 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7399 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7400 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7401 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7402 ignore, lib);
7403 if (target)
7404 return target;
7405 break;
7406 }
7407 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7408 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7409 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7410 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7411 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7412 {
7413 enum built_in_function lib;
7414 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7415 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7416 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7417 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7418 ignore, lib);
7419 if (target)
7420 return target;
7421 break;
7422 }
7423 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7424 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7425 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7426 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7427 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7428 {
7429 enum built_in_function lib;
7430 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7431 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7432 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7433 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7434 ignore, lib);
7435 if (target)
7436 return target;
7437 break;
7438 }
7439 case BUILT_IN_ATOMIC_OR_FETCH_1:
7440 case BUILT_IN_ATOMIC_OR_FETCH_2:
7441 case BUILT_IN_ATOMIC_OR_FETCH_4:
7442 case BUILT_IN_ATOMIC_OR_FETCH_8:
7443 case BUILT_IN_ATOMIC_OR_FETCH_16:
7444 {
7445 enum built_in_function lib;
7446 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7447 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7448 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7449 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7450 ignore, lib);
7451 if (target)
7452 return target;
7453 break;
7454 }
7455 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7456 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7457 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7458 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7459 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7460 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7461 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7462 ignore, BUILT_IN_NONE);
7463 if (target)
7464 return target;
7465 break;
7466
7467 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7468 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7469 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7470 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7471 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7472 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7473 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7474 ignore, BUILT_IN_NONE);
7475 if (target)
7476 return target;
7477 break;
7478
7479 case BUILT_IN_ATOMIC_FETCH_AND_1:
7480 case BUILT_IN_ATOMIC_FETCH_AND_2:
7481 case BUILT_IN_ATOMIC_FETCH_AND_4:
7482 case BUILT_IN_ATOMIC_FETCH_AND_8:
7483 case BUILT_IN_ATOMIC_FETCH_AND_16:
7484 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7485 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7486 ignore, BUILT_IN_NONE);
7487 if (target)
7488 return target;
7489 break;
7490
7491 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7492 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7493 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7494 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7495 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7496 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7497 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7498 ignore, BUILT_IN_NONE);
7499 if (target)
7500 return target;
7501 break;
7502
7503 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7504 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7505 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7506 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7507 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7508 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7509 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7510 ignore, BUILT_IN_NONE);
7511 if (target)
7512 return target;
7513 break;
7514
7515 case BUILT_IN_ATOMIC_FETCH_OR_1:
7516 case BUILT_IN_ATOMIC_FETCH_OR_2:
7517 case BUILT_IN_ATOMIC_FETCH_OR_4:
7518 case BUILT_IN_ATOMIC_FETCH_OR_8:
7519 case BUILT_IN_ATOMIC_FETCH_OR_16:
7520 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7521 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7522 ignore, BUILT_IN_NONE);
7523 if (target)
7524 return target;
7525 break;
7526
7527 case BUILT_IN_ATOMIC_TEST_AND_SET:
7528 return expand_builtin_atomic_test_and_set (exp, target);
7529
7530 case BUILT_IN_ATOMIC_CLEAR:
7531 return expand_builtin_atomic_clear (exp);
7532
7533 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7534 return expand_builtin_atomic_always_lock_free (exp);
7535
7536 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7537 target = expand_builtin_atomic_is_lock_free (exp);
7538 if (target)
7539 return target;
7540 break;
7541
7542 case BUILT_IN_ATOMIC_THREAD_FENCE:
7543 expand_builtin_atomic_thread_fence (exp);
7544 return const0_rtx;
7545
7546 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7547 expand_builtin_atomic_signal_fence (exp);
7548 return const0_rtx;
7549
7550 case BUILT_IN_OBJECT_SIZE:
7551 return expand_builtin_object_size (exp);
7552
7553 case BUILT_IN_MEMCPY_CHK:
7554 case BUILT_IN_MEMPCPY_CHK:
7555 case BUILT_IN_MEMMOVE_CHK:
7556 case BUILT_IN_MEMSET_CHK:
7557 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7558 if (target)
7559 return target;
7560 break;
7561
7562 case BUILT_IN_STRCPY_CHK:
7563 case BUILT_IN_STPCPY_CHK:
7564 case BUILT_IN_STRNCPY_CHK:
7565 case BUILT_IN_STPNCPY_CHK:
7566 case BUILT_IN_STRCAT_CHK:
7567 case BUILT_IN_STRNCAT_CHK:
7568 case BUILT_IN_SNPRINTF_CHK:
7569 case BUILT_IN_VSNPRINTF_CHK:
7570 maybe_emit_chk_warning (exp, fcode);
7571 break;
7572
7573 case BUILT_IN_SPRINTF_CHK:
7574 case BUILT_IN_VSPRINTF_CHK:
7575 maybe_emit_sprintf_chk_warning (exp, fcode);
7576 break;
7577
7578 case BUILT_IN_FREE:
7579 if (warn_free_nonheap_object)
7580 maybe_emit_free_warning (exp);
7581 break;
7582
7583 case BUILT_IN_THREAD_POINTER:
7584 return expand_builtin_thread_pointer (exp, target);
7585
7586 case BUILT_IN_SET_THREAD_POINTER:
7587 expand_builtin_set_thread_pointer (exp);
7588 return const0_rtx;
7589
7590 case BUILT_IN_CILK_DETACH:
7591 expand_builtin_cilk_detach (exp);
7592 return const0_rtx;
7593
7594 case BUILT_IN_CILK_POP_FRAME:
7595 expand_builtin_cilk_pop_frame (exp);
7596 return const0_rtx;
7597
7598 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7599 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7600 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7601 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7602 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7603 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7604 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7605 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7606 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7607 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7608 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7609 /* We allow user CHKP builtins if Pointer Bounds
7610 Checker is off. */
7611 if (!chkp_function_instrumented_p (current_function_decl))
7612 {
7613 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7614 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7615 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7616 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7617 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7618 return expand_normal (CALL_EXPR_ARG (exp, 0));
7619 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7620 return expand_normal (size_zero_node);
7621 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7622 return expand_normal (size_int (-1));
7623 else
7624 return const0_rtx;
7625 }
7626 /* FALLTHROUGH */
7627
7628 case BUILT_IN_CHKP_BNDMK:
7629 case BUILT_IN_CHKP_BNDSTX:
7630 case BUILT_IN_CHKP_BNDCL:
7631 case BUILT_IN_CHKP_BNDCU:
7632 case BUILT_IN_CHKP_BNDLDX:
7633 case BUILT_IN_CHKP_BNDRET:
7634 case BUILT_IN_CHKP_INTERSECT:
7635 case BUILT_IN_CHKP_NARROW:
7636 case BUILT_IN_CHKP_EXTRACT_LOWER:
7637 case BUILT_IN_CHKP_EXTRACT_UPPER:
7638 /* Software implementation of Pointer Bounds Checker is NYI.
7639 Target support is required. */
7640 error ("Your target platform does not support -fcheck-pointer-bounds");
7641 break;
7642
7643 case BUILT_IN_ACC_ON_DEVICE:
7644 /* Do a library call if we failed to expand the builtin when
7645 folding. */
7646 break;
7647
7648 default: /* Just do a library call if the builtin is unknown. */
7649 break;
7650 }
7651
7652 /* The switch statement above can drop through to cause the function
7653 to be called normally. */
7654 return expand_call (exp, target, ignore);
7655 }
7656
7657 /* Similar to expand_builtin but is used for instrumented calls. */
7658
7659 rtx
7660 expand_builtin_with_bounds (tree exp, rtx target,
7661 rtx subtarget ATTRIBUTE_UNUSED,
7662 machine_mode mode, int ignore)
7663 {
7664 tree fndecl = get_callee_fndecl (exp);
7665 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7666
7667 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7668
7669 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7670 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7671
7672 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7673 && fcode < END_CHKP_BUILTINS);
7674
7675 switch (fcode)
7676 {
7677 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7678 target = expand_builtin_memcpy_with_bounds (exp, target);
7679 if (target)
7680 return target;
7681 break;
7682
7683 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7684 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7685 if (target)
7686 return target;
7687 break;
7688
7689 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7690 target = expand_builtin_memset_with_bounds (exp, target, mode);
7691 if (target)
7692 return target;
7693 break;
7694
7695 default:
7696 break;
7697 }
7698
7699 /* The switch statement above can drop through to cause the function
7700 to be called normally. */
7701 return expand_call (exp, target, ignore);
7702 }
7703
7704 /* Determine whether a tree node represents a call to a built-in
7705 function. If the tree T is a call to a built-in function with
7706 the right number of arguments of the appropriate types, return
7707 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7708 Otherwise the return value is END_BUILTINS. */
7709
7710 enum built_in_function
7711 builtin_mathfn_code (const_tree t)
7712 {
7713 const_tree fndecl, arg, parmlist;
7714 const_tree argtype, parmtype;
7715 const_call_expr_arg_iterator iter;
7716
7717 if (TREE_CODE (t) != CALL_EXPR
7718 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7719 return END_BUILTINS;
7720
7721 fndecl = get_callee_fndecl (t);
7722 if (fndecl == NULL_TREE
7723 || TREE_CODE (fndecl) != FUNCTION_DECL
7724 || ! DECL_BUILT_IN (fndecl)
7725 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7726 return END_BUILTINS;
7727
7728 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7729 init_const_call_expr_arg_iterator (t, &iter);
7730 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7731 {
7732 /* If a function doesn't take a variable number of arguments,
7733 the last element in the list will have type `void'. */
7734 parmtype = TREE_VALUE (parmlist);
7735 if (VOID_TYPE_P (parmtype))
7736 {
7737 if (more_const_call_expr_args_p (&iter))
7738 return END_BUILTINS;
7739 return DECL_FUNCTION_CODE (fndecl);
7740 }
7741
7742 if (! more_const_call_expr_args_p (&iter))
7743 return END_BUILTINS;
7744
7745 arg = next_const_call_expr_arg (&iter);
7746 argtype = TREE_TYPE (arg);
7747
7748 if (SCALAR_FLOAT_TYPE_P (parmtype))
7749 {
7750 if (! SCALAR_FLOAT_TYPE_P (argtype))
7751 return END_BUILTINS;
7752 }
7753 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7754 {
7755 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7756 return END_BUILTINS;
7757 }
7758 else if (POINTER_TYPE_P (parmtype))
7759 {
7760 if (! POINTER_TYPE_P (argtype))
7761 return END_BUILTINS;
7762 }
7763 else if (INTEGRAL_TYPE_P (parmtype))
7764 {
7765 if (! INTEGRAL_TYPE_P (argtype))
7766 return END_BUILTINS;
7767 }
7768 else
7769 return END_BUILTINS;
7770 }
7771
7772 /* Variable-length argument list. */
7773 return DECL_FUNCTION_CODE (fndecl);
7774 }
7775
7776 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7777 evaluate to a constant. */
7778
7779 static tree
7780 fold_builtin_constant_p (tree arg)
7781 {
7782 /* We return 1 for a numeric type that's known to be a constant
7783 value at compile-time or for an aggregate type that's a
7784 literal constant. */
7785 STRIP_NOPS (arg);
7786
7787 /* If we know this is a constant, return the constant 1. */
7788 if (CONSTANT_CLASS_P (arg)
7789 || (TREE_CODE (arg) == CONSTRUCTOR
7790 && TREE_CONSTANT (arg)))
7791 return integer_one_node;
7792 if (TREE_CODE (arg) == ADDR_EXPR)
7793 {
7794 tree op = TREE_OPERAND (arg, 0);
7795 if (TREE_CODE (op) == STRING_CST
7796 || (TREE_CODE (op) == ARRAY_REF
7797 && integer_zerop (TREE_OPERAND (op, 1))
7798 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7799 return integer_one_node;
7800 }
7801
7802 /* If this expression has side effects, show we don't know it to be a
7803 constant. Likewise if it's a pointer or aggregate type, since in
7804 those cases we only want literals, as those are only optimized
7805 when generating RTL, not later.
7806 And finally, if we are compiling an initializer, not code, we
7807 need to return a definite result now; there's not going to be any
7808 more optimization done. */
7809 if (TREE_SIDE_EFFECTS (arg)
7810 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7811 || POINTER_TYPE_P (TREE_TYPE (arg))
7812 || cfun == 0
7813 || folding_initializer
7814 || force_folding_builtin_constant_p)
7815 return integer_zero_node;
7816
7817 return NULL_TREE;
7818 }
7819
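/* As a rough illustration (a user-level sketch) of the folding above:

     __builtin_constant_p (42)        evaluates to 1
     __builtin_constant_p ("string")  evaluates to 1
     __builtin_constant_p (variable)  folds to 0 only once no further
                                      optimization could prove it constant

   In the last case NULL_TREE is returned here so later passes get another
   chance, unless we are folding an initializer or were asked to force a
   result.  */
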
7820 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7821 return it as a truthvalue. */
7822
7823 static tree
7824 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7825 tree predictor)
7826 {
7827 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7828
7829 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7830 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7831 ret_type = TREE_TYPE (TREE_TYPE (fn));
7832 pred_type = TREE_VALUE (arg_types);
7833 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7834
7835 pred = fold_convert_loc (loc, pred_type, pred);
7836 expected = fold_convert_loc (loc, expected_type, expected);
7837 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7838 predictor);
7839
7840 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7841 build_int_cst (ret_type, 0));
7842 }
7843
7844 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7845 Return NULL_TREE if no simplification is possible. */
7846
7847 tree
7848 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7849 {
7850 tree inner, fndecl, inner_arg0;
7851 enum tree_code code;
7852
7853 /* Distribute the expected value over short-circuiting operators.
7854 See through the cast from truthvalue_type_node to long. */
7855 inner_arg0 = arg0;
7856 while (CONVERT_EXPR_P (inner_arg0)
7857 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7858 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7859 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7860
7861 /* If this is a builtin_expect within a builtin_expect, keep the
7862 inner one. See through a comparison against a constant. It
7863 might have been added to create a truthvalue. */
7864 inner = inner_arg0;
7865
7866 if (COMPARISON_CLASS_P (inner)
7867 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7868 inner = TREE_OPERAND (inner, 0);
7869
7870 if (TREE_CODE (inner) == CALL_EXPR
7871 && (fndecl = get_callee_fndecl (inner))
7872 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7873 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7874 return arg0;
7875
7876 inner = inner_arg0;
7877 code = TREE_CODE (inner);
7878 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7879 {
7880 tree op0 = TREE_OPERAND (inner, 0);
7881 tree op1 = TREE_OPERAND (inner, 1);
7882
7883 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7884 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7885 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7886
7887 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7888 }
7889
7890 /* If the argument isn't invariant then there's nothing else we can do. */
7891 if (!TREE_CONSTANT (inner_arg0))
7892 return NULL_TREE;
7893
7894 /* If we expect that a comparison against the argument will fold to
7895 a constant, return the constant. In practice, this means a true
7896 constant or the address of a non-weak symbol. */
7897 inner = inner_arg0;
7898 STRIP_NOPS (inner);
7899 if (TREE_CODE (inner) == ADDR_EXPR)
7900 {
7901 do
7902 {
7903 inner = TREE_OPERAND (inner, 0);
7904 }
7905 while (TREE_CODE (inner) == COMPONENT_REF
7906 || TREE_CODE (inner) == ARRAY_REF);
7907 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7908 return NULL_TREE;
7909 }
7910
7911 /* Otherwise, ARG0 already has the proper type for the return value. */
7912 return arg0;
7913 }
7914
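/* As a rough illustration (a source-level sketch) of the distribution over
   short-circuit operators performed above:

     __builtin_expect (a && b, 1)

   is rewritten to roughly

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so that each arm of the short-circuit carries its own prediction.  */
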
7915 /* Fold a call to __builtin_classify_type with argument ARG. */
7916
7917 static tree
7918 fold_builtin_classify_type (tree arg)
7919 {
7920 if (arg == 0)
7921 return build_int_cst (integer_type_node, no_type_class);
7922
7923 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7924 }
7925
7926 /* Fold a call to __builtin_strlen with argument ARG. */
7927
7928 static tree
7929 fold_builtin_strlen (location_t loc, tree type, tree arg)
7930 {
7931 if (!validate_arg (arg, POINTER_TYPE))
7932 return NULL_TREE;
7933 else
7934 {
7935 tree len = c_strlen (arg, 0);
7936
7937 if (len)
7938 return fold_convert_loc (loc, type, len);
7939
7940 return NULL_TREE;
7941 }
7942 }
7943
7944 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7945
7946 static tree
7947 fold_builtin_inf (location_t loc, tree type, int warn)
7948 {
7949 REAL_VALUE_TYPE real;
7950
7951 /* __builtin_inff is intended to be usable to define INFINITY on all
7952 targets. If an infinity is not available, INFINITY expands "to a
7953 positive constant of type float that overflows at translation
7954 time", footnote "In this case, using INFINITY will violate the
7955 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7956 Thus we pedwarn to ensure this constraint violation is
7957 diagnosed. */
7958 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7959 pedwarn (loc, 0, "target format does not support infinity");
7960
7961 real_inf (&real);
7962 return build_real (type, real);
7963 }
7964
7965 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7966 NULL_TREE if no simplification can be made. */
7967
7968 static tree
7969 fold_builtin_sincos (location_t loc,
7970 tree arg0, tree arg1, tree arg2)
7971 {
7972 tree type;
7973 tree fndecl, call = NULL_TREE;
7974
7975 if (!validate_arg (arg0, REAL_TYPE)
7976 || !validate_arg (arg1, POINTER_TYPE)
7977 || !validate_arg (arg2, POINTER_TYPE))
7978 return NULL_TREE;
7979
7980 type = TREE_TYPE (arg0);
7981
7982 /* Calculate the result when the argument is a constant. */
7983 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7984 if (fn == END_BUILTINS)
7985 return NULL_TREE;
7986
7987 /* Canonicalize sincos to cexpi. */
7988 if (TREE_CODE (arg0) == REAL_CST)
7989 {
7990 tree complex_type = build_complex_type (type);
7991 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7992 }
7993 if (!call)
7994 {
7995 if (!targetm.libc_has_function (function_c99_math_complex)
7996 || !builtin_decl_implicit_p (fn))
7997 return NULL_TREE;
7998 fndecl = builtin_decl_explicit (fn);
7999 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8000 call = builtin_save_expr (call);
8001 }
8002
8003 return build2 (COMPOUND_EXPR, void_type_node,
8004 build2 (MODIFY_EXPR, void_type_node,
8005 build_fold_indirect_ref_loc (loc, arg1),
8006 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8007 build2 (MODIFY_EXPR, void_type_node,
8008 build_fold_indirect_ref_loc (loc, arg2),
8009 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8010 }
8011
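/* As a rough illustration (a source-level sketch, GNU extensions assumed)
   of the canonicalization above:

     double s, c;
     sincos (x, &s, &c);

   becomes the equivalent of

     _Complex double t = __builtin_cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   so a single cexpi computation can be shared by later passes.  */
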
8012 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
8013 Return NULL_TREE if no simplification can be made. */
8014
8015 static tree
8016 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8017 {
8018 if (!validate_arg (arg1, POINTER_TYPE)
8019 || !validate_arg (arg2, POINTER_TYPE)
8020 || !validate_arg (len, INTEGER_TYPE))
8021 return NULL_TREE;
8022
8023 /* If the LEN parameter is zero, return zero. */
8024 if (integer_zerop (len))
8025 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8026 arg1, arg2);
8027
8028 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8029 if (operand_equal_p (arg1, arg2, 0))
8030 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8031
8032 /* If the LEN parameter is one, return an expression corresponding to
8033 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8034 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8035 {
8036 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8037 tree cst_uchar_ptr_node
8038 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8039
8040 tree ind1
8041 = fold_convert_loc (loc, integer_type_node,
8042 build1 (INDIRECT_REF, cst_uchar_node,
8043 fold_convert_loc (loc,
8044 cst_uchar_ptr_node,
8045 arg1)));
8046 tree ind2
8047 = fold_convert_loc (loc, integer_type_node,
8048 build1 (INDIRECT_REF, cst_uchar_node,
8049 fold_convert_loc (loc,
8050 cst_uchar_ptr_node,
8051 arg2)));
8052 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8053 }
8054
8055 return NULL_TREE;
8056 }
8057
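/* As a rough illustration (a source-level sketch) of the single-byte case
   above:

     int r = memcmp (p, q, 1);

   folds to the equivalent of

     int r = *(const unsigned char *) p - *(const unsigned char *) q;

   matching memcmp's requirement that bytes compare as unsigned char.  */
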
8058 /* Fold a call to builtin isascii with argument ARG. */
8059
8060 static tree
8061 fold_builtin_isascii (location_t loc, tree arg)
8062 {
8063 if (!validate_arg (arg, INTEGER_TYPE))
8064 return NULL_TREE;
8065 else
8066 {
8067 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8068 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8069 build_int_cst (integer_type_node,
8070 ~ (unsigned HOST_WIDE_INT) 0x7f));
8071 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8072 arg, integer_zero_node);
8073 }
8074 }
8075
8076 /* Fold a call to builtin toascii with argument ARG. */
8077
8078 static tree
8079 fold_builtin_toascii (location_t loc, tree arg)
8080 {
8081 if (!validate_arg (arg, INTEGER_TYPE))
8082 return NULL_TREE;
8083
8084 /* Transform toascii(c) -> (c & 0x7f). */
8085 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8086 build_int_cst (integer_type_node, 0x7f));
8087 }
8088
8089 /* Fold a call to builtin isdigit with argument ARG. */
8090
8091 static tree
8092 fold_builtin_isdigit (location_t loc, tree arg)
8093 {
8094 if (!validate_arg (arg, INTEGER_TYPE))
8095 return NULL_TREE;
8096 else
8097 {
8098 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8099 /* According to the C standard, isdigit is unaffected by locale.
8100 However, it definitely is affected by the target character set. */
8101 unsigned HOST_WIDE_INT target_digit0
8102 = lang_hooks.to_target_charset ('0');
8103
8104 if (target_digit0 == 0)
8105 return NULL_TREE;
8106
8107 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8108 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8109 build_int_cst (unsigned_type_node, target_digit0));
8110 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8111 build_int_cst (unsigned_type_node, 9));
8112 }
8113 }
8114
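/* As a rough illustration (a worked example, assuming an ASCII-style target
   character set) of why a single unsigned comparison suffices above:

     c == '5':  (unsigned) '5' - '0' == 5, and 5 <= 9 holds;
     c == ' ':  (unsigned) ' ' - '0' wraps to a huge value, so <= 9 fails.

   Both the below-'0' and the above-'9' cases therefore fail the one test.  */
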
8115 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8116
8117 static tree
8118 fold_builtin_fabs (location_t loc, tree arg, tree type)
8119 {
8120 if (!validate_arg (arg, REAL_TYPE))
8121 return NULL_TREE;
8122
8123 arg = fold_convert_loc (loc, type, arg);
8124 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8125 }
8126
8127 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8128
8129 static tree
8130 fold_builtin_abs (location_t loc, tree arg, tree type)
8131 {
8132 if (!validate_arg (arg, INTEGER_TYPE))
8133 return NULL_TREE;
8134
8135 arg = fold_convert_loc (loc, type, arg);
8136 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8137 }
8138
8139 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8140
8141 static tree
8142 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8143 {
8144 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8145 if (validate_arg (arg0, REAL_TYPE)
8146 && validate_arg (arg1, REAL_TYPE)
8147 && validate_arg (arg2, REAL_TYPE)
8148 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8149 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8150
8151 return NULL_TREE;
8152 }
8153
8154 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8155
8156 static tree
8157 fold_builtin_carg (location_t loc, tree arg, tree type)
8158 {
8159 if (validate_arg (arg, COMPLEX_TYPE)
8160 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8161 {
8162 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8163
8164 if (atan2_fn)
8165 {
8166 tree new_arg = builtin_save_expr (arg);
8167 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8168 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8169 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8170 }
8171 }
8172
8173 return NULL_TREE;
8174 }
8175
8176 /* Fold a call to builtin frexp. We can assume the base is 2. */
8177
8178 static tree
8179 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8180 {
8181 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8182 return NULL_TREE;
8183
8184 STRIP_NOPS (arg0);
8185
8186 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8187 return NULL_TREE;
8188
8189 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8190
8191 /* Proceed if a valid pointer type was passed in. */
8192 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8193 {
8194 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8195 tree frac, exp;
8196
8197 switch (value->cl)
8198 {
8199 case rvc_zero:
8200 /* For +-0, return (*exp = 0, +-0). */
8201 exp = integer_zero_node;
8202 frac = arg0;
8203 break;
8204 case rvc_nan:
8205 case rvc_inf:
8206 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8207 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8208 case rvc_normal:
8209 {
8210 /* Since the frexp function always expects base 2, and in
8211 GCC normalized significands are already in the range
8212 [0.5, 1.0), we have exactly what frexp wants. */
8213 REAL_VALUE_TYPE frac_rvt = *value;
8214 SET_REAL_EXP (&frac_rvt, 0);
8215 frac = build_real (rettype, frac_rvt);
8216 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8217 }
8218 break;
8219 default:
8220 gcc_unreachable ();
8221 }
8222
8223 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8224 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8225 TREE_SIDE_EFFECTS (arg1) = 1;
8226 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8227 }
8228
8229 return NULL_TREE;
8230 }
8231
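/* As a rough illustration (a worked example) of the constant folding above:

     int e;
     double f = frexp (8.0, &e);

   folds to f == 0.5 and e == 4, since 8.0 == 0.5 * 2^4 and the normalized
   significand is kept in [0.5, 1.0) as the comment above notes.  */
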
8232 /* Fold a call to builtin modf. */
8233
8234 static tree
8235 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8236 {
8237 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8238 return NULL_TREE;
8239
8240 STRIP_NOPS (arg0);
8241
8242 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8243 return NULL_TREE;
8244
8245 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8246
8247 /* Proceed if a valid pointer type was passed in. */
8248 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8249 {
8250 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8251 REAL_VALUE_TYPE trunc, frac;
8252
8253 switch (value->cl)
8254 {
8255 case rvc_nan:
8256 case rvc_zero:
8257 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8258 trunc = frac = *value;
8259 break;
8260 case rvc_inf:
8261 /* For +-Inf, return (*arg1 = arg0, +-0). */
8262 frac = dconst0;
8263 frac.sign = value->sign;
8264 trunc = *value;
8265 break;
8266 case rvc_normal:
8267 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8268 real_trunc (&trunc, VOIDmode, value);
8269 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8270 /* If the original number was negative and already
8271 integral, then the fractional part is -0.0. */
8272 if (value->sign && frac.cl == rvc_zero)
8273 frac.sign = value->sign;
8274 break;
8275 }
8276
8277 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8278 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8279 build_real (rettype, trunc));
8280 TREE_SIDE_EFFECTS (arg1) = 1;
8281 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8282 build_real (rettype, frac));
8283 }
8284
8285 return NULL_TREE;
8286 }
8287
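/* As a rough illustration (a worked example) of the constant folding above:

     double ip;
     double frac = modf (-3.5, &ip);

   folds to frac == -0.5 and ip == -3.0, and an already-integral negative
   argument such as -2.0 yields a fractional part of -0.0, as handled in the
   rvc_normal case.  */
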
8288 /* Given a location LOC, an interclass builtin function decl FNDECL
8289 and its single argument ARG, return a folded expression computing
8290 the same, or NULL_TREE if we either couldn't or didn't want to fold
8291 (the latter happens if there's an RTL instruction available). */
8292
8293 static tree
8294 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8295 {
8296 machine_mode mode;
8297
8298 if (!validate_arg (arg, REAL_TYPE))
8299 return NULL_TREE;
8300
8301 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8302 return NULL_TREE;
8303
8304 mode = TYPE_MODE (TREE_TYPE (arg));
8305
8306 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8307
8308 /* If there is no optab, try generic code. */
8309 switch (DECL_FUNCTION_CODE (fndecl))
8310 {
8311 tree result;
8312
8313 CASE_FLT_FN (BUILT_IN_ISINF):
8314 {
8315 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8316 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8317 tree type = TREE_TYPE (arg);
8318 REAL_VALUE_TYPE r;
8319 char buf[128];
8320
8321 if (is_ibm_extended)
8322 {
8323 /* NaN and Inf are encoded in the high-order double value
8324 only. The low-order value is not significant. */
8325 type = double_type_node;
8326 mode = DFmode;
8327 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8328 }
8329 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8330 real_from_string (&r, buf);
8331 result = build_call_expr (isgr_fn, 2,
8332 fold_build1_loc (loc, ABS_EXPR, type, arg),
8333 build_real (type, r));
8334 return result;
8335 }
8336 CASE_FLT_FN (BUILT_IN_FINITE):
8337 case BUILT_IN_ISFINITE:
8338 {
8339 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8340 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8341 tree type = TREE_TYPE (arg);
8342 REAL_VALUE_TYPE r;
8343 char buf[128];
8344
8345 if (is_ibm_extended)
8346 {
8347 /* NaN and Inf are encoded in the high-order double value
8348 only. The low-order value is not significant. */
8349 type = double_type_node;
8350 mode = DFmode;
8351 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8352 }
8353 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8354 real_from_string (&r, buf);
8355 result = build_call_expr (isle_fn, 2,
8356 fold_build1_loc (loc, ABS_EXPR, type, arg),
8357 build_real (type, r));
8358 /*result = fold_build2_loc (loc, UNGT_EXPR,
8359 TREE_TYPE (TREE_TYPE (fndecl)),
8360 fold_build1_loc (loc, ABS_EXPR, type, arg),
8361 build_real (type, r));
8362 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8363 TREE_TYPE (TREE_TYPE (fndecl)),
8364 result);*/
8365 return result;
8366 }
8367 case BUILT_IN_ISNORMAL:
8368 {
8369 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8370 islessequal(fabs(x),DBL_MAX). */
8371 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8372 tree type = TREE_TYPE (arg);
8373 tree orig_arg, max_exp, min_exp;
8374 machine_mode orig_mode = mode;
8375 REAL_VALUE_TYPE rmax, rmin;
8376 char buf[128];
8377
8378 orig_arg = arg = builtin_save_expr (arg);
8379 if (is_ibm_extended)
8380 {
8381 /* Use double to test the normal range of IBM extended
8382 precision. Emin for IBM extended precision is
8383 different to emin for IEEE double, being 53 higher
8384 since the low double exponent is at least 53 lower
8385 than the high double exponent. */
8386 type = double_type_node;
8387 mode = DFmode;
8388 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8389 }
8390 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8391
8392 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8393 real_from_string (&rmax, buf);
8394 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8395 real_from_string (&rmin, buf);
8396 max_exp = build_real (type, rmax);
8397 min_exp = build_real (type, rmin);
8398
8399 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8400 if (is_ibm_extended)
8401 {
8402 /* Testing the high end of the range is done just using
8403 the high double, using the same test as isfinite().
8404 For the subnormal end of the range we first test the
8405 high double, then if its magnitude is equal to the
8406 limit of 0x1p-969, we test whether the low double is
8407 non-zero and opposite sign to the high double. */
8408 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8409 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8410 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8411 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8412 arg, min_exp);
8413 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8414 complex_double_type_node, orig_arg);
8415 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8416 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8417 tree zero = build_real (type, dconst0);
8418 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8419 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8420 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8421 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8422 fold_build3 (COND_EXPR,
8423 integer_type_node,
8424 hilt, logt, lolt));
8425 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8426 eq_min, ok_lo);
8427 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8428 gt_min, eq_min);
8429 }
8430 else
8431 {
8432 tree const isge_fn
8433 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8434 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8435 }
8436 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8437 max_exp, min_exp);
8438 return result;
8439 }
8440 default:
8441 break;
8442 }
8443
8444 return NULL_TREE;
8445 }
8446
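/* As a rough illustration (a source-level sketch for plain IEEE double) of
   the generic rewrites above:

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  isgreaterequal (fabs (x), DBL_MIN)
                       & islessequal (fabs (x), DBL_MAX)

   The quiet comparison builtins are used so that a NaN argument does not
   raise an invalid-operation exception.  */
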
8447 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8448 ARG is the argument for the call. */
8449
8450 static tree
8451 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8452 {
8453 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8454
8455 if (!validate_arg (arg, REAL_TYPE))
8456 return NULL_TREE;
8457
8458 switch (builtin_index)
8459 {
8460 case BUILT_IN_ISINF:
8461 if (!HONOR_INFINITIES (arg))
8462 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8463
8464 return NULL_TREE;
8465
8466 case BUILT_IN_ISINF_SIGN:
8467 {
8468 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8469 /* In a boolean context, GCC will fold the inner COND_EXPR to
8470 1. So e.g. "if (isinf_sign(x))" would be folded to just
8471 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8472 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8473 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8474 tree tmp = NULL_TREE;
8475
8476 arg = builtin_save_expr (arg);
8477
8478 if (signbit_fn && isinf_fn)
8479 {
8480 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8481 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8482
8483 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8484 signbit_call, integer_zero_node);
8485 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8486 isinf_call, integer_zero_node);
8487
8488 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8489 integer_minus_one_node, integer_one_node);
8490 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8491 isinf_call, tmp,
8492 integer_zero_node);
8493 }
8494
8495 return tmp;
8496 }
8497
8498 case BUILT_IN_ISFINITE:
8499 if (!HONOR_NANS (arg)
8500 && !HONOR_INFINITIES (arg))
8501 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8502
8503 return NULL_TREE;
8504
8505 case BUILT_IN_ISNAN:
8506 if (!HONOR_NANS (arg))
8507 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8508
8509 {
8510 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8511 if (is_ibm_extended)
8512 {
8513 /* NaN and Inf are encoded in the high-order double value
8514 only. The low-order value is not significant. */
8515 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8516 }
8517 }
8518 arg = builtin_save_expr (arg);
8519 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8520
8521 default:
8522 gcc_unreachable ();
8523 }
8524 }
8525
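/* A minimal sketch of the folds above (hypothetical user code; the
   names x, a, b and c are illustrative only, not part of GCC):

       double x = ...;
       int a = __builtin_isnan (x);      folds to  x UNORDERED x
       int b = __builtin_isinf (x);      folds to  0 under -ffinite-math-only
       int c = __builtin_isfinite (x);   folds to  1 when neither NaNs nor
                                         infinities are honored

   For IBM extended (double-double) arguments, only the high-order
   double is inspected for the NaN test, as handled above.  */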
8526 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8527 This builtin will generate code to return the appropriate floating
8528 point classification depending on the value of the floating point
8529 number passed in. The possible return values must be supplied as
8530 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8531 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8532 one floating point argument which is "type generic". */
8533
8534 static tree
8535 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8536 {
8537 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8538 arg, type, res, tmp;
8539 machine_mode mode;
8540 REAL_VALUE_TYPE r;
8541 char buf[128];
8542
8543 /* Verify the required arguments in the original call. */
8544 if (nargs != 6
8545 || !validate_arg (args[0], INTEGER_TYPE)
8546 || !validate_arg (args[1], INTEGER_TYPE)
8547 || !validate_arg (args[2], INTEGER_TYPE)
8548 || !validate_arg (args[3], INTEGER_TYPE)
8549 || !validate_arg (args[4], INTEGER_TYPE)
8550 || !validate_arg (args[5], REAL_TYPE))
8551 return NULL_TREE;
8552
8553 fp_nan = args[0];
8554 fp_infinite = args[1];
8555 fp_normal = args[2];
8556 fp_subnormal = args[3];
8557 fp_zero = args[4];
8558 arg = args[5];
8559 type = TREE_TYPE (arg);
8560 mode = TYPE_MODE (type);
8561 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8562
8563 /* fpclassify(x) ->
8564 isnan(x) ? FP_NAN :
8565 (fabs(x) == Inf ? FP_INFINITE :
8566 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8567 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8568
8569 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8570 build_real (type, dconst0));
8571 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8572 tmp, fp_zero, fp_subnormal);
8573
8574 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8575 real_from_string (&r, buf);
8576 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8577 arg, build_real (type, r));
8578 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8579
8580 if (HONOR_INFINITIES (mode))
8581 {
8582 real_inf (&r);
8583 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8584 build_real (type, r));
8585 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8586 fp_infinite, res);
8587 }
8588
8589 if (HONOR_NANS (mode))
8590 {
8591 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8592 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8593 }
8594
8595 return res;
8596 }
8597
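/* A minimal sketch of the expansion built above, assuming a double
   argument (the variable x is hypothetical):

       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                             FP_SUBNORMAL, FP_ZERO, x)

   folds to roughly

       !(x ORDERED x)          ? FP_NAN
     : fabs (x) == Inf         ? FP_INFINITE
     : fabs (x) >= 0x1p-1022   ? FP_NORMAL
     : fabs (x) == 0           ? FP_ZERO
     :                           FP_SUBNORMAL

   where 0x1p-1022 is 0x1p(emin - 1) for the mode, i.e. DBL_MIN.  */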
8598 /* Fold a call to an unordered comparison function such as
8599 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8600 being called and ARG0 and ARG1 are the arguments for the call.
8601 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8602 the opposite of the desired result. UNORDERED_CODE is used
8603 for modes that can hold NaNs and ORDERED_CODE is used for
8604 the rest. */
8605
8606 static tree
8607 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8608 enum tree_code unordered_code,
8609 enum tree_code ordered_code)
8610 {
8611 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8612 enum tree_code code;
8613 tree type0, type1;
8614 enum tree_code code0, code1;
8615 tree cmp_type = NULL_TREE;
8616
8617 type0 = TREE_TYPE (arg0);
8618 type1 = TREE_TYPE (arg1);
8619
8620 code0 = TREE_CODE (type0);
8621 code1 = TREE_CODE (type1);
8622
8623 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8624 /* Choose the wider of two real types. */
8625 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8626 ? type0 : type1;
8627 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8628 cmp_type = type0;
8629 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8630 cmp_type = type1;
8631
8632 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8633 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8634
8635 if (unordered_code == UNORDERED_EXPR)
8636 {
8637 if (!HONOR_NANS (arg0))
8638 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8639 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8640 }
8641
8642 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8643 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8644 fold_build2_loc (loc, code, type, arg0, arg1));
8645 }
8646
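/* A minimal sketch (x and y are hypothetical operands): with NaNs
   honored,

       __builtin_isgreater (x, y)    folds to  !(x UNLE y)
       __builtin_isunordered (x, y)  folds to  x UNORDERED y

   and when NaNs cannot occur the ordered codes are used instead, so
   isgreater simply becomes !(x <= y).  */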
8647 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8648 arithmetics if it can never overflow, or into internal functions that
8649 return both result of arithmetics and overflowed boolean flag in
8650 a complex integer result, or some other check for overflow.
8651 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8652 checking part of that. */
8653
8654 static tree
8655 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8656 tree arg0, tree arg1, tree arg2)
8657 {
8658 enum internal_fn ifn = IFN_LAST;
8659 /* The code of the expression corresponding to the type-generic
8660 built-in, or ERROR_MARK for the type-specific ones. */
8661 enum tree_code opcode = ERROR_MARK;
8662 bool ovf_only = false;
8663
8664 switch (fcode)
8665 {
8666 case BUILT_IN_ADD_OVERFLOW_P:
8667 ovf_only = true;
8668 /* FALLTHRU */
8669 case BUILT_IN_ADD_OVERFLOW:
8670 opcode = PLUS_EXPR;
8671 /* FALLTHRU */
8672 case BUILT_IN_SADD_OVERFLOW:
8673 case BUILT_IN_SADDL_OVERFLOW:
8674 case BUILT_IN_SADDLL_OVERFLOW:
8675 case BUILT_IN_UADD_OVERFLOW:
8676 case BUILT_IN_UADDL_OVERFLOW:
8677 case BUILT_IN_UADDLL_OVERFLOW:
8678 ifn = IFN_ADD_OVERFLOW;
8679 break;
8680 case BUILT_IN_SUB_OVERFLOW_P:
8681 ovf_only = true;
8682 /* FALLTHRU */
8683 case BUILT_IN_SUB_OVERFLOW:
8684 opcode = MINUS_EXPR;
8685 /* FALLTHRU */
8686 case BUILT_IN_SSUB_OVERFLOW:
8687 case BUILT_IN_SSUBL_OVERFLOW:
8688 case BUILT_IN_SSUBLL_OVERFLOW:
8689 case BUILT_IN_USUB_OVERFLOW:
8690 case BUILT_IN_USUBL_OVERFLOW:
8691 case BUILT_IN_USUBLL_OVERFLOW:
8692 ifn = IFN_SUB_OVERFLOW;
8693 break;
8694 case BUILT_IN_MUL_OVERFLOW_P:
8695 ovf_only = true;
8696 /* FALLTHRU */
8697 case BUILT_IN_MUL_OVERFLOW:
8698 opcode = MULT_EXPR;
8699 /* FALLTHRU */
8700 case BUILT_IN_SMUL_OVERFLOW:
8701 case BUILT_IN_SMULL_OVERFLOW:
8702 case BUILT_IN_SMULLL_OVERFLOW:
8703 case BUILT_IN_UMUL_OVERFLOW:
8704 case BUILT_IN_UMULL_OVERFLOW:
8705 case BUILT_IN_UMULLL_OVERFLOW:
8706 ifn = IFN_MUL_OVERFLOW;
8707 break;
8708 default:
8709 gcc_unreachable ();
8710 }
8711
8712 /* For the "generic" overloads, the first two arguments can have different
8713 types and the last argument determines the target type to use to check
8714 for overflow. The arguments of the other overloads all have the same
8715 type. */
8716 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8717
8718 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8719 arguments are constant, attempt to fold the built-in call into a constant
8720 expression indicating whether or not it detected an overflow. */
8721 if (ovf_only
8722 && TREE_CODE (arg0) == INTEGER_CST
8723 && TREE_CODE (arg1) == INTEGER_CST)
8724 /* Perform the computation in the target type and check for overflow. */
8725 return omit_one_operand_loc (loc, boolean_type_node,
8726 arith_overflowed_p (opcode, type, arg0, arg1)
8727 ? boolean_true_node : boolean_false_node,
8728 arg2);
8729
8730 tree ctype = build_complex_type (type);
8731 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8732 2, arg0, arg1);
8733 tree tgt = save_expr (call);
8734 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8735 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8736 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8737
8738 if (ovf_only)
8739 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8740
8741 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8742 tree store
8743 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8744 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8745 }
8746
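/* A minimal sketch of the generic overload (a, b and res are
   hypothetical; .ADD_OVERFLOW is the internal-function spelling seen
   in gimple dumps):

       int res;
       bool ovf = __builtin_add_overflow (a, b, &res);

   becomes, in effect,

       tmp = .ADD_OVERFLOW (a, b);        a complex integer value
       res = REALPART (tmp);              the arithmetic result
       ovf = (bool) IMAGPART (tmp);       the overflow flag

   while __builtin_add_overflow_p keeps only the flag and, when both
   operands are INTEGER_CSTs, folds to a constant outright.  */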
8747 /* Fold a call to __builtin_FILE to a constant string. */
8748
8749 static inline tree
8750 fold_builtin_FILE (location_t loc)
8751 {
8752 if (const char *fname = LOCATION_FILE (loc))
8753 return build_string_literal (strlen (fname) + 1, fname);
8754
8755 return build_string_literal (1, "");
8756 }
8757
8758 /* Fold a call to __builtin_FUNCTION to a constant string. */
8759
8760 static inline tree
8761 fold_builtin_FUNCTION ()
8762 {
8763 const char *name = "";
8764
8765 if (current_function_decl)
8766 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8767
8768 return build_string_literal (strlen (name) + 1, name);
8769 }
8770
8771 /* Fold a call to __builtin_LINE to an integer constant. */
8772
8773 static inline tree
8774 fold_builtin_LINE (location_t loc, tree type)
8775 {
8776 return build_int_cst (type, LOCATION_LINE (loc));
8777 }
8778
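/* A minimal sketch (the call below is hypothetical user code): the
   three folds above turn source-location builtins into constants, so

       log_msg (__builtin_FILE (), __builtin_LINE (), __builtin_FUNCTION ());

   folds to something like log_msg ("file.c", 42, "caller"), with the
   strings and line number taken from LOC and current_function_decl.  */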
8779 /* Fold a call to built-in function FNDECL with 0 arguments.
8780 This function returns NULL_TREE if no simplification was possible. */
8781
8782 static tree
8783 fold_builtin_0 (location_t loc, tree fndecl)
8784 {
8785 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8786 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8787 switch (fcode)
8788 {
8789 case BUILT_IN_FILE:
8790 return fold_builtin_FILE (loc);
8791
8792 case BUILT_IN_FUNCTION:
8793 return fold_builtin_FUNCTION ();
8794
8795 case BUILT_IN_LINE:
8796 return fold_builtin_LINE (loc, type);
8797
8798 CASE_FLT_FN (BUILT_IN_INF):
8799 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8800 case BUILT_IN_INFD32:
8801 case BUILT_IN_INFD64:
8802 case BUILT_IN_INFD128:
8803 return fold_builtin_inf (loc, type, true);
8804
8805 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8806 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8807 return fold_builtin_inf (loc, type, false);
8808
8809 case BUILT_IN_CLASSIFY_TYPE:
8810 return fold_builtin_classify_type (NULL_TREE);
8811
8812 default:
8813 break;
8814 }
8815 return NULL_TREE;
8816 }
8817
8818 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8819 This function returns NULL_TREE if no simplification was possible. */
8820
8821 static tree
8822 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8823 {
8824 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8825 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8826
8827 if (TREE_CODE (arg0) == ERROR_MARK)
8828 return NULL_TREE;
8829
8830 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8831 return ret;
8832
8833 switch (fcode)
8834 {
8835 case BUILT_IN_CONSTANT_P:
8836 {
8837 tree val = fold_builtin_constant_p (arg0);
8838
8839 /* Gimplification will pull the CALL_EXPR for the builtin out of
8840 an if condition. When not optimizing, we'll not CSE it back.
8841 To avoid regressions such as link errors, return false now. */
8842 if (!val && !optimize)
8843 val = integer_zero_node;
8844
8845 return val;
8846 }
8847
8848 case BUILT_IN_CLASSIFY_TYPE:
8849 return fold_builtin_classify_type (arg0);
8850
8851 case BUILT_IN_STRLEN:
8852 return fold_builtin_strlen (loc, type, arg0);
8853
8854 CASE_FLT_FN (BUILT_IN_FABS):
8855 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8856 case BUILT_IN_FABSD32:
8857 case BUILT_IN_FABSD64:
8858 case BUILT_IN_FABSD128:
8859 return fold_builtin_fabs (loc, arg0, type);
8860
8861 case BUILT_IN_ABS:
8862 case BUILT_IN_LABS:
8863 case BUILT_IN_LLABS:
8864 case BUILT_IN_IMAXABS:
8865 return fold_builtin_abs (loc, arg0, type);
8866
8867 CASE_FLT_FN (BUILT_IN_CONJ):
8868 if (validate_arg (arg0, COMPLEX_TYPE)
8869 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8870 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8871 break;
8872
8873 CASE_FLT_FN (BUILT_IN_CREAL):
8874 if (validate_arg (arg0, COMPLEX_TYPE)
8875 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8876 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8877 break;
8878
8879 CASE_FLT_FN (BUILT_IN_CIMAG):
8880 if (validate_arg (arg0, COMPLEX_TYPE)
8881 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8882 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8883 break;
8884
8885 CASE_FLT_FN (BUILT_IN_CARG):
8886 return fold_builtin_carg (loc, arg0, type);
8887
8888 case BUILT_IN_ISASCII:
8889 return fold_builtin_isascii (loc, arg0);
8890
8891 case BUILT_IN_TOASCII:
8892 return fold_builtin_toascii (loc, arg0);
8893
8894 case BUILT_IN_ISDIGIT:
8895 return fold_builtin_isdigit (loc, arg0);
8896
8897 CASE_FLT_FN (BUILT_IN_FINITE):
8898 case BUILT_IN_FINITED32:
8899 case BUILT_IN_FINITED64:
8900 case BUILT_IN_FINITED128:
8901 case BUILT_IN_ISFINITE:
8902 {
8903 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8904 if (ret)
8905 return ret;
8906 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8907 }
8908
8909 CASE_FLT_FN (BUILT_IN_ISINF):
8910 case BUILT_IN_ISINFD32:
8911 case BUILT_IN_ISINFD64:
8912 case BUILT_IN_ISINFD128:
8913 {
8914 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8915 if (ret)
8916 return ret;
8917 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8918 }
8919
8920 case BUILT_IN_ISNORMAL:
8921 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8922
8923 case BUILT_IN_ISINF_SIGN:
8924 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8925
8926 CASE_FLT_FN (BUILT_IN_ISNAN):
8927 case BUILT_IN_ISNAND32:
8928 case BUILT_IN_ISNAND64:
8929 case BUILT_IN_ISNAND128:
8930 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8931
8932 case BUILT_IN_FREE:
8933 if (integer_zerop (arg0))
8934 return build_empty_stmt (loc);
8935 break;
8936
8937 default:
8938 break;
8939 }
8940
8941 return NULL_TREE;
8942
8943 }
8944
8945 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8946 This function returns NULL_TREE if no simplification was possible. */
8947
8948 static tree
8949 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8950 {
8951 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8952 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8953
8954 if (TREE_CODE (arg0) == ERROR_MARK
8955 || TREE_CODE (arg1) == ERROR_MARK)
8956 return NULL_TREE;
8957
8958 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8959 return ret;
8960
8961 switch (fcode)
8962 {
8963 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8964 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8965 if (validate_arg (arg0, REAL_TYPE)
8966 && validate_arg (arg1, POINTER_TYPE))
8967 return do_mpfr_lgamma_r (arg0, arg1, type);
8968 break;
8969
8970 CASE_FLT_FN (BUILT_IN_FREXP):
8971 return fold_builtin_frexp (loc, arg0, arg1, type);
8972
8973 CASE_FLT_FN (BUILT_IN_MODF):
8974 return fold_builtin_modf (loc, arg0, arg1, type);
8975
8976 case BUILT_IN_STRSPN:
8977 return fold_builtin_strspn (loc, arg0, arg1);
8978
8979 case BUILT_IN_STRCSPN:
8980 return fold_builtin_strcspn (loc, arg0, arg1);
8981
8982 case BUILT_IN_STRPBRK:
8983 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8984
8985 case BUILT_IN_EXPECT:
8986 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8987
8988 case BUILT_IN_ISGREATER:
8989 return fold_builtin_unordered_cmp (loc, fndecl,
8990 arg0, arg1, UNLE_EXPR, LE_EXPR);
8991 case BUILT_IN_ISGREATEREQUAL:
8992 return fold_builtin_unordered_cmp (loc, fndecl,
8993 arg0, arg1, UNLT_EXPR, LT_EXPR);
8994 case BUILT_IN_ISLESS:
8995 return fold_builtin_unordered_cmp (loc, fndecl,
8996 arg0, arg1, UNGE_EXPR, GE_EXPR);
8997 case BUILT_IN_ISLESSEQUAL:
8998 return fold_builtin_unordered_cmp (loc, fndecl,
8999 arg0, arg1, UNGT_EXPR, GT_EXPR);
9000 case BUILT_IN_ISLESSGREATER:
9001 return fold_builtin_unordered_cmp (loc, fndecl,
9002 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9003 case BUILT_IN_ISUNORDERED:
9004 return fold_builtin_unordered_cmp (loc, fndecl,
9005 arg0, arg1, UNORDERED_EXPR,
9006 NOP_EXPR);
9007
9008 /* We do the folding for va_start in the expander. */
9009 case BUILT_IN_VA_START:
9010 break;
9011
9012 case BUILT_IN_OBJECT_SIZE:
9013 return fold_builtin_object_size (arg0, arg1);
9014
9015 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9016 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9017
9018 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9019 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9020
9021 default:
9022 break;
9023 }
9024 return NULL_TREE;
9025 }
9026
9027 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9028 and ARG2.
9029 This function returns NULL_TREE if no simplification was possible. */
9030
9031 static tree
9032 fold_builtin_3 (location_t loc, tree fndecl,
9033 tree arg0, tree arg1, tree arg2)
9034 {
9035 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9036 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9037
9038 if (TREE_CODE (arg0) == ERROR_MARK
9039 || TREE_CODE (arg1) == ERROR_MARK
9040 || TREE_CODE (arg2) == ERROR_MARK)
9041 return NULL_TREE;
9042
9043 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9044 arg0, arg1, arg2))
9045 return ret;
9046
9047 switch (fcode)
9048 {
9049
9050 CASE_FLT_FN (BUILT_IN_SINCOS):
9051 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9052
9053 CASE_FLT_FN (BUILT_IN_FMA):
9054 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9055
9056 CASE_FLT_FN (BUILT_IN_REMQUO):
9057 if (validate_arg (arg0, REAL_TYPE)
9058 && validate_arg (arg1, REAL_TYPE)
9059 && validate_arg (arg2, POINTER_TYPE))
9060 return do_mpfr_remquo (arg0, arg1, arg2);
9061 break;
9062
9063 case BUILT_IN_MEMCMP:
9064 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9065
9066 case BUILT_IN_EXPECT:
9067 return fold_builtin_expect (loc, arg0, arg1, arg2);
9068
9069 case BUILT_IN_ADD_OVERFLOW:
9070 case BUILT_IN_SUB_OVERFLOW:
9071 case BUILT_IN_MUL_OVERFLOW:
9072 case BUILT_IN_ADD_OVERFLOW_P:
9073 case BUILT_IN_SUB_OVERFLOW_P:
9074 case BUILT_IN_MUL_OVERFLOW_P:
9075 case BUILT_IN_SADD_OVERFLOW:
9076 case BUILT_IN_SADDL_OVERFLOW:
9077 case BUILT_IN_SADDLL_OVERFLOW:
9078 case BUILT_IN_SSUB_OVERFLOW:
9079 case BUILT_IN_SSUBL_OVERFLOW:
9080 case BUILT_IN_SSUBLL_OVERFLOW:
9081 case BUILT_IN_SMUL_OVERFLOW:
9082 case BUILT_IN_SMULL_OVERFLOW:
9083 case BUILT_IN_SMULLL_OVERFLOW:
9084 case BUILT_IN_UADD_OVERFLOW:
9085 case BUILT_IN_UADDL_OVERFLOW:
9086 case BUILT_IN_UADDLL_OVERFLOW:
9087 case BUILT_IN_USUB_OVERFLOW:
9088 case BUILT_IN_USUBL_OVERFLOW:
9089 case BUILT_IN_USUBLL_OVERFLOW:
9090 case BUILT_IN_UMUL_OVERFLOW:
9091 case BUILT_IN_UMULL_OVERFLOW:
9092 case BUILT_IN_UMULLL_OVERFLOW:
9093 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9094
9095 default:
9096 break;
9097 }
9098 return NULL_TREE;
9099 }
9100
9101 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9102 arguments. IGNORE is true if the result of the
9103 function call is ignored. This function returns NULL_TREE if no
9104 simplification was possible. */
9105
9106 tree
9107 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9108 {
9109 tree ret = NULL_TREE;
9110
9111 switch (nargs)
9112 {
9113 case 0:
9114 ret = fold_builtin_0 (loc, fndecl);
9115 break;
9116 case 1:
9117 ret = fold_builtin_1 (loc, fndecl, args[0]);
9118 break;
9119 case 2:
9120 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9121 break;
9122 case 3:
9123 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9124 break;
9125 default:
9126 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9127 break;
9128 }
9129 if (ret)
9130 {
9131 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9132 SET_EXPR_LOCATION (ret, loc);
9133 TREE_NO_WARNING (ret) = 1;
9134 return ret;
9135 }
9136 return NULL_TREE;
9137 }
9138
9139 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9140 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9141 of arguments in ARGS to be omitted. OLDNARGS is the number of
9142 elements in ARGS. */
9143
9144 static tree
9145 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9146 int skip, tree fndecl, int n, va_list newargs)
9147 {
9148 int nargs = oldnargs - skip + n;
9149 tree *buffer;
9150
9151 if (n > 0)
9152 {
9153 int i, j;
9154
9155 buffer = XALLOCAVEC (tree, nargs);
9156 for (i = 0; i < n; i++)
9157 buffer[i] = va_arg (newargs, tree);
9158 for (j = skip; j < oldnargs; j++, i++)
9159 buffer[i] = args[j];
9160 }
9161 else
9162 buffer = args + skip;
9163
9164 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9165 }
9166
9167 /* Return true if FNDECL shouldn't be folded right now.
9168 If a built-in function has an inline attribute always_inline
9169 wrapper, defer folding it after always_inline functions have
9170 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9171 might not be performed. */
9172
9173 bool
9174 avoid_folding_inline_builtin (tree fndecl)
9175 {
9176 return (DECL_DECLARED_INLINE_P (fndecl)
9177 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9178 && cfun
9179 && !cfun->always_inline_functions_inlined
9180 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9181 }
9182
9183 /* A wrapper function for builtin folding that prevents warnings for
9184 "statement without effect" and the like, caused by removing the
9185 call node earlier than the warning is generated. */
9186
9187 tree
9188 fold_call_expr (location_t loc, tree exp, bool ignore)
9189 {
9190 tree ret = NULL_TREE;
9191 tree fndecl = get_callee_fndecl (exp);
9192 if (fndecl
9193 && TREE_CODE (fndecl) == FUNCTION_DECL
9194 && DECL_BUILT_IN (fndecl)
9195 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9196 yet. Defer folding until we see all the arguments
9197 (after inlining). */
9198 && !CALL_EXPR_VA_ARG_PACK (exp))
9199 {
9200 int nargs = call_expr_nargs (exp);
9201
9202 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9203 instead last argument is __builtin_va_arg_pack (). Defer folding
9204 even in that case, until arguments are finalized. */
9205 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9206 {
9207 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9208 if (fndecl2
9209 && TREE_CODE (fndecl2) == FUNCTION_DECL
9210 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9211 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9212 return NULL_TREE;
9213 }
9214
9215 if (avoid_folding_inline_builtin (fndecl))
9216 return NULL_TREE;
9217
9218 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9219 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9220 CALL_EXPR_ARGP (exp), ignore);
9221 else
9222 {
9223 tree *args = CALL_EXPR_ARGP (exp);
9224 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9225 if (ret)
9226 return ret;
9227 }
9228 }
9229 return NULL_TREE;
9230 }
9231
9232 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9233 N arguments are passed in the array ARGARRAY. Return a folded
9234 expression or NULL_TREE if no simplification was possible. */
9235
9236 tree
9237 fold_builtin_call_array (location_t loc, tree,
9238 tree fn,
9239 int n,
9240 tree *argarray)
9241 {
9242 if (TREE_CODE (fn) != ADDR_EXPR)
9243 return NULL_TREE;
9244
9245 tree fndecl = TREE_OPERAND (fn, 0);
9246 if (TREE_CODE (fndecl) == FUNCTION_DECL
9247 && DECL_BUILT_IN (fndecl))
9248 {
9249 /* If last argument is __builtin_va_arg_pack (), arguments to this
9250 function are not finalized yet. Defer folding until they are. */
9251 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9252 {
9253 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9254 if (fndecl2
9255 && TREE_CODE (fndecl2) == FUNCTION_DECL
9256 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9257 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9258 return NULL_TREE;
9259 }
9260 if (avoid_folding_inline_builtin (fndecl))
9261 return NULL_TREE;
9262 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9263 return targetm.fold_builtin (fndecl, n, argarray, false);
9264 else
9265 return fold_builtin_n (loc, fndecl, argarray, n, false);
9266 }
9267
9268 return NULL_TREE;
9269 }
9270
9271 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9272 along with N new arguments specified as the "..." parameters. SKIP
9273 is the number of arguments in EXP to be omitted. This function is used
9274 to do varargs-to-varargs transformations. */
9275
9276 static tree
9277 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9278 {
9279 va_list ap;
9280 tree t;
9281
9282 va_start (ap, n);
9283 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9284 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9285 va_end (ap);
9286
9287 return t;
9288 }
9289
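/* A minimal sketch (EXP and FNDECL are whatever the caller supplies):

       rewrite_call_expr (loc, exp, 3, fndecl, 2, dest, len)

   builds a new CALL_EXPR to FNDECL whose arguments are DEST and LEN
   followed by every argument of EXP except the first three.  */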
9290 /* Validate a single argument ARG against a tree code CODE representing
9291 a type. Return true when argument is valid. */
9292
9293 static bool
9294 validate_arg (const_tree arg, enum tree_code code)
9295 {
9296 if (!arg)
9297 return false;
9298 else if (code == POINTER_TYPE)
9299 return POINTER_TYPE_P (TREE_TYPE (arg));
9300 else if (code == INTEGER_TYPE)
9301 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9302 return code == TREE_CODE (TREE_TYPE (arg));
9303 }
9304
9305 /* This function validates the types of a function call argument list
9306 against a specified list of tree_codes. If the last specifier is a 0,
9307 that represents an ellipsis, otherwise the last specifier must be a
9308 VOID_TYPE.
9309
9310 This is the GIMPLE version of validate_arglist. Eventually we want to
9311 completely convert builtins.c to work from GIMPLEs and the tree based
9312 validate_arglist will then be removed. */
9313
9314 bool
9315 validate_gimple_arglist (const gcall *call, ...)
9316 {
9317 enum tree_code code;
9318 bool res = false;
9319 va_list ap;
9320 const_tree arg;
9321 size_t i;
9322
9323 va_start (ap, call);
9324 i = 0;
9325
9326 do
9327 {
9328 code = (enum tree_code) va_arg (ap, int);
9329 switch (code)
9330 {
9331 case 0:
9332 /* This signifies an ellipsis; any further arguments are all ok. */
9333 res = true;
9334 goto end;
9335 case VOID_TYPE:
9336 /* This signifies an endlink, if no arguments remain, return
9337 true, otherwise return false. */
9338 res = (i == gimple_call_num_args (call));
9339 goto end;
9340 default:
9341 /* If no parameters remain or the parameter's code does not
9342 match the specified code, return false. Otherwise continue
9343 checking any remaining arguments. */
9344 arg = gimple_call_arg (call, i++);
9345 if (!validate_arg (arg, code))
9346 goto end;
9347 break;
9348 }
9349 }
9350 while (1);
9351
9352 /* We need gotos here since we can only have one va_end in a
9353 function. */
9354 end: ;
9355 va_end (ap);
9356
9357 return res;
9358 }
9359
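/* A minimal sketch of a typical check (the gcall STMT is hypothetical):

       validate_gimple_arglist (stmt, REAL_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two arguments, a real followed by a pointer, while a
   trailing 0 in place of VOID_TYPE would additionally allow further
   arguments of any type.  */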
9360 /* Default target-specific builtin expander that does nothing. */
9361
9362 rtx
9363 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9364 rtx target ATTRIBUTE_UNUSED,
9365 rtx subtarget ATTRIBUTE_UNUSED,
9366 machine_mode mode ATTRIBUTE_UNUSED,
9367 int ignore ATTRIBUTE_UNUSED)
9368 {
9369 return NULL_RTX;
9370 }
9371
9372 /* Returns true if EXP represents data that would potentially reside
9373 in a readonly section. */
9374
9375 bool
9376 readonly_data_expr (tree exp)
9377 {
9378 STRIP_NOPS (exp);
9379
9380 if (TREE_CODE (exp) != ADDR_EXPR)
9381 return false;
9382
9383 exp = get_base_address (TREE_OPERAND (exp, 0));
9384 if (!exp)
9385 return false;
9386
9387 /* Make sure we call decl_readonly_section only for trees it
9388 can handle (since it returns true for everything it doesn't
9389 understand). */
9390 if (TREE_CODE (exp) == STRING_CST
9391 || TREE_CODE (exp) == CONSTRUCTOR
9392 || (VAR_P (exp) && TREE_STATIC (exp)))
9393 return decl_readonly_section (exp, 0);
9394 else
9395 return false;
9396 }
9397
9398 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9399 to the call, and TYPE is its return type.
9400
9401 Return NULL_TREE if no simplification was possible, otherwise return the
9402 simplified form of the call as a tree.
9403
9404 The simplified form may be a constant or other expression which
9405 computes the same value, but in a more efficient manner (including
9406 calls to other builtin functions).
9407
9408 The call may contain arguments which need to be evaluated, but
9409 which are not useful to determine the result of the call. In
9410 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9411 COMPOUND_EXPR will be an argument which must be evaluated.
9412 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9413 COMPOUND_EXPR in the chain will contain the tree for the simplified
9414 form of the builtin function call. */
9415
9416 static tree
9417 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9418 {
9419 if (!validate_arg (s1, POINTER_TYPE)
9420 || !validate_arg (s2, POINTER_TYPE))
9421 return NULL_TREE;
9422 else
9423 {
9424 tree fn;
9425 const char *p1, *p2;
9426
9427 p2 = c_getstr (s2);
9428 if (p2 == NULL)
9429 return NULL_TREE;
9430
9431 p1 = c_getstr (s1);
9432 if (p1 != NULL)
9433 {
9434 const char *r = strpbrk (p1, p2);
9435 tree tem;
9436
9437 if (r == NULL)
9438 return build_int_cst (TREE_TYPE (s1), 0);
9439
9440 /* Return an offset into the constant string argument. */
9441 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9442 return fold_convert_loc (loc, type, tem);
9443 }
9444
9445 if (p2[0] == '\0')
9446 /* strpbrk(x, "") == NULL.
9447 Evaluate and ignore s1 in case it had side-effects. */
9448 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9449
9450 if (p2[1] != '\0')
9451 return NULL_TREE; /* Really call strpbrk. */
9452
9453 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9454 if (!fn)
9455 return NULL_TREE;
9456
9457 /* New argument list transforming strpbrk(s1, s2) to
9458 strchr(s1, s2[0]). */
9459 return build_call_expr_loc (loc, fn, 2, s1,
9460 build_int_cst (integer_type_node, p2[0]));
9461 }
9462 }
9463
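/* A minimal sketch of the strpbrk folds above (string contents are
   illustrative only):

       strpbrk (s, "")        folds to  (s, (char *) 0)   s still evaluated
       strpbrk (s, "c")       folds to  strchr (s, 'c')
       strpbrk ("abc", "xb")  folds to  a pointer at offset 1 into the
                                        constant string  */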
9464 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9465 to the call.
9466
9467 Return NULL_TREE if no simplification was possible, otherwise return the
9468 simplified form of the call as a tree.
9469
9470 The simplified form may be a constant or other expression which
9471 computes the same value, but in a more efficient manner (including
9472 calls to other builtin functions).
9473
9474 The call may contain arguments which need to be evaluated, but
9475 which are not useful to determine the result of the call. In
9476 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9477 COMPOUND_EXPR will be an argument which must be evaluated.
9478 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9479 COMPOUND_EXPR in the chain will contain the tree for the simplified
9480 form of the builtin function call. */
9481
9482 static tree
9483 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9484 {
9485 if (!validate_arg (s1, POINTER_TYPE)
9486 || !validate_arg (s2, POINTER_TYPE))
9487 return NULL_TREE;
9488 else
9489 {
9490 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9491
9492 /* If either argument is "", return NULL_TREE. */
9493 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9494 /* Evaluate and ignore both arguments in case either one has
9495 side-effects. */
9496 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9497 s1, s2);
9498 return NULL_TREE;
9499 }
9500 }
9501
9502 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9503 to the call.
9504
9505 Return NULL_TREE if no simplification was possible, otherwise return the
9506 simplified form of the call as a tree.
9507
9508 The simplified form may be a constant or other expression which
9509 computes the same value, but in a more efficient manner (including
9510 calls to other builtin functions).
9511
9512 The call may contain arguments which need to be evaluated, but
9513 which are not useful to determine the result of the call. In
9514 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9515 COMPOUND_EXPR will be an argument which must be evaluated.
9516 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9517 COMPOUND_EXPR in the chain will contain the tree for the simplified
9518 form of the builtin function call. */
9519
9520 static tree
9521 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9522 {
9523 if (!validate_arg (s1, POINTER_TYPE)
9524 || !validate_arg (s2, POINTER_TYPE))
9525 return NULL_TREE;
9526 else
9527 {
9528 /* If the first argument is "", return NULL_TREE. */
9529 const char *p1 = c_getstr (s1);
9530 if (p1 && *p1 == '\0')
9531 {
9532 /* Evaluate and ignore argument s2 in case it has
9533 side-effects. */
9534 return omit_one_operand_loc (loc, size_type_node,
9535 size_zero_node, s2);
9536 }
9537
9538 /* If the second argument is "", return __builtin_strlen(s1). */
9539 const char *p2 = c_getstr (s2);
9540 if (p2 && *p2 == '\0')
9541 {
9542 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9543
9544 /* If the replacement _DECL isn't initialized, don't do the
9545 transformation. */
9546 if (!fn)
9547 return NULL_TREE;
9548
9549 return build_call_expr_loc (loc, fn, 1, s1);
9550 }
9551 return NULL_TREE;
9552 }
9553 }
9554
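/* A minimal sketch of the strspn/strcspn folds above:

       strspn (s1, "")   and  strspn ("", s2)   fold to  (size_t) 0
       strcspn ("", s2)                         folds to (size_t) 0
       strcspn (s1, "")                         folds to strlen (s1)

   with any ignored argument still evaluated for its side effects.  */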
9555 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9556 produced, false otherwise. This is done so that we don't output the error
9557 or warning twice or three times. */
9558
9559 bool
9560 fold_builtin_next_arg (tree exp, bool va_start_p)
9561 {
9562 tree fntype = TREE_TYPE (current_function_decl);
9563 int nargs = call_expr_nargs (exp);
9564 tree arg;
9565 /* There is a good chance the current input_location points inside the
9566 definition of the va_start macro (perhaps on the token for
9567 builtin) in a system header, so warnings will not be emitted.
9568 Use the location in real source code. */
9569 source_location current_location =
9570 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9571 NULL);
9572
9573 if (!stdarg_p (fntype))
9574 {
9575 error ("%<va_start%> used in function with fixed args");
9576 return true;
9577 }
9578
9579 if (va_start_p)
9580 {
9581 if (nargs != 2)
9582 {
9583 error ("wrong number of arguments to function %<va_start%>");
9584 return true;
9585 }
9586 arg = CALL_EXPR_ARG (exp, 1);
9587 }
9588 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
9589 when we checked the arguments and if needed issued a warning. */
9590 else
9591 {
9592 if (nargs == 0)
9593 {
9594 /* Evidently an out of date version of <stdarg.h>; can't validate
9595 va_start's second argument, but can still work as intended. */
9596 warning_at (current_location,
9597 OPT_Wvarargs,
9598 "%<__builtin_next_arg%> called without an argument");
9599 return true;
9600 }
9601 else if (nargs > 1)
9602 {
9603 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9604 return true;
9605 }
9606 arg = CALL_EXPR_ARG (exp, 0);
9607 }
9608
9609 if (TREE_CODE (arg) == SSA_NAME)
9610 arg = SSA_NAME_VAR (arg);
9611
9612 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9613 or __builtin_next_arg (0) the first time we see it, after checking
9614 the arguments and if needed issuing a warning. */
9615 if (!integer_zerop (arg))
9616 {
9617 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9618
9619 /* Strip off all nops for the sake of the comparison. This
9620 is not quite the same as STRIP_NOPS. It does more.
9621 We must also strip off INDIRECT_EXPR for C++ reference
9622 parameters. */
9623 while (CONVERT_EXPR_P (arg)
9624 || TREE_CODE (arg) == INDIRECT_REF)
9625 arg = TREE_OPERAND (arg, 0);
9626 if (arg != last_parm)
9627 {
9628 /* FIXME: Sometimes with the tree optimizers we can end up with
9629 something other than the last argument even though the user used
9630 the last argument. We just warn and set the arg to be the last
9631 argument so that we will get wrong-code because of
9632 it. */
9633 warning_at (current_location,
9634 OPT_Wvarargs,
9635 "second parameter of %<va_start%> not last named argument");
9636 }
9637
9638 /* Undefined by C99 7.15.1.4p4 (va_start):
9639 "If the parameter parmN is declared with the register storage
9640 class, with a function or array type, or with a type that is
9641 not compatible with the type that results after application of
9642 the default argument promotions, the behavior is undefined."
9643 */
9644 else if (DECL_REGISTER (arg))
9645 {
9646 warning_at (current_location,
9647 OPT_Wvarargs,
9648 "undefined behavior when second parameter of "
9649 "%<va_start%> is declared with %<register%> storage");
9650 }
9651
9652 /* We want to verify the second parameter just once before the tree
9653 optimizers are run and then avoid keeping it in the tree,
9654 as otherwise we could warn even for correct code like:
9655 void foo (int i, ...)
9656 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9657 if (va_start_p)
9658 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9659 else
9660 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9661 }
9662 return false;
9663 }
9664
9665
9666 /* Expand a call EXP to __builtin_object_size. */
9667
9668 static rtx
9669 expand_builtin_object_size (tree exp)
9670 {
9671 tree ost;
9672 int object_size_type;
9673 tree fndecl = get_callee_fndecl (exp);
9674
9675 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9676 {
9677 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9678 exp, fndecl);
9679 expand_builtin_trap ();
9680 return const0_rtx;
9681 }
9682
9683 ost = CALL_EXPR_ARG (exp, 1);
9684 STRIP_NOPS (ost);
9685
9686 if (TREE_CODE (ost) != INTEGER_CST
9687 || tree_int_cst_sgn (ost) < 0
9688 || compare_tree_int (ost, 3) > 0)
9689 {
9690 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9691 exp, fndecl);
9692 expand_builtin_trap ();
9693 return const0_rtx;
9694 }
9695
9696 object_size_type = tree_to_shwi (ost);
9697
9698 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9699 }
9700
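/* A minimal sketch: when earlier folding could not determine the size,
   the expander above falls back to the documented defaults, e.g. for an
   unknown pointer p (hypothetical):

       __builtin_object_size (p, 0)   expands to  (size_t) -1
       __builtin_object_size (p, 2)   expands to  (size_t) 0  */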
9701 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9702 FCODE is the BUILT_IN_* to use.
9703 Return NULL_RTX if we failed; the caller should emit a normal call,
9704 otherwise try to get the result in TARGET, if convenient (and in
9705 mode MODE if that's convenient). */
9706
9707 static rtx
9708 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9709 enum built_in_function fcode)
9710 {
9711 tree dest, src, len, size;
9712
9713 if (!validate_arglist (exp,
9714 POINTER_TYPE,
9715 fcode == BUILT_IN_MEMSET_CHK
9716 ? INTEGER_TYPE : POINTER_TYPE,
9717 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9718 return NULL_RTX;
9719
9720 dest = CALL_EXPR_ARG (exp, 0);
9721 src = CALL_EXPR_ARG (exp, 1);
9722 len = CALL_EXPR_ARG (exp, 2);
9723 size = CALL_EXPR_ARG (exp, 3);
9724
9725 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9726 exp, len, /*maxlen=*/NULL_TREE,
9727 /*str=*/NULL_TREE, size);
9728
9729 if (!tree_fits_uhwi_p (size))
9730 return NULL_RTX;
9731
9732 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9733 {
9734 /* Avoid transforming the checking call to an ordinary one when
9735 an overflow has been detected or when the call couldn't be
9736 validated because the size is not constant. */
9737 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9738 return NULL_RTX;
9739
9740 tree fn = NULL_TREE;
9741 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9742 mem{cpy,pcpy,move,set} is available. */
9743 switch (fcode)
9744 {
9745 case BUILT_IN_MEMCPY_CHK:
9746 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9747 break;
9748 case BUILT_IN_MEMPCPY_CHK:
9749 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9750 break;
9751 case BUILT_IN_MEMMOVE_CHK:
9752 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9753 break;
9754 case BUILT_IN_MEMSET_CHK:
9755 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9756 break;
9757 default:
9758 break;
9759 }
9760
9761 if (! fn)
9762 return NULL_RTX;
9763
9764 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9765 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9766 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9767 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9768 }
9769 else if (fcode == BUILT_IN_MEMSET_CHK)
9770 return NULL_RTX;
9771 else
9772 {
9773 unsigned int dest_align = get_pointer_alignment (dest);
9774
9775 /* If DEST is not a pointer type, call the normal function. */
9776 if (dest_align == 0)
9777 return NULL_RTX;
9778
9779 /* If SRC and DEST are the same (and not volatile), do nothing. */
9780 if (operand_equal_p (src, dest, 0))
9781 {
9782 tree expr;
9783
9784 if (fcode != BUILT_IN_MEMPCPY_CHK)
9785 {
9786 /* Evaluate and ignore LEN in case it has side-effects. */
9787 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9788 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9789 }
9790
9791 expr = fold_build_pointer_plus (dest, len);
9792 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9793 }
9794
9795 /* __memmove_chk special case. */
9796 if (fcode == BUILT_IN_MEMMOVE_CHK)
9797 {
9798 unsigned int src_align = get_pointer_alignment (src);
9799
9800 if (src_align == 0)
9801 return NULL_RTX;
9802
9803 /* If src is categorized for a readonly section we can use
9804 normal __memcpy_chk. */
9805 if (readonly_data_expr (src))
9806 {
9807 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9808 if (!fn)
9809 return NULL_RTX;
9810 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9811 dest, src, len, size);
9812 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9813 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9814 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9815 }
9816 }
9817 return NULL_RTX;
9818 }
9819 }
9820
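/* A minimal sketch (dest, src, n and bos are hypothetical): when the
   length is a known constant that passes the size check, a checked call
   such as

       __builtin___memcpy_chk (dest, src, n, bos)

   is expanded as a plain memcpy (dest, src, n); a __memmove_chk whose
   source is read-only data is likewise rewritten to use __memcpy_chk.  */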
9821 /* Emit warning if a buffer overflow is detected at compile time. */
9822
9823 static void
9824 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9825 {
9826 /* The source string. */
9827 tree srcstr = NULL_TREE;
9828 /* The size of the destination object. */
9829 tree objsize = NULL_TREE;
9830 /* The string that is being concatenated with (as in __strcat_chk)
9831 or null if it isn't. */
9832 tree catstr = NULL_TREE;
9833 /* The maximum length of the source sequence in a bounded operation
9834 (such as __strncat_chk) or null if the operation isn't bounded
9835 (such as __strcat_chk). */
9836 tree maxlen = NULL_TREE;
9837
9838 switch (fcode)
9839 {
9840 case BUILT_IN_STRCPY_CHK:
9841 case BUILT_IN_STPCPY_CHK:
9842 srcstr = CALL_EXPR_ARG (exp, 1);
9843 objsize = CALL_EXPR_ARG (exp, 2);
9844 break;
9845
9846 case BUILT_IN_STRCAT_CHK:
9847 /* For __strcat_chk the warning will be emitted only if overflowing
9848 by at least strlen (dest) + 1 bytes. */
9849 catstr = CALL_EXPR_ARG (exp, 0);
9850 srcstr = CALL_EXPR_ARG (exp, 1);
9851 objsize = CALL_EXPR_ARG (exp, 2);
9852 break;
9853
9854 case BUILT_IN_STRNCAT_CHK:
9855 catstr = CALL_EXPR_ARG (exp, 0);
9856 srcstr = CALL_EXPR_ARG (exp, 1);
9857 maxlen = CALL_EXPR_ARG (exp, 2);
9858 objsize = CALL_EXPR_ARG (exp, 3);
9859 break;
9860
9861 case BUILT_IN_STRNCPY_CHK:
9862 case BUILT_IN_STPNCPY_CHK:
9863 srcstr = CALL_EXPR_ARG (exp, 1);
9864 maxlen = CALL_EXPR_ARG (exp, 2);
9865 objsize = CALL_EXPR_ARG (exp, 3);
9866 break;
9867
9868 case BUILT_IN_SNPRINTF_CHK:
9869 case BUILT_IN_VSNPRINTF_CHK:
9870 maxlen = CALL_EXPR_ARG (exp, 1);
9871 objsize = CALL_EXPR_ARG (exp, 3);
9872 break;
9873 default:
9874 gcc_unreachable ();
9875 }
9876
9877 if (catstr && maxlen)
9878 {
9879 /* Check __strncat_chk. There is no way to determine the length
9880 of the string to which the source string is being appended so
9881 just warn when the length of the source string is not known. */
9882 check_strncat_sizes (exp, objsize);
9883 return;
9884 }
9885
9886 check_sizes (OPT_Wstringop_overflow_, exp,
9887 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9888 }
9889
9890 /* Emit warning if a buffer overflow is detected at compile time
9891 in __sprintf_chk/__vsprintf_chk calls. */
9892
9893 static void
9894 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9895 {
9896 tree size, len, fmt;
9897 const char *fmt_str;
9898 int nargs = call_expr_nargs (exp);
9899
9900 /* Verify the required arguments in the original call. */
9901
9902 if (nargs < 4)
9903 return;
9904 size = CALL_EXPR_ARG (exp, 2);
9905 fmt = CALL_EXPR_ARG (exp, 3);
9906
9907 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9908 return;
9909
9910 /* Check whether the format is a literal string constant. */
9911 fmt_str = c_getstr (fmt);
9912 if (fmt_str == NULL)
9913 return;
9914
9915 if (!init_target_chars ())
9916 return;
9917
9918 /* If the format doesn't contain % args or %%, we know its size. */
9919 if (strchr (fmt_str, target_percent) == 0)
9920 len = build_int_cstu (size_type_node, strlen (fmt_str));
9921 /* If the format is "%s" and first ... argument is a string literal,
9922 we know it too. */
9923 else if (fcode == BUILT_IN_SPRINTF_CHK
9924 && strcmp (fmt_str, target_percent_s) == 0)
9925 {
9926 tree arg;
9927
9928 if (nargs < 5)
9929 return;
9930 arg = CALL_EXPR_ARG (exp, 4);
9931 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9932 return;
9933
9934 len = c_strlen (arg, 1);
9935 if (!len || ! tree_fits_uhwi_p (len))
9936 return;
9937 }
9938 else
9939 return;
9940
9941 /* Add one for the terminating nul. */
9942 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9943 check_sizes (OPT_Wstringop_overflow_,
9944 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
9945 }
9946
9947 /* Emit warning if a free is called with address of a variable. */
9948
9949 static void
9950 maybe_emit_free_warning (tree exp)
9951 {
9952 tree arg = CALL_EXPR_ARG (exp, 0);
9953
9954 STRIP_NOPS (arg);
9955 if (TREE_CODE (arg) != ADDR_EXPR)
9956 return;
9957
9958 arg = get_base_address (TREE_OPERAND (arg, 0));
9959 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9960 return;
9961
9962 if (SSA_VAR_P (arg))
9963 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9964 "%Kattempt to free a non-heap object %qD", exp, arg);
9965 else
9966 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9967 "%Kattempt to free a non-heap object", exp);
9968 }
9969
9970 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9971 if possible. */
9972
9973 static tree
9974 fold_builtin_object_size (tree ptr, tree ost)
9975 {
9976 unsigned HOST_WIDE_INT bytes;
9977 int object_size_type;
9978
9979 if (!validate_arg (ptr, POINTER_TYPE)
9980 || !validate_arg (ost, INTEGER_TYPE))
9981 return NULL_TREE;
9982
9983 STRIP_NOPS (ost);
9984
9985 if (TREE_CODE (ost) != INTEGER_CST
9986 || tree_int_cst_sgn (ost) < 0
9987 || compare_tree_int (ost, 3) > 0)
9988 return NULL_TREE;
9989
9990 object_size_type = tree_to_shwi (ost);
9991
9992 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9993 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9994 and (size_t) 0 for types 2 and 3. */
9995 if (TREE_SIDE_EFFECTS (ptr))
9996 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9997
9998 if (TREE_CODE (ptr) == ADDR_EXPR)
9999 {
10000 compute_builtin_object_size (ptr, object_size_type, &bytes);
10001 if (wi::fits_to_tree_p (bytes, size_type_node))
10002 return build_int_cstu (size_type_node, bytes);
10003 }
10004 else if (TREE_CODE (ptr) == SSA_NAME)
10005 {
10006 /* If object size is not known yet, delay folding until
10007 later. Maybe subsequent passes will help determining
10008 it. */
10009 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10010 && wi::fits_to_tree_p (bytes, size_type_node))
10011 return build_int_cstu (size_type_node, bytes);
10012 }
10013
10014 return NULL_TREE;
10015 }
10016
10017 /* Builtins with folding operations that operate on "..." arguments
10018 need special handling; we need to store the arguments in a convenient
10019 data structure before attempting any folding. Fortunately there are
10020 only a few builtins that fall into this category. FNDECL is the
10021 function, EXP is the CALL_EXPR for the call. */
10022
10023 static tree
10024 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10025 {
10026 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10027 tree ret = NULL_TREE;
10028
10029 switch (fcode)
10030 {
10031 case BUILT_IN_FPCLASSIFY:
10032 ret = fold_builtin_fpclassify (loc, args, nargs);
10033 break;
10034
10035 default:
10036 break;
10037 }
10038 if (ret)
10039 {
10040 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10041 SET_EXPR_LOCATION (ret, loc);
10042 TREE_NO_WARNING (ret) = 1;
10043 return ret;
10044 }
10045 return NULL_TREE;
10046 }
10047
10048 /* Initialize format string characters in the target charset. */
10049
10050 bool
10051 init_target_chars (void)
10052 {
10053 static bool init;
10054 if (!init)
10055 {
10056 target_newline = lang_hooks.to_target_charset ('\n');
10057 target_percent = lang_hooks.to_target_charset ('%');
10058 target_c = lang_hooks.to_target_charset ('c');
10059 target_s = lang_hooks.to_target_charset ('s');
10060 if (target_newline == 0 || target_percent == 0 || target_c == 0
10061 || target_s == 0)
10062 return false;
10063
10064 target_percent_c[0] = target_percent;
10065 target_percent_c[1] = target_c;
10066 target_percent_c[2] = '\0';
10067
10068 target_percent_s[0] = target_percent;
10069 target_percent_s[1] = target_s;
10070 target_percent_s[2] = '\0';
10071
10072 target_percent_s_newline[0] = target_percent;
10073 target_percent_s_newline[1] = target_s;
10074 target_percent_s_newline[2] = target_newline;
10075 target_percent_s_newline[3] = '\0';
10076
10077 init = true;
10078 }
10079 return true;
10080 }
10081
10082 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10083 and no overflow/underflow occurred. INEXACT is true if M was not
10084 exactly calculated. TYPE is the tree type for the result. This
10085 function assumes that you cleared the MPFR flags and then
10086 calculated M to see if anything subsequently set a flag prior to
10087 entering this function. Return NULL_TREE if any checks fail. */
10088
10089 static tree
10090 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10091 {
10092 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10093 overflow/underflow occurred. If -frounding-math, proceed iff the
10094 result of calling FUNC was exact. */
10095 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10096 && (!flag_rounding_math || !inexact))
10097 {
10098 REAL_VALUE_TYPE rr;
10099
10100 real_from_mpfr (&rr, m, type, GMP_RNDN);
10101 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10102 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10103 but the mpfr_t is not, then we underflowed in the
10104 conversion. */
10105 if (real_isfinite (&rr)
10106 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10107 {
10108 REAL_VALUE_TYPE rmode;
10109
10110 real_convert (&rmode, TYPE_MODE (type), &rr);
10111 /* Proceed iff the specified mode can hold the value. */
10112 if (real_identical (&rmode, &rr))
10113 return build_real (type, rmode);
10114 }
10115 }
10116 return NULL_TREE;
10117 }
10118
10119 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10120 number and no overflow/underflow occurred. INEXACT is true if M
10121 was not exactly calculated. TYPE is the tree type for the result.
10122 This function assumes that you cleared the MPFR flags and then
10123 calculated M to see if anything subsequently set a flag prior to
10124 entering this function. Return NULL_TREE if any checks fail, if
10125 FORCE_CONVERT is true, then bypass the checks. */
10126
10127 static tree
10128 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10129 {
10130 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10131 overflow/underflow occurred. If -frounding-math, proceed iff the
10132 result of calling FUNC was exact. */
10133 if (force_convert
10134 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10135 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10136 && (!flag_rounding_math || !inexact)))
10137 {
10138 REAL_VALUE_TYPE re, im;
10139
10140 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10141 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10142 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10143 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10144 but the mpfr_t is not, then we underflowed in the
10145 conversion. */
10146 if (force_convert
10147 || (real_isfinite (&re) && real_isfinite (&im)
10148 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10149 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10150 {
10151 REAL_VALUE_TYPE re_mode, im_mode;
10152
10153 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10154 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10155 /* Proceed iff the specified mode can hold the value. */
10156 if (force_convert
10157 || (real_identical (&re_mode, &re)
10158 && real_identical (&im_mode, &im)))
10159 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10160 build_real (TREE_TYPE (type), im_mode));
10161 }
10162 }
10163 return NULL_TREE;
10164 }
10165
10166 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10167 the pointer *(ARG_QUO) and return the result. The type is taken
10168 from the type of ARG0 and is used for setting the precision of the
10169 calculation and results. */
10170
10171 static tree
10172 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10173 {
10174 tree const type = TREE_TYPE (arg0);
10175 tree result = NULL_TREE;
10176
10177 STRIP_NOPS (arg0);
10178 STRIP_NOPS (arg1);
10179
10180 /* To proceed, MPFR must exactly represent the target floating point
10181 format, which only happens when the target base equals two. */
10182 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10183 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10184 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10185 {
10186 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10187 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10188
10189 if (real_isfinite (ra0) && real_isfinite (ra1))
10190 {
10191 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10192 const int prec = fmt->p;
10193 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10194 tree result_rem;
10195 long integer_quo;
10196 mpfr_t m0, m1;
10197
10198 mpfr_inits2 (prec, m0, m1, NULL);
10199 mpfr_from_real (m0, ra0, GMP_RNDN);
10200 mpfr_from_real (m1, ra1, GMP_RNDN);
10201 mpfr_clear_flags ();
10202 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10203 /* Remquo is independent of the rounding mode, so pass
10204 inexact=0 to do_mpfr_ckconv(). */
10205 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10206 mpfr_clears (m0, m1, NULL);
10207 if (result_rem)
10208 {
10209 /* MPFR calculates quo in the host's long so it may
10210 return more bits in quo than the target int can hold
10211 if sizeof(host long) > sizeof(target int). This can
10212 happen even for native compilers in LP64 mode. In
10213 these cases, modulo the quo value with the largest
10214 number that the target int can hold while leaving one
10215 bit for the sign. */
10216 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10217 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10218
10219 /* Dereference the quo pointer argument. */
10220 arg_quo = build_fold_indirect_ref (arg_quo);
10221 /* Proceed iff a valid pointer type was passed in. */
10222 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10223 {
10224 /* Set the value. */
10225 tree result_quo
10226 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10227 build_int_cst (TREE_TYPE (arg_quo),
10228 integer_quo));
10229 TREE_SIDE_EFFECTS (result_quo) = 1;
10230 /* Combine the quo assignment with the rem. */
10231 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10232 result_quo, result_rem));
10233 }
10234 }
10235 }
10236 }
10237 return result;
10238 }
10239
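/* A minimal sketch of the constant fold above (operands chosen for
   illustration):

       int q;
       double r = remquo (7.0, 3.0, &q);

   folds so that r becomes 1.0 and the assignment q = 2 is emitted
   alongside it, combined into a COMPOUND_EXPR as built above.  */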
10240 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10241    resulting value as a tree with type TYPE.  The mpfr precision is
10242    set to the precision of TYPE.  We assume that this mpfr function
10243    returns zero if the result could be calculated exactly within the
10244    requested precision.  In addition, the int pointed to by ARG_SG
10245    will be set to the appropriate signgam value (-1 or 1) for the
10246    sign of gamma(ARG).  */
10247
10248 static tree
10249 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10250 {
10251 tree result = NULL_TREE;
10252
10253 STRIP_NOPS (arg);
10254
10255 /* To proceed, MPFR must exactly represent the target floating point
10256 format, which only happens when the target base equals two. Also
10257 verify ARG is a constant and that ARG_SG is an int pointer. */
10258 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10259 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10260 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10261 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10262 {
10263 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10264
10265 /* Besides NaN and Inf (excluded by real_isfinite), the argument
10266    cannot be zero or a negative integer, where lgamma has poles.  */
10267 if (real_isfinite (ra)
10268 && ra->cl != rvc_zero
10269 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10270 {
10271 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10272 const int prec = fmt->p;
10273 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10274 int inexact, sg;
10275 mpfr_t m;
10276 tree result_lg;
10277
10278 mpfr_init2 (m, prec);
10279 mpfr_from_real (m, ra, GMP_RNDN);
10280 mpfr_clear_flags ();
10281 inexact = mpfr_lgamma (m, &sg, m, rnd);
10282 result_lg = do_mpfr_ckconv (m, type, inexact);
10283 mpfr_clear (m);
10284 if (result_lg)
10285 {
10286 tree result_sg;
10287
10288 /* Dereference the arg_sg pointer argument. */
10289 arg_sg = build_fold_indirect_ref (arg_sg);
10290 /* Assign the signgam value into *arg_sg. */
10291 result_sg = fold_build2 (MODIFY_EXPR,
10292 TREE_TYPE (arg_sg), arg_sg,
10293 build_int_cst (TREE_TYPE (arg_sg), sg));
10294 TREE_SIDE_EFFECTS (result_sg) = 1;
10295 /* Combine the signgam assignment with the lgamma result. */
10296 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10297 result_sg, result_lg));
10298 }
10299 }
10300 }
10301
10302 return result;
10303 }
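/* A user-level sketch of the lgamma_r semantics folded above: with a
   constant argument, the call may be replaced by the constant log |gamma(x)|
   while the sign of gamma(x) is stored through the pointer argument.  The
   snippet is standalone and illustrative only.  */

int
lgamma_r_fold_example (void)
{
  int sign;
  /* gamma(-0.5) = -2*sqrt(pi), so the result is log (2*sqrt(pi)) and
     sign is set to -1.  */
  double lg = __builtin_lgamma_r (-0.5, &sign);
  return sign == -1 && lg > 0.0;
}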
10304
10305 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
10306    mpc function FUNC on them and return the resulting value as a tree
10307 with type TYPE. The mpfr precision is set to the precision of
10308 TYPE. We assume that function FUNC returns zero if the result
10309 could be calculated exactly within the requested precision. If
10310 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10311 in the arguments and/or results. */
10312
10313 tree
10314 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10315 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10316 {
10317 tree result = NULL_TREE;
10318
10319 STRIP_NOPS (arg0);
10320 STRIP_NOPS (arg1);
10321
10322 /* To proceed, MPFR must exactly represent the target floating point
10323 format, which only happens when the target base equals two. */
10324 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10325 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10326 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10327 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10328 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10329 {
10330 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10331 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10332 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10333 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10334
10335 if (do_nonfinite
10336 || (real_isfinite (re0) && real_isfinite (im0)
10337 && real_isfinite (re1) && real_isfinite (im1)))
10338 {
10339 const struct real_format *const fmt =
10340 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10341 const int prec = fmt->p;
10342 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10343 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10344 int inexact;
10345 mpc_t m0, m1;
10346
10347 mpc_init2 (m0, prec);
10348 mpc_init2 (m1, prec);
10349 mpfr_from_real (mpc_realref (m0), re0, rnd);
10350 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10351 mpfr_from_real (mpc_realref (m1), re1, rnd);
10352 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10353 mpfr_clear_flags ();
10354 inexact = func (m0, m0, m1, crnd);
10355 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10356 mpc_clear (m0);
10357 mpc_clear (m1);
10358 }
10359 }
10360
10361 return result;
10362 }
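/* A hedged sketch of a typical caller of do_mpc_arg2: folding complex
   exponentiation of two COMPLEX_CST operands through mpc_pow, MPC's
   documented entry point for cpow.  The wrapper name is hypothetical;
   real callers live in the constant-folding code.  */

static tree
fold_const_cpow_example (tree arg0, tree arg1, tree type)
{
  /* Do not try to fold operands containing Inf or NaN in this sketch.  */
  return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
}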
10363
10364 /* A wrapper function for builtin folding that prevents warnings for
10365    "statement without effect" and the like, caused by removing the
10366    call node before the warning is generated.  */
10367
10368 tree
10369 fold_call_stmt (gcall *stmt, bool ignore)
10370 {
10371 tree ret = NULL_TREE;
10372 tree fndecl = gimple_call_fndecl (stmt);
10373 location_t loc = gimple_location (stmt);
10374 if (fndecl
10375 && TREE_CODE (fndecl) == FUNCTION_DECL
10376 && DECL_BUILT_IN (fndecl)
10377 && !gimple_call_va_arg_pack_p (stmt))
10378 {
10379 int nargs = gimple_call_num_args (stmt);
10380 tree *args = (nargs > 0
10381 ? gimple_call_arg_ptr (stmt, 0)
10382 : &error_mark_node);
10383
10384 if (avoid_folding_inline_builtin (fndecl))
10385 return NULL_TREE;
10386 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10387 {
10388 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10389 }
10390 else
10391 {
10392 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10393 if (ret)
10394 {
10395 /* Propagate location information from original call to
10396 expansion of builtin. Otherwise things like
10397 maybe_emit_chk_warning, that operate on the expansion
10398 of a builtin, will use the wrong location information. */
10399 if (gimple_has_location (stmt))
10400 {
10401 tree realret = ret;
10402 if (TREE_CODE (ret) == NOP_EXPR)
10403 realret = TREE_OPERAND (ret, 0);
10404 if (CAN_HAVE_LOCATION_P (realret)
10405 && !EXPR_HAS_LOCATION (realret))
10406 SET_EXPR_LOCATION (realret, loc);
10407 return realret;
10408 }
10409 return ret;
10410 }
10411 }
10412 }
10413 return NULL_TREE;
10414 }
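/* A user-level sketch of the kind of fold this wrapper performs: a builtin
   call whose arguments are constant is replaced by its value, and the
   replacement inherits the call's source location so later diagnostics
   still point at the original call.  Standalone and illustrative only.  */

int
folded_builtin_call_example (void)
{
  /* Folds to the integer constant 6; no strlen call remains.  */
  return (int) __builtin_strlen ("folded");
}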
10415
10416 /* Look up the builtin decl that corresponds to DECL (via
10417    builtin_decl_explicit) and set ASMSPEC as its user assembler name.
10418    DECL must be a function decl that declares a builtin.  */
10419
10420 void
10421 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10422 {
10423 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10424 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10425 && asmspec != 0);
10426
10427 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10428 set_user_assembler_name (builtin, asmspec);
10429
10430 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10431 && INT_TYPE_SIZE < BITS_PER_WORD)
10432 {
10433 set_user_assembler_libfunc ("ffs", asmspec);
10434 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10435 "ffs");
10436 }
10437 }
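/* A user-level sketch of what triggers this hook: redeclaring a recognized
   builtin with an asm label makes the compiler use the user-chosen symbol
   for out-of-line references to it.  The name "my_ffs" is illustrative
   only; this snippet is standalone, not part of this file.  */

extern int ffs (int) __asm__ ("my_ffs");

int
lowest_set_bit (int x)
{
  /* If not expanded inline, this emits a call to my_ffs.  */
  return ffs (x);
}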
10438
10439 /* Return true if DECL is a builtin that expands to a constant or similarly
10440 simple code. */
10441 bool
10442 is_simple_builtin (tree decl)
10443 {
10444 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10445 switch (DECL_FUNCTION_CODE (decl))
10446 {
10447 /* Builtins that expand to constants. */
10448 case BUILT_IN_CONSTANT_P:
10449 case BUILT_IN_EXPECT:
10450 case BUILT_IN_OBJECT_SIZE:
10451 case BUILT_IN_UNREACHABLE:
10452 /* Simple register moves or loads from stack. */
10453 case BUILT_IN_ASSUME_ALIGNED:
10454 case BUILT_IN_RETURN_ADDRESS:
10455 case BUILT_IN_EXTRACT_RETURN_ADDR:
10456 case BUILT_IN_FROB_RETURN_ADDR:
10457 case BUILT_IN_RETURN:
10458 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10459 case BUILT_IN_FRAME_ADDRESS:
10460 case BUILT_IN_VA_END:
10461 case BUILT_IN_STACK_SAVE:
10462 case BUILT_IN_STACK_RESTORE:
10463 /* Exception state returns or moves registers around. */
10464 case BUILT_IN_EH_FILTER:
10465 case BUILT_IN_EH_POINTER:
10466 case BUILT_IN_EH_COPY_VALUES:
10467 return true;
10468
10469 default:
10470 return false;
10471 }
10472
10473 return false;
10474 }
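/* A user-level sketch of the "expands to constants" cases above: none of
   the calls below survives as a real call.  The values in the comments are
   illustrative; the snippet is standalone.  */

int
simple_builtin_example (void)
{
  char buf[16];
  int known = __builtin_constant_p (123);               /* folds to 1 */
  unsigned long size = __builtin_object_size (buf, 0);  /* folds to 16 */
  void *ra = __builtin_return_address (0);              /* register/stack load */
  return known + (int) size + (ra != 0);
}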
10475
10476 /* Return true if DECL is a builtin that is not expensive, i.e., one that
10477    is most probably expanded inline into reasonably simple code.  This is
10478    a superset of is_simple_builtin.  */
10479 bool
10480 is_inexpensive_builtin (tree decl)
10481 {
10482 if (!decl)
10483 return false;
10484 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10485 return true;
10486 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10487 switch (DECL_FUNCTION_CODE (decl))
10488 {
10489 case BUILT_IN_ABS:
10490 case BUILT_IN_ALLOCA:
10491 case BUILT_IN_ALLOCA_WITH_ALIGN:
10492 case BUILT_IN_BSWAP16:
10493 case BUILT_IN_BSWAP32:
10494 case BUILT_IN_BSWAP64:
10495 case BUILT_IN_CLZ:
10496 case BUILT_IN_CLZIMAX:
10497 case BUILT_IN_CLZL:
10498 case BUILT_IN_CLZLL:
10499 case BUILT_IN_CTZ:
10500 case BUILT_IN_CTZIMAX:
10501 case BUILT_IN_CTZL:
10502 case BUILT_IN_CTZLL:
10503 case BUILT_IN_FFS:
10504 case BUILT_IN_FFSIMAX:
10505 case BUILT_IN_FFSL:
10506 case BUILT_IN_FFSLL:
10507 case BUILT_IN_IMAXABS:
10508 case BUILT_IN_FINITE:
10509 case BUILT_IN_FINITEF:
10510 case BUILT_IN_FINITEL:
10511 case BUILT_IN_FINITED32:
10512 case BUILT_IN_FINITED64:
10513 case BUILT_IN_FINITED128:
10514 case BUILT_IN_FPCLASSIFY:
10515 case BUILT_IN_ISFINITE:
10516 case BUILT_IN_ISINF_SIGN:
10517 case BUILT_IN_ISINF:
10518 case BUILT_IN_ISINFF:
10519 case BUILT_IN_ISINFL:
10520 case BUILT_IN_ISINFD32:
10521 case BUILT_IN_ISINFD64:
10522 case BUILT_IN_ISINFD128:
10523 case BUILT_IN_ISNAN:
10524 case BUILT_IN_ISNANF:
10525 case BUILT_IN_ISNANL:
10526 case BUILT_IN_ISNAND32:
10527 case BUILT_IN_ISNAND64:
10528 case BUILT_IN_ISNAND128:
10529 case BUILT_IN_ISNORMAL:
10530 case BUILT_IN_ISGREATER:
10531 case BUILT_IN_ISGREATEREQUAL:
10532 case BUILT_IN_ISLESS:
10533 case BUILT_IN_ISLESSEQUAL:
10534 case BUILT_IN_ISLESSGREATER:
10535 case BUILT_IN_ISUNORDERED:
10536 case BUILT_IN_VA_ARG_PACK:
10537 case BUILT_IN_VA_ARG_PACK_LEN:
10538 case BUILT_IN_VA_COPY:
10539 case BUILT_IN_TRAP:
10540 case BUILT_IN_SAVEREGS:
10541 case BUILT_IN_POPCOUNTL:
10542 case BUILT_IN_POPCOUNTLL:
10543 case BUILT_IN_POPCOUNTIMAX:
10544 case BUILT_IN_POPCOUNT:
10545 case BUILT_IN_PARITYL:
10546 case BUILT_IN_PARITYLL:
10547 case BUILT_IN_PARITYIMAX:
10548 case BUILT_IN_PARITY:
10549 case BUILT_IN_LABS:
10550 case BUILT_IN_LLABS:
10551 case BUILT_IN_PREFETCH:
10552 case BUILT_IN_ACC_ON_DEVICE:
10553 return true;
10554
10555 default:
10556 return is_simple_builtin (decl);
10557 }
10558
10559 return false;
10560 }
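/* A user-level sketch of why the cases above are considered inexpensive:
   on common targets each call expands to one or a few instructions rather
   than a library call.  The snippet is standalone and illustrative.  */

int
inexpensive_builtin_example (unsigned int x)
{
  int pop = __builtin_popcount (x);     /* e.g. a single popcount insn */
  int lead = __builtin_clz (x | 1);     /* e.g. a single count-leading-zeros insn */
  unsigned int swapped = __builtin_bswap32 (x);  /* a byte-swap insn */
  return pop + lead + (int) swapped;
}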
10561
10562 /* Return true if T is a constant and its value, cast to a target char,
10563    can be represented by a host char.
10564    Store the cast char constant in *P if so.  */
10565
10566 bool
10567 target_char_cst_p (tree t, char *p)
10568 {
10569 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10570 return false;
10571
10572 *p = (char)tree_to_uhwi (t);
10573 return true;
10574 }
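/* A hedged sketch of a typical caller: a folder for a string builtin such
   as strchr first extracts the searched-for character as a host char, then
   scans a constant string with the host's own strchr.  c_getstr is the
   existing helper for reading a STRING_CST; the wrapper name below is
   hypothetical.  */

static bool
constant_strchr_hits_p (tree str, tree chr)
{
  const char *p = c_getstr (str);
  char c;

  if (p == NULL || !target_char_cst_p (chr, &c))
    return false;
  return strchr (p, c) != NULL;
}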