gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "realmpfr.h"
52 #include "cfgrtl.h"
53 #include "except.h"
54 #include "dojump.h"
55 #include "explow.h"
56 #include "stmt.h"
57 #include "expr.h"
58 #include "libfuncs.h"
59 #include "output.h"
60 #include "typeclass.h"
61 #include "langhooks.h"
62 #include "value-prof.h"
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "tree-chkp.h"
68 #include "rtl-chkp.h"
69 #include "internal-fn.h"
70 #include "case-cfn-macros.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73
74 struct target_builtins default_target_builtins;
75 #if SWITCHABLE_TARGET
76 struct target_builtins *this_target_builtins = &default_target_builtins;
77 #endif
78
79 /* Define the names of the builtin function types and codes. */
80 const char *const built_in_class_names[BUILT_IN_LAST]
81 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
82
83 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
84 const char * built_in_names[(int) END_BUILTINS] =
85 {
86 #include "builtins.def"
87 };
88
89 /* Set up an array of builtin_info_type, making sure each element's decl is
90 initialized to NULL_TREE. */
91 builtin_info_type builtin_info[(int)END_BUILTINS];
92
93 /* Non-zero if __builtin_constant_p should be folded right away. */
94 bool force_folding_builtin_constant_p;
95
96 static rtx c_readstr (const char *, scalar_int_mode);
97 static int target_char_cast (tree, char *);
98 static rtx get_memory_rtx (tree, tree);
99 static int apply_args_size (void);
100 static int apply_result_size (void);
101 static rtx result_vector (int, rtx);
102 static void expand_builtin_prefetch (tree);
103 static rtx expand_builtin_apply_args (void);
104 static rtx expand_builtin_apply_args_1 (void);
105 static rtx expand_builtin_apply (rtx, rtx, rtx);
106 static void expand_builtin_return (rtx);
107 static enum type_class type_to_class (tree);
108 static rtx expand_builtin_classify_type (tree);
109 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
111 static rtx expand_builtin_interclass_mathfn (tree, rtx);
112 static rtx expand_builtin_sincos (tree);
113 static rtx expand_builtin_cexpi (tree, rtx);
114 static rtx expand_builtin_int_roundingfn (tree, rtx);
115 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
116 static rtx expand_builtin_next_arg (void);
117 static rtx expand_builtin_va_start (tree);
118 static rtx expand_builtin_va_end (tree);
119 static rtx expand_builtin_va_copy (tree);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
123 static rtx expand_builtin_memchr (tree, rtx);
124 static rtx expand_builtin_memcpy (tree, rtx);
125 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
126 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
127 rtx target, tree exp, int endp);
128 static rtx expand_builtin_memmove (tree, rtx);
129 static rtx expand_builtin_mempcpy (tree, rtx);
130 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
131 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
132 static rtx expand_builtin_strcat (tree, rtx);
133 static rtx expand_builtin_strcpy (tree, rtx);
134 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
135 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
136 static rtx expand_builtin_stpncpy (tree, rtx);
137 static rtx expand_builtin_strncat (tree, rtx);
138 static rtx expand_builtin_strncpy (tree, rtx);
139 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
140 static rtx expand_builtin_memset (tree, rtx, machine_mode);
141 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_alloca (tree);
146 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static tree stabilize_va_list_loc (location_t, tree, int);
149 static rtx expand_builtin_expect (tree, rtx);
150 static tree fold_builtin_constant_p (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (location_t, tree, tree);
153 static tree fold_builtin_inf (location_t, tree, int);
154 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
155 static bool validate_arg (const_tree, enum tree_code code);
156 static rtx expand_builtin_fabs (tree, rtx, rtx);
157 static rtx expand_builtin_signbit (tree, rtx);
158 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
159 static tree fold_builtin_isascii (location_t, tree);
160 static tree fold_builtin_toascii (location_t, tree);
161 static tree fold_builtin_isdigit (location_t, tree);
162 static tree fold_builtin_fabs (location_t, tree, tree);
163 static tree fold_builtin_abs (location_t, tree, tree);
164 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
165 enum tree_code);
166 static tree fold_builtin_0 (location_t, tree);
167 static tree fold_builtin_1 (location_t, tree, tree);
168 static tree fold_builtin_2 (location_t, tree, tree, tree);
169 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_varargs (location_t, tree, tree*, int);
171
172 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
173 static tree fold_builtin_strspn (location_t, tree, tree);
174 static tree fold_builtin_strcspn (location_t, tree, tree);
175
176 static rtx expand_builtin_object_size (tree);
177 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 enum built_in_function);
179 static void maybe_emit_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_free_warning (tree);
182 static tree fold_builtin_object_size (tree, tree);
183
184 unsigned HOST_WIDE_INT target_newline;
185 unsigned HOST_WIDE_INT target_percent;
186 static unsigned HOST_WIDE_INT target_c;
187 static unsigned HOST_WIDE_INT target_s;
188 char target_percent_c[3];
189 char target_percent_s[3];
190 char target_percent_s_newline[4];
191 static tree do_mpfr_remquo (tree, tree, tree);
192 static tree do_mpfr_lgamma_r (tree, tree, tree);
193 static void expand_builtin_sync_synchronize (void);
194
195 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
196
197 static bool
198 is_builtin_name (const char *name)
199 {
200 if (strncmp (name, "__builtin_", 10) == 0)
201 return true;
202 if (strncmp (name, "__sync_", 7) == 0)
203 return true;
204 if (strncmp (name, "__atomic_", 9) == 0)
205 return true;
206 return false;
207 }
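/* Illustrative examples (not part of the original source): with the
   prefixes checked above, is_builtin_name ("__builtin_memcpy"),
   is_builtin_name ("__sync_fetch_and_add_4") and
   is_builtin_name ("__atomic_load_8") return true, while
   is_builtin_name ("memcpy") returns false.  */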
208
209
210 /* Return true if DECL is a function symbol representing a built-in. */
211
212 bool
213 is_builtin_fn (tree decl)
214 {
215 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
216 }
217
218 /* Return true if NODE should be considered for inline expansion regardless
219 of the optimization level. This means whenever a function is invoked with
220 its "internal" name, which normally contains the prefix "__builtin". */
221
222 bool
223 called_as_built_in (tree node)
224 {
225 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
226 we want the name used to call the function, not the name it
227 will have. */
228 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
229 return is_builtin_name (name);
230 }
231
232 /* Compute values M and N such that M divides (address of EXP - N) and such
233 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
234 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
235 *ALIGNP and any bit-offset to *BITPOSP.
236
237 Note that the address (and thus the alignment) computed here is based
238 on the address to which a symbol resolves, whereas DECL_ALIGN is based
239 on the address at which an object is actually located. These two
240 addresses are not always the same. For example, on ARM targets,
241 the address &foo of a Thumb function foo() has the lowest bit set,
242 whereas foo() itself starts on an even address.
243
244 If ADDR_P is true we are taking the address of the memory reference EXP
245 and thus cannot rely on the access taking place. */
246
247 static bool
248 get_object_alignment_2 (tree exp, unsigned int *alignp,
249 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
250 {
251 HOST_WIDE_INT bitsize, bitpos;
252 tree offset;
253 machine_mode mode;
254 int unsignedp, reversep, volatilep;
255 unsigned int align = BITS_PER_UNIT;
256 bool known_alignment = false;
257
258 /* Get the innermost object and the constant (bitpos) and possibly
259 variable (offset) offset of the access. */
260 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
261 &unsignedp, &reversep, &volatilep);
262
263 /* Extract alignment information from the innermost object and
264 possibly adjust bitpos and offset. */
265 if (TREE_CODE (exp) == FUNCTION_DECL)
266 {
267 /* Function addresses can encode extra information besides their
268 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
269 allows the low bit to be used as a virtual bit, we know
270 that the address itself must be at least 2-byte aligned. */
271 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
272 align = 2 * BITS_PER_UNIT;
273 }
274 else if (TREE_CODE (exp) == LABEL_DECL)
275 ;
276 else if (TREE_CODE (exp) == CONST_DECL)
277 {
278 /* The alignment of a CONST_DECL is determined by its initializer. */
279 exp = DECL_INITIAL (exp);
280 align = TYPE_ALIGN (TREE_TYPE (exp));
281 if (CONSTANT_CLASS_P (exp))
282 align = targetm.constant_alignment (exp, align);
283
284 known_alignment = true;
285 }
286 else if (DECL_P (exp))
287 {
288 align = DECL_ALIGN (exp);
289 known_alignment = true;
290 }
291 else if (TREE_CODE (exp) == INDIRECT_REF
292 || TREE_CODE (exp) == MEM_REF
293 || TREE_CODE (exp) == TARGET_MEM_REF)
294 {
295 tree addr = TREE_OPERAND (exp, 0);
296 unsigned ptr_align;
297 unsigned HOST_WIDE_INT ptr_bitpos;
298 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
299
300 /* If the address is explicitly aligned, handle that. */
301 if (TREE_CODE (addr) == BIT_AND_EXPR
302 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
303 {
304 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
305 ptr_bitmask *= BITS_PER_UNIT;
306 align = least_bit_hwi (ptr_bitmask);
307 addr = TREE_OPERAND (addr, 0);
308 }
309
310 known_alignment
311 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
312 align = MAX (ptr_align, align);
313
314 /* Re-apply explicit alignment to the bitpos. */
315 ptr_bitpos &= ptr_bitmask;
316
317 /* The alignment of the pointer operand in a TARGET_MEM_REF
318 has to take the variable offset parts into account. */
319 if (TREE_CODE (exp) == TARGET_MEM_REF)
320 {
321 if (TMR_INDEX (exp))
322 {
323 unsigned HOST_WIDE_INT step = 1;
324 if (TMR_STEP (exp))
325 step = TREE_INT_CST_LOW (TMR_STEP (exp));
326 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
327 }
328 if (TMR_INDEX2 (exp))
329 align = BITS_PER_UNIT;
330 known_alignment = false;
331 }
332
333 /* When EXP is an actual memory reference then we can use
334 TYPE_ALIGN of a pointer indirection to derive alignment.
335 Do so only if get_pointer_alignment_1 did not reveal absolute
336 alignment knowledge and if using that alignment would
337 improve the situation. */
338 unsigned int talign;
339 if (!addr_p && !known_alignment
340 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
341 && talign > align)
342 align = talign;
343 else
344 {
345 /* Else adjust bitpos accordingly. */
346 bitpos += ptr_bitpos;
347 if (TREE_CODE (exp) == MEM_REF
348 || TREE_CODE (exp) == TARGET_MEM_REF)
349 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
350 }
351 }
352 else if (TREE_CODE (exp) == STRING_CST)
353 {
354 /* STRING_CSTs are the only constant objects we allow not to be
355 wrapped inside a CONST_DECL. */
356 align = TYPE_ALIGN (TREE_TYPE (exp));
357 if (CONSTANT_CLASS_P (exp))
358 align = targetm.constant_alignment (exp, align);
359
360 known_alignment = true;
361 }
362
363 /* If there is a non-constant offset part extract the maximum
364 alignment that can prevail. */
365 if (offset)
366 {
367 unsigned int trailing_zeros = tree_ctz (offset);
368 if (trailing_zeros < HOST_BITS_PER_INT)
369 {
370 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
371 if (inner)
372 align = MIN (align, inner);
373 }
374 }
375
376 *alignp = align;
377 *bitposp = bitpos & (*alignp - 1);
378 return known_alignment;
379 }
380
381 /* For a memory reference expression EXP compute values M and N such that M
382 divides (&EXP - N) and such that N < M. If these numbers can be determined,
383 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
384 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
385
386 bool
387 get_object_alignment_1 (tree exp, unsigned int *alignp,
388 unsigned HOST_WIDE_INT *bitposp)
389 {
390 return get_object_alignment_2 (exp, alignp, bitposp, false);
391 }
392
393 /* Return the alignment in bits of EXP, an object. */
394
395 unsigned int
396 get_object_alignment (tree exp)
397 {
398 unsigned HOST_WIDE_INT bitpos = 0;
399 unsigned int align;
400
401 get_object_alignment_1 (exp, &align, &bitpos);
402
403 /* align and bitpos now specify known low bits of the pointer.
404 ptr & (align - 1) == bitpos. */
405
406 if (bitpos != 0)
407 align = least_bit_hwi (bitpos);
408 return align;
409 }
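/* Worked example (illustrative, not part of the original source): if
   get_object_alignment_1 reports ALIGN == 32 and BITPOS == 8 (both in
   bits, i.e. the address is one byte past a 4-byte boundary), the
   function returns least_bit_hwi (8) == 8, so only 1-byte alignment can
   be guaranteed for the object's address.  */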
410
411 /* For a pointer valued expression EXP compute values M and N such that M
412 divides (EXP - N) and such that N < M. If these numbers can be determined,
413 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
414 the results are just a conservative approximation.
415
416 If EXP is not a pointer, false is returned too. */
417
418 bool
419 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
420 unsigned HOST_WIDE_INT *bitposp)
421 {
422 STRIP_NOPS (exp);
423
424 if (TREE_CODE (exp) == ADDR_EXPR)
425 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
426 alignp, bitposp, true);
427 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
428 {
429 unsigned int align;
430 unsigned HOST_WIDE_INT bitpos;
431 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
432 &align, &bitpos);
433 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
434 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
435 else
436 {
437 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
438 if (trailing_zeros < HOST_BITS_PER_INT)
439 {
440 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
441 if (inner)
442 align = MIN (align, inner);
443 }
444 }
445 *alignp = align;
446 *bitposp = bitpos & (align - 1);
447 return res;
448 }
449 else if (TREE_CODE (exp) == SSA_NAME
450 && POINTER_TYPE_P (TREE_TYPE (exp)))
451 {
452 unsigned int ptr_align, ptr_misalign;
453 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454
455 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
456 {
457 *bitposp = ptr_misalign * BITS_PER_UNIT;
458 *alignp = ptr_align * BITS_PER_UNIT;
459 /* Make sure to return a sensible alignment when the multiplication
460 by BITS_PER_UNIT overflowed. */
461 if (*alignp == 0)
462 *alignp = 1u << (HOST_BITS_PER_INT - 1);
463 /* We cannot really tell whether this result is an approximation. */
464 return false;
465 }
466 else
467 {
468 *bitposp = 0;
469 *alignp = BITS_PER_UNIT;
470 return false;
471 }
472 }
473 else if (TREE_CODE (exp) == INTEGER_CST)
474 {
475 *alignp = BIGGEST_ALIGNMENT;
476 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
477 & (BIGGEST_ALIGNMENT - 1));
478 return true;
479 }
480
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
484 }
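/* Worked example (illustrative, not part of the original source): for a
   POINTER_PLUS_EXPR "p + 4 * i" where P is known to be 16-byte aligned
   (ALIGN == 128, BITPOS == 0) and the non-constant offset "4 * i" has
   tree_ctz == 2, the inner alignment is (1U << 2) * BITS_PER_UNIT == 32,
   so the result is MIN (128, 32) == 32 bits, i.e. 4-byte alignment.  */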
485
486 /* Return the alignment in bits of EXP, a pointer valued expression.
487 The alignment returned is, by default, the alignment of the thing that
488 EXP points to. If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.
489
490 Otherwise, look at the expression to see if we can do better, i.e., if the
491 expression is actually pointing at an object whose alignment is tighter. */
492
493 unsigned int
494 get_pointer_alignment (tree exp)
495 {
496 unsigned HOST_WIDE_INT bitpos = 0;
497 unsigned int align;
498
499 get_pointer_alignment_1 (exp, &align, &bitpos);
500
501 /* align and bitpos now specify known low bits of the pointer.
502 ptr & (align - 1) == bitpos. */
503
504 if (bitpos != 0)
505 align = least_bit_hwi (bitpos);
506
507 return align;
508 }
509
510 /* Return the number of non-zero elements in the sequence
511 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
512 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
513
514 static unsigned
515 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
516 {
517 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
518
519 unsigned n;
520
521 if (eltsize == 1)
522 {
523 /* Optimize the common case of plain char. */
524 for (n = 0; n < maxelts; n++)
525 {
526 const char *elt = (const char*) ptr + n;
527 if (!*elt)
528 break;
529 }
530 }
531 else
532 {
533 for (n = 0; n < maxelts; n++)
534 {
535 const char *elt = (const char*) ptr + n * eltsize;
536 if (!memcmp (elt, "\0\0\0\0", eltsize))
537 break;
538 }
539 }
540 return n;
541 }
542
543 /* Compute the length of a null-terminated character string or wide
544 character string handling character sizes of 1, 2, and 4 bytes.
545 TREE_STRING_LENGTH is not the right way because it evaluates to
546 the size of the character array in bytes (as opposed to characters)
547 and because it can contain a zero byte in the middle.
548
549 ONLY_VALUE should be nonzero if the result is not going to be emitted
550 into the instruction stream and zero if it is going to be expanded.
551 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
552 is returned, otherwise NULL, since
553 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
554 evaluate the side-effects.
555
556 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
557 accesses. Note that this implies the result is not going to be emitted
558 into the instruction stream.
559
560 The value returned is of type `ssizetype'.
561
562 Unfortunately, string_constant can't access the values of const char
563 arrays with initializers, so neither can we do so here. */
564
565 tree
566 c_strlen (tree src, int only_value)
567 {
568 STRIP_NOPS (src);
569 if (TREE_CODE (src) == COND_EXPR
570 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
571 {
572 tree len1, len2;
573
574 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
575 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
576 if (tree_int_cst_equal (len1, len2))
577 return len1;
578 }
579
580 if (TREE_CODE (src) == COMPOUND_EXPR
581 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
582 return c_strlen (TREE_OPERAND (src, 1), only_value);
583
584 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
585
586 /* Offset from the beginning of the string in bytes. */
587 tree byteoff;
588 src = string_constant (src, &byteoff);
589 if (src == 0)
590 return NULL_TREE;
591
592 /* Determine the size of the string element. */
593 unsigned eltsize
594 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
595
596 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
597 length of SRC. */
598 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
599
600 /* PTR can point to the byte representation of any string type, including
601 char* and wchar_t*. */
602 const char *ptr = TREE_STRING_POINTER (src);
603
604 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
605 {
606 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
607 compute the offset to the following null if we don't know where to
608 start searching for it. */
609 if (string_length (ptr, eltsize, maxelts) < maxelts)
610 {
611 /* Return when an embedded null character is found. */
612 return NULL_TREE;
613 }
614
615 /* We don't know the starting offset, but we do know that the string
616 has no internal zero bytes. We can assume that the offset falls
617 within the bounds of the string; otherwise, the programmer deserves
618 what he gets. Subtract the offset from the length of the string,
619 and return that. This would perhaps not be valid if we were dealing
620 with named arrays in addition to literal string constants. */
621
622 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
623 }
624
625 /* Offset from the beginning of the string in elements. */
626 HOST_WIDE_INT eltoff;
627
628 /* We have a known offset into the string. Start searching there for
629 a null character if we can represent it as a single HOST_WIDE_INT. */
630 if (byteoff == 0)
631 eltoff = 0;
632 else if (! tree_fits_shwi_p (byteoff))
633 eltoff = -1;
634 else
635 eltoff = tree_to_shwi (byteoff) / eltsize;
636
637 /* If the offset is known to be out of bounds, warn, and call strlen at
638 runtime. */
639 if (eltoff < 0 || eltoff > maxelts)
640 {
641 /* Suppress multiple warnings for propagated constant strings. */
642 if (only_value != 2
643 && !TREE_NO_WARNING (src))
644 {
645 warning_at (loc, 0, "offset %qwi outside bounds of constant string",
646 eltoff);
647 TREE_NO_WARNING (src) = 1;
648 }
649 return NULL_TREE;
650 }
651
652 /* Use strlen to search for the first zero byte. Since any strings
653 constructed with build_string will have nulls appended, we win even
654 if we get handed something like (char[4])"abcd".
655
656 Since ELTOFF is our starting index into the string, no further
657 calculation is needed. */
658 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
659 maxelts - eltoff);
660
661 return ssize_int (len);
662 }
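/* Illustrative examples (not part of the original source), assuming
   plain char strings (ELTSIZE == 1):
     - SRC resolving to "abc" with byte offset 0 yields ssize_int (3);
     - SRC resolving to "abc" with byte offset 1 yields ssize_int (2);
     - SRC resolving to "foo\0bar" with a non-constant offset yields
       NULL_TREE, because the embedded nul makes the result depend on
       where the search would start.  */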
663
664 /* Return a constant integer corresponding to target reading
665 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
666
667 static rtx
668 c_readstr (const char *str, scalar_int_mode mode)
669 {
670 HOST_WIDE_INT ch;
671 unsigned int i, j;
672 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
673
674 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
675 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
676 / HOST_BITS_PER_WIDE_INT;
677
678 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
679 for (i = 0; i < len; i++)
680 tmp[i] = 0;
681
682 ch = 1;
683 for (i = 0; i < GET_MODE_SIZE (mode); i++)
684 {
685 j = i;
686 if (WORDS_BIG_ENDIAN)
687 j = GET_MODE_SIZE (mode) - i - 1;
688 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
689 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
690 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
691 j *= BITS_PER_UNIT;
692
693 if (ch)
694 ch = (unsigned char) str[i];
695 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
696 }
697
698 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
699 return immed_wide_int_const (c, mode);
700 }
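/* Worked example (illustrative, not part of the original source): on a
   little-endian target with 8-bit units, c_readstr ("ab", SImode) yields
   the constant 0x00006261 ('a' in the lowest byte); on a big-endian
   target it yields 0x61620000.  Once the terminating nul has been seen,
   CH stays zero and the remaining bytes are zero-filled.  */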
701
702 /* Cast a target constant CST to target CHAR and, if that value fits into
703 the host char type, return zero and put that value into the variable
704 pointed to by P. */
705
706 static int
707 target_char_cast (tree cst, char *p)
708 {
709 unsigned HOST_WIDE_INT val, hostval;
710
711 if (TREE_CODE (cst) != INTEGER_CST
712 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
713 return 1;
714
715 /* Do not care if it fits or not right here. */
716 val = TREE_INT_CST_LOW (cst);
717
718 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
719 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
720
721 hostval = val;
722 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
723 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
724
725 if (val != hostval)
726 return 1;
727
728 *p = hostval;
729 return 0;
730 }
731
732 /* Similar to save_expr, but assumes that arbitrary code is not executed
733 in between the multiple evaluations. In particular, we assume that a
734 non-addressable local variable will not be modified. */
735
736 static tree
737 builtin_save_expr (tree exp)
738 {
739 if (TREE_CODE (exp) == SSA_NAME
740 || (TREE_ADDRESSABLE (exp) == 0
741 && (TREE_CODE (exp) == PARM_DECL
742 || (VAR_P (exp) && !TREE_STATIC (exp)))))
743 return exp;
744
745 return save_expr (exp);
746 }
747
748 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
749 times to get the address of either a higher stack frame, or a return
750 address located within it (depending on FNDECL_CODE). */
751
752 static rtx
753 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
754 {
755 int i;
756 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
757 if (tem == NULL_RTX)
758 {
759 /* For a zero count with __builtin_return_address, we don't care what
760 frame address we return, because target-specific definitions will
761 override us. Therefore frame pointer elimination is OK, and using
762 the soft frame pointer is OK.
763
764 For a nonzero count, or a zero count with __builtin_frame_address,
765 we require a stable offset from the current frame pointer to the
766 previous one, so we must use the hard frame pointer, and
767 we must disable frame pointer elimination. */
768 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
769 tem = frame_pointer_rtx;
770 else
771 {
772 tem = hard_frame_pointer_rtx;
773
774 /* Tell reload not to eliminate the frame pointer. */
775 crtl->accesses_prior_frames = 1;
776 }
777 }
778
779 if (count > 0)
780 SETUP_FRAME_ADDRESSES ();
781
782 /* On the SPARC, the return address is not in the frame, it is in a
783 register. There is no way to access it off of the current frame
784 pointer, but it can be accessed off the previous frame pointer by
785 reading the value from the register window save area. */
786 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
787 count--;
788
789 /* Scan back COUNT frames to the specified frame. */
790 for (i = 0; i < count; i++)
791 {
792 /* Assume the dynamic chain pointer is in the word that the
793 frame address points to, unless otherwise specified. */
794 tem = DYNAMIC_CHAIN_ADDRESS (tem);
795 tem = memory_address (Pmode, tem);
796 tem = gen_frame_mem (Pmode, tem);
797 tem = copy_to_reg (tem);
798 }
799
800 /* For __builtin_frame_address, return what we've got. But, on
801 the SPARC for example, we may have to add a bias. */
802 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
803 return FRAME_ADDR_RTX (tem);
804
805 /* For __builtin_return_address, get the return address from that frame. */
806 #ifdef RETURN_ADDR_RTX
807 tem = RETURN_ADDR_RTX (count, tem);
808 #else
809 tem = memory_address (Pmode,
810 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
811 tem = gen_frame_mem (Pmode, tem);
812 #endif
813 return tem;
814 }
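/* Illustrative usage (not part of the original source) of the builtins
   this helper expands:

     void *ra = __builtin_return_address (0);   // this function's return address
     void *fp = __builtin_frame_address (1);    // the caller's frame address

   A COUNT of 0 refers to the current frame; each increment walks one
   level further up the dynamic chain, as the loop above does.  */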
815
816 /* Alias set used for setjmp buffer. */
817 static alias_set_type setjmp_alias_set = -1;
818
819 /* Construct the leading half of a __builtin_setjmp call. Control will
820 return to RECEIVER_LABEL. This is also called directly by the SJLJ
821 exception handling code. */
822
823 void
824 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
825 {
826 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
827 rtx stack_save;
828 rtx mem;
829
830 if (setjmp_alias_set == -1)
831 setjmp_alias_set = new_alias_set ();
832
833 buf_addr = convert_memory_address (Pmode, buf_addr);
834
835 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
836
837 /* We store the frame pointer and the address of receiver_label in
838 the buffer and use the rest of it for the stack save area, which
839 is machine-dependent. */
840
841 mem = gen_rtx_MEM (Pmode, buf_addr);
842 set_mem_alias_set (mem, setjmp_alias_set);
843 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
844
845 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
846 GET_MODE_SIZE (Pmode))),
847 set_mem_alias_set (mem, setjmp_alias_set);
848
849 emit_move_insn (validize_mem (mem),
850 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
851
852 stack_save = gen_rtx_MEM (sa_mode,
853 plus_constant (Pmode, buf_addr,
854 2 * GET_MODE_SIZE (Pmode)));
855 set_mem_alias_set (stack_save, setjmp_alias_set);
856 emit_stack_save (SAVE_NONLOCAL, &stack_save);
857
858 /* If there is further processing to do, do it. */
859 if (targetm.have_builtin_setjmp_setup ())
860 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
861
862 /* We have a nonlocal label. */
863 cfun->has_nonlocal_label = 1;
864 }
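/* Summary of the jmp_buf layout established above (derived from the
   stores just emitted):

     word 0          : frame pointer value for this frame
     word 1          : address of RECEIVER_LABEL
     word 2 onwards  : stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below use
   the same offsets when reading or updating the buffer.  */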
865
866 /* Construct the trailing part of a __builtin_setjmp call. This is
867 also called directly by the SJLJ exception handling code.
868 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
869
870 void
871 expand_builtin_setjmp_receiver (rtx receiver_label)
872 {
873 rtx chain;
874
875 /* Mark the FP as used when we get here, so we have to make sure it's
876 marked as used by this function. */
877 emit_use (hard_frame_pointer_rtx);
878
879 /* Mark the static chain as clobbered here so life information
880 doesn't get messed up for it. */
881 chain = rtx_for_static_chain (current_function_decl, true);
882 if (chain && REG_P (chain))
883 emit_clobber (chain);
884
885 /* Now put in the code to restore the frame pointer, and argument
886 pointer, if needed. */
887 if (! targetm.have_nonlocal_goto ())
888 {
889 /* First adjust our frame pointer to its actual value. It was
890 previously set to the start of the virtual area corresponding to
891 the stacked variables when we branched here and now needs to be
892 adjusted to the actual hardware fp value.
893
894 Assignments to virtual registers are converted by
895 instantiate_virtual_regs into the corresponding assignment
896 to the underlying register (fp in this case) that makes
897 the original assignment true.
898 So the following insn will actually be decrementing fp by
899 TARGET_STARTING_FRAME_OFFSET. */
900 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
901
902 /* Restoring the frame pointer also modifies the hard frame pointer.
903 Mark it used (so that the previous assignment remains live once
904 the frame pointer is eliminated) and clobbered (to represent the
905 implicit update from the assignment). */
906 emit_use (hard_frame_pointer_rtx);
907 emit_clobber (hard_frame_pointer_rtx);
908 }
909
910 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
911 {
912 /* If the argument pointer can be eliminated in favor of the
913 frame pointer, we don't need to restore it. We assume here
914 that if such an elimination is present, it can always be used.
915 This is the case on all known machines; if we don't make this
916 assumption, we do unnecessary saving on many machines. */
917 size_t i;
918 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
919
920 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
921 if (elim_regs[i].from == ARG_POINTER_REGNUM
922 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
923 break;
924
925 if (i == ARRAY_SIZE (elim_regs))
926 {
927 /* Now restore our arg pointer from the address at which it
928 was saved in our stack frame. */
929 emit_move_insn (crtl->args.internal_arg_pointer,
930 copy_to_reg (get_arg_pointer_save_area ()));
931 }
932 }
933
934 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
935 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
936 else if (targetm.have_nonlocal_goto_receiver ())
937 emit_insn (targetm.gen_nonlocal_goto_receiver ());
938 else
939 { /* Nothing */ }
940
941 /* We must not allow the code we just generated to be reordered by
942 scheduling. Specifically, the update of the frame pointer must
943 happen immediately, not later. */
944 emit_insn (gen_blockage ());
945 }
946
947 /* __builtin_longjmp is passed a pointer to an array of five words (not
948 all will be used on all machines). It operates similarly to the C
949 library function of the same name, but is more efficient. Much of
950 the code below is copied from the handling of non-local gotos. */
951
952 static void
953 expand_builtin_longjmp (rtx buf_addr, rtx value)
954 {
955 rtx fp, lab, stack;
956 rtx_insn *insn, *last;
957 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
958
959 /* DRAP is needed for stack realign if longjmp is expanded to current
960 function */
961 if (SUPPORTS_STACK_ALIGNMENT)
962 crtl->need_drap = true;
963
964 if (setjmp_alias_set == -1)
965 setjmp_alias_set = new_alias_set ();
966
967 buf_addr = convert_memory_address (Pmode, buf_addr);
968
969 buf_addr = force_reg (Pmode, buf_addr);
970
971 /* We require that the user pass a second argument of 1, because
972 that is what builtin_setjmp will return. */
973 gcc_assert (value == const1_rtx);
974
975 last = get_last_insn ();
976 if (targetm.have_builtin_longjmp ())
977 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
978 else
979 {
980 fp = gen_rtx_MEM (Pmode, buf_addr);
981 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
982 GET_MODE_SIZE (Pmode)));
983
984 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
985 2 * GET_MODE_SIZE (Pmode)));
986 set_mem_alias_set (fp, setjmp_alias_set);
987 set_mem_alias_set (lab, setjmp_alias_set);
988 set_mem_alias_set (stack, setjmp_alias_set);
989
990 /* Pick up FP, label, and SP from the block and jump. This code is
991 from expand_goto in stmt.c; see there for detailed comments. */
992 if (targetm.have_nonlocal_goto ())
993 /* We have to pass a value to the nonlocal_goto pattern that will
994 get copied into the static_chain pointer, but it does not matter
995 what that value is, because builtin_setjmp does not use it. */
996 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
997 else
998 {
999 lab = copy_to_reg (lab);
1000
1001 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1002 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1003
1004 emit_move_insn (hard_frame_pointer_rtx, fp);
1005 emit_stack_restore (SAVE_NONLOCAL, stack);
1006
1007 emit_use (hard_frame_pointer_rtx);
1008 emit_use (stack_pointer_rtx);
1009 emit_indirect_jump (lab);
1010 }
1011 }
1012
1013 /* Search backwards and mark the jump insn as a non-local goto.
1014 Note that this precludes the use of __builtin_longjmp to a
1015 __builtin_setjmp target in the same function. However, we've
1016 already cautioned the user that these functions are for
1017 internal exception handling use only. */
1018 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1019 {
1020 gcc_assert (insn != last);
1021
1022 if (JUMP_P (insn))
1023 {
1024 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1025 break;
1026 }
1027 else if (CALL_P (insn))
1028 break;
1029 }
1030 }
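/* Illustrative usage (not part of the original source); these builtins
   are intended for internal exception-handling style code, and, as the
   comment in the expander above notes, the longjmp must not target a
   __builtin_setjmp in the same function:

     void *buf[5];

     void raise_it (void) { __builtin_longjmp (buf, 1); }

     int try_it (void)
     {
       if (__builtin_setjmp (buf) == 0)
         { raise_it (); return 0; }   // not reached; longjmp unwinds to setjmp
       return 1;                      // resumed here after the longjmp
     }
*/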
1031
1032 static inline bool
1033 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1034 {
1035 return (iter->i < iter->n);
1036 }
1037
1038 /* This function validates the types of a function call argument list
1039 against a specified list of tree_codes. If the last specifier is a 0,
1040 that represents an ellipsis, otherwise the last specifier must be a
1041 VOID_TYPE. */
1042
1043 static bool
1044 validate_arglist (const_tree callexpr, ...)
1045 {
1046 enum tree_code code;
1047 bool res = 0;
1048 va_list ap;
1049 const_call_expr_arg_iterator iter;
1050 const_tree arg;
1051
1052 va_start (ap, callexpr);
1053 init_const_call_expr_arg_iterator (callexpr, &iter);
1054
1055 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1056 tree fn = CALL_EXPR_FN (callexpr);
1057 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1058
1059 for (unsigned argno = 1; ; ++argno)
1060 {
1061 code = (enum tree_code) va_arg (ap, int);
1062
1063 switch (code)
1064 {
1065 case 0:
1066 /* This signifies an ellipsis; any further arguments are all ok. */
1067 res = true;
1068 goto end;
1069 case VOID_TYPE:
1070 /* This signifies an endlink; if no arguments remain, return
1071 true, otherwise return false. */
1072 res = !more_const_call_expr_args_p (&iter);
1073 goto end;
1074 case POINTER_TYPE:
1075 /* The actual argument must be nonnull when either the whole
1076 called function has been declared nonnull, or when the formal
1077 argument corresponding to the actual argument has been. */
1078 if (argmap
1079 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1080 {
1081 arg = next_const_call_expr_arg (&iter);
1082 if (!validate_arg (arg, code) || integer_zerop (arg))
1083 goto end;
1084 break;
1085 }
1086 /* FALLTHRU */
1087 default:
1088 /* If no parameters remain or the parameter's code does not
1089 match the specified code, return false. Otherwise continue
1090 checking any remaining arguments. */
1091 arg = next_const_call_expr_arg (&iter);
1092 if (!validate_arg (arg, code))
1093 goto end;
1094 break;
1095 }
1096 }
1097
1098 /* We need gotos here since we can only have one VA_CLOSE in a
1099 function. */
1100 end: ;
1101 va_end (ap);
1102
1103 BITMAP_FREE (argmap);
1104
1105 return res;
1106 }
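/* Illustrative call (not part of the original source): for a builtin
   with a memchr-like prototype, a typical use might be

     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
                       VOID_TYPE)

   which succeeds only if EXP has exactly three arguments with those
   tree codes (rejecting a literal null for the pointer when the callee
   is declared nonnull); a trailing 0 instead of VOID_TYPE would allow
   any number of further arguments.  */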
1107
1108 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1109 and the address of the save area. */
1110
1111 static rtx
1112 expand_builtin_nonlocal_goto (tree exp)
1113 {
1114 tree t_label, t_save_area;
1115 rtx r_label, r_save_area, r_fp, r_sp;
1116 rtx_insn *insn;
1117
1118 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1119 return NULL_RTX;
1120
1121 t_label = CALL_EXPR_ARG (exp, 0);
1122 t_save_area = CALL_EXPR_ARG (exp, 1);
1123
1124 r_label = expand_normal (t_label);
1125 r_label = convert_memory_address (Pmode, r_label);
1126 r_save_area = expand_normal (t_save_area);
1127 r_save_area = convert_memory_address (Pmode, r_save_area);
1128 /* Copy the address of the save location to a register just in case it was
1129 based on the frame pointer. */
1130 r_save_area = copy_to_reg (r_save_area);
1131 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1132 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1133 plus_constant (Pmode, r_save_area,
1134 GET_MODE_SIZE (Pmode)));
1135
1136 crtl->has_nonlocal_goto = 1;
1137
1138 /* ??? We no longer need to pass the static chain value, afaik. */
1139 if (targetm.have_nonlocal_goto ())
1140 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1141 else
1142 {
1143 r_label = copy_to_reg (r_label);
1144
1145 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1146 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1147
1148 /* Restore frame pointer for containing function. */
1149 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1150 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1151
1152 /* USE of hard_frame_pointer_rtx added for consistency;
1153 not clear if really needed. */
1154 emit_use (hard_frame_pointer_rtx);
1155 emit_use (stack_pointer_rtx);
1156
1157 /* If the architecture is using a GP register, we must
1158 conservatively assume that the target function makes use of it.
1159 The prologue of functions with nonlocal gotos must therefore
1160 initialize the GP register to the appropriate value, and we
1161 must then make sure that this value is live at the point
1162 of the jump. (Note that this doesn't necessarily apply
1163 to targets with a nonlocal_goto pattern; they are free
1164 to implement it in their own way. Note also that this is
1165 a no-op if the GP register is a global invariant.) */
1166 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1167 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1168 emit_use (pic_offset_table_rtx);
1169
1170 emit_indirect_jump (r_label);
1171 }
1172
1173 /* Search backwards to the jump insn and mark it as a
1174 non-local goto. */
1175 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1176 {
1177 if (JUMP_P (insn))
1178 {
1179 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1180 break;
1181 }
1182 else if (CALL_P (insn))
1183 break;
1184 }
1185
1186 return const0_rtx;
1187 }
1188
1189 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1190 (not all will be used on all machines) that was passed to __builtin_setjmp.
1191 It updates the stack pointer in that block to the current value. This is
1192 also called directly by the SJLJ exception handling code. */
1193
1194 void
1195 expand_builtin_update_setjmp_buf (rtx buf_addr)
1196 {
1197 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1198 buf_addr = convert_memory_address (Pmode, buf_addr);
1199 rtx stack_save
1200 = gen_rtx_MEM (sa_mode,
1201 memory_address
1202 (sa_mode,
1203 plus_constant (Pmode, buf_addr,
1204 2 * GET_MODE_SIZE (Pmode))));
1205
1206 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1207 }
1208
1209 /* Expand a call to __builtin_prefetch. For a target that does not support
1210 data prefetch, evaluate the memory address argument in case it has side
1211 effects. */
1212
1213 static void
1214 expand_builtin_prefetch (tree exp)
1215 {
1216 tree arg0, arg1, arg2;
1217 int nargs;
1218 rtx op0, op1, op2;
1219
1220 if (!validate_arglist (exp, POINTER_TYPE, 0))
1221 return;
1222
1223 arg0 = CALL_EXPR_ARG (exp, 0);
1224
1225 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1226 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1227 locality). */
1228 nargs = call_expr_nargs (exp);
1229 if (nargs > 1)
1230 arg1 = CALL_EXPR_ARG (exp, 1);
1231 else
1232 arg1 = integer_zero_node;
1233 if (nargs > 2)
1234 arg2 = CALL_EXPR_ARG (exp, 2);
1235 else
1236 arg2 = integer_three_node;
1237
1238 /* Argument 0 is an address. */
1239 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1240
1241 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1242 if (TREE_CODE (arg1) != INTEGER_CST)
1243 {
1244 error ("second argument to %<__builtin_prefetch%> must be a constant");
1245 arg1 = integer_zero_node;
1246 }
1247 op1 = expand_normal (arg1);
1248 /* Argument 1 must be either zero or one. */
1249 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1250 {
1251 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1252 " using zero");
1253 op1 = const0_rtx;
1254 }
1255
1256 /* Argument 2 (locality) must be a compile-time constant int. */
1257 if (TREE_CODE (arg2) != INTEGER_CST)
1258 {
1259 error ("third argument to %<__builtin_prefetch%> must be a constant");
1260 arg2 = integer_zero_node;
1261 }
1262 op2 = expand_normal (arg2);
1263 /* Argument 2 must be 0, 1, 2, or 3. */
1264 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1265 {
1266 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1267 op2 = const0_rtx;
1268 }
1269
1270 if (targetm.have_prefetch ())
1271 {
1272 struct expand_operand ops[3];
1273
1274 create_address_operand (&ops[0], op0);
1275 create_integer_operand (&ops[1], INTVAL (op1));
1276 create_integer_operand (&ops[2], INTVAL (op2));
1277 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1278 return;
1279 }
1280
1281 /* Don't do anything with direct references to volatile memory, but
1282 generate code to handle other side effects. */
1283 if (!MEM_P (op0) && side_effects_p (op0))
1284 emit_insn (op0);
1285 }
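/* Illustrative usage (not part of the original source) showing the
   defaults handled above:

     __builtin_prefetch (p);          // rw = 0 (read), locality = 3
     __builtin_prefetch (p, 1);       // prefetch for a write, locality = 3
     __builtin_prefetch (p, 0, 1);    // read, low temporal locality

   Non-constant rw/locality arguments are diagnosed and replaced by zero,
   as coded above.  */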
1286
1287 /* Get a MEM rtx for expression EXP which is the address of an operand
1288 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1289 the maximum length of the block of memory that might be accessed or
1290 NULL if unknown. */
1291
1292 static rtx
1293 get_memory_rtx (tree exp, tree len)
1294 {
1295 tree orig_exp = exp;
1296 rtx addr, mem;
1297
1298 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1299 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1300 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1301 exp = TREE_OPERAND (exp, 0);
1302
1303 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1304 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1305
1306 /* Get an expression we can use to find the attributes to assign to MEM.
1307 First remove any nops. */
1308 while (CONVERT_EXPR_P (exp)
1309 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1310 exp = TREE_OPERAND (exp, 0);
1311
1312 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1313 (as builtin stringops may alias with anything). */
1314 exp = fold_build2 (MEM_REF,
1315 build_array_type (char_type_node,
1316 build_range_type (sizetype,
1317 size_one_node, len)),
1318 exp, build_int_cst (ptr_type_node, 0));
1319
1320 /* If the MEM_REF has no acceptable address, try to get the base object
1321 from the original address we got, and build an all-aliasing
1322 unknown-sized access to that one. */
1323 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1324 set_mem_attributes (mem, exp, 0);
1325 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1326 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1327 0))))
1328 {
1329 exp = build_fold_addr_expr (exp);
1330 exp = fold_build2 (MEM_REF,
1331 build_array_type (char_type_node,
1332 build_range_type (sizetype,
1333 size_zero_node,
1334 NULL)),
1335 exp, build_int_cst (ptr_type_node, 0));
1336 set_mem_attributes (mem, exp, 0);
1337 }
1338 set_mem_alias_set (mem, 0);
1339 return mem;
1340 }
1341 \f
1342 /* Built-in functions to perform an untyped call and return. */
1343
1344 #define apply_args_mode \
1345 (this_target_builtins->x_apply_args_mode)
1346 #define apply_result_mode \
1347 (this_target_builtins->x_apply_result_mode)
1348
1349 /* Return the size required for the block returned by __builtin_apply_args,
1350 and initialize apply_args_mode. */
1351
1352 static int
1353 apply_args_size (void)
1354 {
1355 static int size = -1;
1356 int align;
1357 unsigned int regno;
1358 machine_mode mode;
1359
1360 /* The values computed by this function never change. */
1361 if (size < 0)
1362 {
1363 /* The first value is the incoming arg-pointer. */
1364 size = GET_MODE_SIZE (Pmode);
1365
1366 /* The second value is the structure value address unless this is
1367 passed as an "invisible" first argument. */
1368 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1369 size += GET_MODE_SIZE (Pmode);
1370
1371 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1372 if (FUNCTION_ARG_REGNO_P (regno))
1373 {
1374 mode = targetm.calls.get_raw_arg_mode (regno);
1375
1376 gcc_assert (mode != VOIDmode);
1377
1378 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1379 if (size % align != 0)
1380 size = CEIL (size, align) * align;
1381 size += GET_MODE_SIZE (mode);
1382 apply_args_mode[regno] = mode;
1383 }
1384 else
1385 {
1386 apply_args_mode[regno] = VOIDmode;
1387 }
1388 }
1389 return size;
1390 }
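/* Worked example (illustrative, not part of the original source): with
   an 8-byte Pmode, a non-null targetm.calls.struct_value_rtx, and
   argument registers using 8-byte integer and 16-byte vector modes, the
   block starts with 8 bytes for the incoming arg pointer and 8 bytes for
   the structure value address; each argument register is then appended
   at an offset rounded up to its natural alignment, e.g. an offset of 24
   is rounded to 32 before a 16-byte register is stored.  */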
1391
1392 /* Return the size required for the block returned by __builtin_apply,
1393 and initialize apply_result_mode. */
1394
1395 static int
1396 apply_result_size (void)
1397 {
1398 static int size = -1;
1399 int align, regno;
1400 machine_mode mode;
1401
1402 /* The values computed by this function never change. */
1403 if (size < 0)
1404 {
1405 size = 0;
1406
1407 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1408 if (targetm.calls.function_value_regno_p (regno))
1409 {
1410 mode = targetm.calls.get_raw_result_mode (regno);
1411
1412 gcc_assert (mode != VOIDmode);
1413
1414 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1415 if (size % align != 0)
1416 size = CEIL (size, align) * align;
1417 size += GET_MODE_SIZE (mode);
1418 apply_result_mode[regno] = mode;
1419 }
1420 else
1421 apply_result_mode[regno] = VOIDmode;
1422
1423 /* Allow targets that use untyped_call and untyped_return to override
1424 the size so that machine-specific information can be stored here. */
1425 #ifdef APPLY_RESULT_SIZE
1426 size = APPLY_RESULT_SIZE;
1427 #endif
1428 }
1429 return size;
1430 }
1431
1432 /* Create a vector describing the result block RESULT. If SAVEP is true,
1433 the result block is used to save the values; otherwise it is used to
1434 restore the values. */
1435
1436 static rtx
1437 result_vector (int savep, rtx result)
1438 {
1439 int regno, size, align, nelts;
1440 machine_mode mode;
1441 rtx reg, mem;
1442 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1443
1444 size = nelts = 0;
1445 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1446 if ((mode = apply_result_mode[regno]) != VOIDmode)
1447 {
1448 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1449 if (size % align != 0)
1450 size = CEIL (size, align) * align;
1451 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1452 mem = adjust_address (result, mode, size);
1453 savevec[nelts++] = (savep
1454 ? gen_rtx_SET (mem, reg)
1455 : gen_rtx_SET (reg, mem));
1456 size += GET_MODE_SIZE (mode);
1457 }
1458 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1459 }
1460
1461 /* Save the state required to perform an untyped call with the same
1462 arguments as were passed to the current function. */
1463
1464 static rtx
1465 expand_builtin_apply_args_1 (void)
1466 {
1467 rtx registers, tem;
1468 int size, align, regno;
1469 machine_mode mode;
1470 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1471
1472 /* Create a block where the arg-pointer, structure value address,
1473 and argument registers can be saved. */
1474 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1475
1476 /* Walk past the arg-pointer and structure value address. */
1477 size = GET_MODE_SIZE (Pmode);
1478 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1479 size += GET_MODE_SIZE (Pmode);
1480
1481 /* Save each register used in calling a function to the block. */
1482 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1483 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 {
1485 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1486 if (size % align != 0)
1487 size = CEIL (size, align) * align;
1488
1489 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1490
1491 emit_move_insn (adjust_address (registers, mode, size), tem);
1492 size += GET_MODE_SIZE (mode);
1493 }
1494
1495 /* Save the arg pointer to the block. */
1496 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1497 /* We need the pointer as the caller actually passed the arguments to us,
1498 not as we might have pretended they were passed. Make sure it's a valid
1499 operand, as emit_move_insn isn't expected to handle a PLUS. */
1500 if (STACK_GROWS_DOWNWARD)
1501 tem
1502 = force_operand (plus_constant (Pmode, tem,
1503 crtl->args.pretend_args_size),
1504 NULL_RTX);
1505 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1506
1507 size = GET_MODE_SIZE (Pmode);
1508
1509 /* Save the structure value address unless this is passed as an
1510 "invisible" first argument. */
1511 if (struct_incoming_value)
1512 {
1513 emit_move_insn (adjust_address (registers, Pmode, size),
1514 copy_to_reg (struct_incoming_value));
1515 size += GET_MODE_SIZE (Pmode);
1516 }
1517
1518 /* Return the address of the block. */
1519 return copy_addr_to_reg (XEXP (registers, 0));
1520 }
1521
1522 /* __builtin_apply_args returns block of memory allocated on
1523 the stack into which is stored the arg pointer, structure
1524 value address, static chain, and all the registers that might
1525 possibly be used in performing a function call. The code is
1526 moved to the start of the function so the incoming values are
1527 saved. */
1528
1529 static rtx
1530 expand_builtin_apply_args (void)
1531 {
1532 /* Don't do __builtin_apply_args more than once in a function.
1533 Save the result of the first call and reuse it. */
1534 if (apply_args_value != 0)
1535 return apply_args_value;
1536 {
1537 /* When this function is called, it means that registers must be
1538 saved on entry to this function. So we migrate the
1539 call to the first insn of this function. */
1540 rtx temp;
1541
1542 start_sequence ();
1543 temp = expand_builtin_apply_args_1 ();
1544 rtx_insn *seq = get_insns ();
1545 end_sequence ();
1546
1547 apply_args_value = temp;
1548
1549 /* Put the insns after the NOTE that starts the function.
1550 If this is inside a start_sequence, make the outer-level insn
1551 chain current, so the code is placed at the start of the
1552 function. If internal_arg_pointer is a non-virtual pseudo,
1553 it needs to be placed after the function that initializes
1554 that pseudo. */
1555 push_topmost_sequence ();
1556 if (REG_P (crtl->args.internal_arg_pointer)
1557 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1558 emit_insn_before (seq, parm_birth_insn);
1559 else
1560 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1561 pop_topmost_sequence ();
1562 return temp;
1563 }
1564 }
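/* Illustrative usage (not part of the original source) of the untyped
   call machinery these expanders implement, forwarding the current
   function's incoming arguments to another function:

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (result);

   Here "target_fn" and the 64-byte argument-block size are placeholders
   chosen only for the example.  */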
1565
1566 /* Perform an untyped call and save the state required to perform an
1567 untyped return of whatever value was returned by the given function. */
1568
1569 static rtx
1570 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1571 {
1572 int size, align, regno;
1573 machine_mode mode;
1574 rtx incoming_args, result, reg, dest, src;
1575 rtx_call_insn *call_insn;
1576 rtx old_stack_level = 0;
1577 rtx call_fusage = 0;
1578 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1579
1580 arguments = convert_memory_address (Pmode, arguments);
1581
1582 /* Create a block where the return registers can be saved. */
1583 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1584
1585 /* Fetch the arg pointer from the ARGUMENTS block. */
1586 incoming_args = gen_reg_rtx (Pmode);
1587 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1588 if (!STACK_GROWS_DOWNWARD)
1589 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1590 incoming_args, 0, OPTAB_LIB_WIDEN);
1591
1592 /* Push a new argument block and copy the arguments. Do not allow
1593 the (potential) memcpy call below to interfere with our stack
1594 manipulations. */
1595 do_pending_stack_adjust ();
1596 NO_DEFER_POP;
1597
1598 /* Save the stack with nonlocal if available. */
1599 if (targetm.have_save_stack_nonlocal ())
1600 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1601 else
1602 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1603
1604 /* Allocate a block of memory onto the stack and copy the memory
1605 arguments to the outgoing arguments address. We can pass TRUE
1606 as the 4th argument because we just saved the stack pointer
1607 and will restore it right after the call. */
1608 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1609
1610 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1611 may have already set current_function_calls_alloca to true.
1612 current_function_calls_alloca won't be set if argsize is zero,
1613 so we have to guarantee need_drap is true here. */
1614 if (SUPPORTS_STACK_ALIGNMENT)
1615 crtl->need_drap = true;
1616
1617 dest = virtual_outgoing_args_rtx;
1618 if (!STACK_GROWS_DOWNWARD)
1619 {
1620 if (CONST_INT_P (argsize))
1621 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1622 else
1623 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1624 }
1625 dest = gen_rtx_MEM (BLKmode, dest);
1626 set_mem_align (dest, PARM_BOUNDARY);
1627 src = gen_rtx_MEM (BLKmode, incoming_args);
1628 set_mem_align (src, PARM_BOUNDARY);
1629 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1630
1631 /* Refer to the argument block. */
1632 apply_args_size ();
1633 arguments = gen_rtx_MEM (BLKmode, arguments);
1634 set_mem_align (arguments, PARM_BOUNDARY);
1635
1636 /* Walk past the arg-pointer and structure value address. */
1637 size = GET_MODE_SIZE (Pmode);
1638 if (struct_value)
1639 size += GET_MODE_SIZE (Pmode);
1640
1641 /* Restore each of the registers previously saved. Make USE insns
1642 for each of these registers for use in making the call. */
1643 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1644 if ((mode = apply_args_mode[regno]) != VOIDmode)
1645 {
1646 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1647 if (size % align != 0)
1648 size = CEIL (size, align) * align;
1649 reg = gen_rtx_REG (mode, regno);
1650 emit_move_insn (reg, adjust_address (arguments, mode, size));
1651 use_reg (&call_fusage, reg);
1652 size += GET_MODE_SIZE (mode);
1653 }
1654
1655 /* Restore the structure value address unless this is passed as an
1656 "invisible" first argument. */
1657 size = GET_MODE_SIZE (Pmode);
1658 if (struct_value)
1659 {
1660 rtx value = gen_reg_rtx (Pmode);
1661 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1662 emit_move_insn (struct_value, value);
1663 if (REG_P (struct_value))
1664 use_reg (&call_fusage, struct_value);
1665 size += GET_MODE_SIZE (Pmode);
1666 }
1667
1668 /* All arguments and registers used for the call are set up by now! */
1669 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1670
1671 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1672 more is needed, and we don't want to load it into a register as an
1673 optimization because prepare_call_address already did that if needed. */
1674 if (GET_CODE (function) != SYMBOL_REF)
1675 function = memory_address (FUNCTION_MODE, function);
1676
1677 /* Generate the actual call instruction and save the return value. */
1678 if (targetm.have_untyped_call ())
1679 {
1680 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1681 emit_call_insn (targetm.gen_untyped_call (mem, result,
1682 result_vector (1, result)));
1683 }
1684 else if (targetm.have_call_value ())
1685 {
1686 rtx valreg = 0;
1687
1688 /* Locate the unique return register. It is not possible to
1689 express a call that sets more than one return register using
1690 call_value; use untyped_call for that. In fact, untyped_call
1691 only needs to save the return registers in the given block. */
1692 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1693 if ((mode = apply_result_mode[regno]) != VOIDmode)
1694 {
1695 gcc_assert (!valreg); /* have_untyped_call required. */
1696
1697 valreg = gen_rtx_REG (mode, regno);
1698 }
1699
1700 emit_insn (targetm.gen_call_value (valreg,
1701 gen_rtx_MEM (FUNCTION_MODE, function),
1702 const0_rtx, NULL_RTX, const0_rtx));
1703
1704 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1705 }
1706 else
1707 gcc_unreachable ();
1708
1709 /* Find the CALL insn we just emitted, and attach the register usage
1710 information. */
1711 call_insn = last_call_insn ();
1712 add_function_usage_to (call_insn, call_fusage);
1713
1714 /* Restore the stack. */
1715 if (targetm.have_save_stack_nonlocal ())
1716 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1717 else
1718 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1719 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1720
1721 OK_DEFER_POP;
1722
1723 /* Return the address of the result block. */
1724 result = copy_addr_to_reg (XEXP (result, 0));
1725 return convert_memory_address (ptr_mode, result);
1726 }
1727
1728 /* Perform an untyped return. */
1729
1730 static void
1731 expand_builtin_return (rtx result)
1732 {
1733 int size, align, regno;
1734 machine_mode mode;
1735 rtx reg;
1736 rtx_insn *call_fusage = 0;
1737
1738 result = convert_memory_address (Pmode, result);
1739
1740 apply_result_size ();
1741 result = gen_rtx_MEM (BLKmode, result);
1742
1743 if (targetm.have_untyped_return ())
1744 {
1745 rtx vector = result_vector (0, result);
1746 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1747 emit_barrier ();
1748 return;
1749 }
1750
1751 /* Restore the return value and note that each value is used. */
1752 size = 0;
1753 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1754 if ((mode = apply_result_mode[regno]) != VOIDmode)
1755 {
1756 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1757 if (size % align != 0)
1758 size = CEIL (size, align) * align;
1759 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1760 emit_move_insn (reg, adjust_address (result, mode, size));
1761
1762 push_to_sequence (call_fusage);
1763 emit_use (reg);
1764 call_fusage = get_insns ();
1765 end_sequence ();
1766 size += GET_MODE_SIZE (mode);
1767 }
1768
1769 /* Put the USE insns before the return. */
1770 emit_insn (call_fusage);
1771
1772 /* Return whatever values were restored by jumping directly to the end
1773 of the function. */
1774 expand_naked_return ();
1775 }
1776
1777 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1778
1779 static enum type_class
1780 type_to_class (tree type)
1781 {
1782 switch (TREE_CODE (type))
1783 {
1784 case VOID_TYPE: return void_type_class;
1785 case INTEGER_TYPE: return integer_type_class;
1786 case ENUMERAL_TYPE: return enumeral_type_class;
1787 case BOOLEAN_TYPE: return boolean_type_class;
1788 case POINTER_TYPE: return pointer_type_class;
1789 case REFERENCE_TYPE: return reference_type_class;
1790 case OFFSET_TYPE: return offset_type_class;
1791 case REAL_TYPE: return real_type_class;
1792 case COMPLEX_TYPE: return complex_type_class;
1793 case FUNCTION_TYPE: return function_type_class;
1794 case METHOD_TYPE: return method_type_class;
1795 case RECORD_TYPE: return record_type_class;
1796 case UNION_TYPE:
1797 case QUAL_UNION_TYPE: return union_type_class;
1798 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1799 ? string_type_class : array_type_class);
1800 case LANG_TYPE: return lang_type_class;
1801 default: return no_type_class;
1802 }
1803 }
1804
1805 /* Expand a call EXP to __builtin_classify_type. */
1806
1807 static rtx
1808 expand_builtin_classify_type (tree exp)
1809 {
1810 if (call_expr_nargs (exp))
1811 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1812 return GEN_INT (no_type_class);
1813 }
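/* Illustrative examples of the values produced by the expansion above;
   the result is the type_class enumerator for the argument's type:

     __builtin_classify_type (0)           -> integer_type_class
     __builtin_classify_type (0.0)         -> real_type_class
     __builtin_classify_type ((char *) 0)  -> pointer_type_class
*/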
1814
1815 /* This helper macro, meant to be used in mathfn_built_in below, determines
1816 which among a set of builtin math functions is appropriate for a given type
1817 mode. The `F' (float) and `L' (long double) are automatically generated
1818 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1819 types, there are additional types that are considered with 'F32', 'F64',
1820 'F128', etc. suffixes. */
1821 #define CASE_MATHFN(MATHFN) \
1822 CASE_CFN_##MATHFN: \
1823 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1824 fcodel = BUILT_IN_##MATHFN##L ; break;
1825 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1826 types. */
1827 #define CASE_MATHFN_FLOATN(MATHFN) \
1828 CASE_CFN_##MATHFN: \
1829 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1830 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1831 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1832 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1833 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1834 break;
1835 /* Similar to above, but appends _R after any F/L suffix. */
1836 #define CASE_MATHFN_REENT(MATHFN) \
1837 case CFN_BUILT_IN_##MATHFN##_R: \
1838 case CFN_BUILT_IN_##MATHFN##F_R: \
1839 case CFN_BUILT_IN_##MATHFN##L_R: \
1840 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1841 fcodel = BUILT_IN_##MATHFN##L_R ; break;
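/* As a rough illustration, CASE_MATHFN (SIN) expands to approximately

     CASE_CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   where CASE_CFN_SIN (from case-cfn-macros.h) covers both the built-in
   and internal-function variants of the sin case.  */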
1842
1843 /* Return a function equivalent to FN but operating on floating-point
1844 values of type TYPE, or END_BUILTINS if no such function exists.
1845 This is purely an operation on function codes; it does not guarantee
1846 that the target actually has an implementation of the function. */
1847
1848 static built_in_function
1849 mathfn_built_in_2 (tree type, combined_fn fn)
1850 {
1851 tree mtype;
1852 built_in_function fcode, fcodef, fcodel;
1853 built_in_function fcodef16 = END_BUILTINS;
1854 built_in_function fcodef32 = END_BUILTINS;
1855 built_in_function fcodef64 = END_BUILTINS;
1856 built_in_function fcodef128 = END_BUILTINS;
1857 built_in_function fcodef32x = END_BUILTINS;
1858 built_in_function fcodef64x = END_BUILTINS;
1859 built_in_function fcodef128x = END_BUILTINS;
1860
1861 switch (fn)
1862 {
1863 CASE_MATHFN (ACOS)
1864 CASE_MATHFN (ACOSH)
1865 CASE_MATHFN (ASIN)
1866 CASE_MATHFN (ASINH)
1867 CASE_MATHFN (ATAN)
1868 CASE_MATHFN (ATAN2)
1869 CASE_MATHFN (ATANH)
1870 CASE_MATHFN (CBRT)
1871 CASE_MATHFN (CEIL)
1872 CASE_MATHFN (CEXPI)
1873 CASE_MATHFN_FLOATN (COPYSIGN)
1874 CASE_MATHFN (COS)
1875 CASE_MATHFN (COSH)
1876 CASE_MATHFN (DREM)
1877 CASE_MATHFN (ERF)
1878 CASE_MATHFN (ERFC)
1879 CASE_MATHFN (EXP)
1880 CASE_MATHFN (EXP10)
1881 CASE_MATHFN (EXP2)
1882 CASE_MATHFN (EXPM1)
1883 CASE_MATHFN (FABS)
1884 CASE_MATHFN (FDIM)
1885 CASE_MATHFN (FLOOR)
1886 CASE_MATHFN_FLOATN (FMA)
1887 CASE_MATHFN_FLOATN (FMAX)
1888 CASE_MATHFN_FLOATN (FMIN)
1889 CASE_MATHFN (FMOD)
1890 CASE_MATHFN (FREXP)
1891 CASE_MATHFN (GAMMA)
1892 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1893 CASE_MATHFN (HUGE_VAL)
1894 CASE_MATHFN (HYPOT)
1895 CASE_MATHFN (ILOGB)
1896 CASE_MATHFN (ICEIL)
1897 CASE_MATHFN (IFLOOR)
1898 CASE_MATHFN (INF)
1899 CASE_MATHFN (IRINT)
1900 CASE_MATHFN (IROUND)
1901 CASE_MATHFN (ISINF)
1902 CASE_MATHFN (J0)
1903 CASE_MATHFN (J1)
1904 CASE_MATHFN (JN)
1905 CASE_MATHFN (LCEIL)
1906 CASE_MATHFN (LDEXP)
1907 CASE_MATHFN (LFLOOR)
1908 CASE_MATHFN (LGAMMA)
1909 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1910 CASE_MATHFN (LLCEIL)
1911 CASE_MATHFN (LLFLOOR)
1912 CASE_MATHFN (LLRINT)
1913 CASE_MATHFN (LLROUND)
1914 CASE_MATHFN (LOG)
1915 CASE_MATHFN (LOG10)
1916 CASE_MATHFN (LOG1P)
1917 CASE_MATHFN (LOG2)
1918 CASE_MATHFN (LOGB)
1919 CASE_MATHFN (LRINT)
1920 CASE_MATHFN (LROUND)
1921 CASE_MATHFN (MODF)
1922 CASE_MATHFN (NAN)
1923 CASE_MATHFN (NANS)
1924 CASE_MATHFN (NEARBYINT)
1925 CASE_MATHFN (NEXTAFTER)
1926 CASE_MATHFN (NEXTTOWARD)
1927 CASE_MATHFN (POW)
1928 CASE_MATHFN (POWI)
1929 CASE_MATHFN (POW10)
1930 CASE_MATHFN (REMAINDER)
1931 CASE_MATHFN (REMQUO)
1932 CASE_MATHFN (RINT)
1933 CASE_MATHFN (ROUND)
1934 CASE_MATHFN (SCALB)
1935 CASE_MATHFN (SCALBLN)
1936 CASE_MATHFN (SCALBN)
1937 CASE_MATHFN (SIGNBIT)
1938 CASE_MATHFN (SIGNIFICAND)
1939 CASE_MATHFN (SIN)
1940 CASE_MATHFN (SINCOS)
1941 CASE_MATHFN (SINH)
1942 CASE_MATHFN_FLOATN (SQRT)
1943 CASE_MATHFN (TAN)
1944 CASE_MATHFN (TANH)
1945 CASE_MATHFN (TGAMMA)
1946 CASE_MATHFN (TRUNC)
1947 CASE_MATHFN (Y0)
1948 CASE_MATHFN (Y1)
1949 CASE_MATHFN (YN)
1950
1951 default:
1952 return END_BUILTINS;
1953 }
1954
1955 mtype = TYPE_MAIN_VARIANT (type);
1956 if (mtype == double_type_node)
1957 return fcode;
1958 else if (mtype == float_type_node)
1959 return fcodef;
1960 else if (mtype == long_double_type_node)
1961 return fcodel;
1962 else if (mtype == float16_type_node)
1963 return fcodef16;
1964 else if (mtype == float32_type_node)
1965 return fcodef32;
1966 else if (mtype == float64_type_node)
1967 return fcodef64;
1968 else if (mtype == float128_type_node)
1969 return fcodef128;
1970 else if (mtype == float32x_type_node)
1971 return fcodef32x;
1972 else if (mtype == float64x_type_node)
1973 return fcodef64x;
1974 else if (mtype == float128x_type_node)
1975 return fcodef128x;
1976 else
1977 return END_BUILTINS;
1978 }
1979
1980 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1981 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1982 otherwise use the explicit declaration. If we can't do the conversion,
1983 return null. */
1984
1985 static tree
1986 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1987 {
1988 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1989 if (fcode2 == END_BUILTINS)
1990 return NULL_TREE;
1991
1992 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1993 return NULL_TREE;
1994
1995 return builtin_decl_explicit (fcode2);
1996 }
1997
1998 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
1999
2000 tree
2001 mathfn_built_in (tree type, combined_fn fn)
2002 {
2003 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2004 }
2005
2006 /* Like mathfn_built_in_1, but take a built_in_function and
2007 always use the implicit builtin declarations. */
2008
2009 tree
2010 mathfn_built_in (tree type, enum built_in_function fn)
2011 {
2012 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2013 }
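/* For illustration: mathfn_built_in (long_double_type_node, BUILT_IN_SIN)
   would yield the declaration of sinl, provided the implicit builtin
   declaration is available; otherwise it returns NULL_TREE.  */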
2014
2015 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2016 return its code, otherwise return IFN_LAST. Note that this function
2017 only tests whether the function is defined in internal-fn.def, not whether
2018 it is actually available on the target. */
2019
2020 internal_fn
2021 associated_internal_fn (tree fndecl)
2022 {
2023 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2024 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2025 switch (DECL_FUNCTION_CODE (fndecl))
2026 {
2027 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2028 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2029 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2030 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2031 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2032 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2033 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2034 #include "internal-fn.def"
2035
2036 CASE_FLT_FN (BUILT_IN_POW10):
2037 return IFN_EXP10;
2038
2039 CASE_FLT_FN (BUILT_IN_DREM):
2040 return IFN_REMAINDER;
2041
2042 CASE_FLT_FN (BUILT_IN_SCALBN):
2043 CASE_FLT_FN (BUILT_IN_SCALBLN):
2044 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2045 return IFN_LDEXP;
2046 return IFN_LAST;
2047
2048 default:
2049 return IFN_LAST;
2050 }
2051 }
2052
2053 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2054 on the current target by a call to an internal function, return the
2055 code of that internal function, otherwise return IFN_LAST. The caller
2056 is responsible for ensuring that any side-effects of the built-in
2057 call are dealt with correctly. E.g. if CALL sets errno, the caller
2058 must decide that the errno result isn't needed or make it available
2059 in some other way. */
2060
2061 internal_fn
2062 replacement_internal_fn (gcall *call)
2063 {
2064 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2065 {
2066 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2067 if (ifn != IFN_LAST)
2068 {
2069 tree_pair types = direct_internal_fn_types (ifn, call);
2070 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2071 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2072 return ifn;
2073 }
2074 }
2075 return IFN_LAST;
2076 }
2077
2078 /* Expand a call to the builtin ternary math functions (fma).
2079 Return NULL_RTX if a normal call should be emitted rather than expanding the
2080 function in-line. EXP is the expression that is a call to the builtin
2081 function; if convenient, the result should be placed in TARGET.
2082 SUBTARGET may be used as the target for computing one of EXP's
2083 operands. */
2084
2085 static rtx
2086 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2087 {
2088 optab builtin_optab;
2089 rtx op0, op1, op2, result;
2090 rtx_insn *insns;
2091 tree fndecl = get_callee_fndecl (exp);
2092 tree arg0, arg1, arg2;
2093 machine_mode mode;
2094
2095 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2096 return NULL_RTX;
2097
2098 arg0 = CALL_EXPR_ARG (exp, 0);
2099 arg1 = CALL_EXPR_ARG (exp, 1);
2100 arg2 = CALL_EXPR_ARG (exp, 2);
2101
2102 switch (DECL_FUNCTION_CODE (fndecl))
2103 {
2104 CASE_FLT_FN (BUILT_IN_FMA):
2105 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2106 builtin_optab = fma_optab; break;
2107 default:
2108 gcc_unreachable ();
2109 }
2110
2111 /* Make a suitable register to place result in. */
2112 mode = TYPE_MODE (TREE_TYPE (exp));
2113
2114 /* Before working hard, check whether the instruction is available. */
2115 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2116 return NULL_RTX;
2117
2118 result = gen_reg_rtx (mode);
2119
2120 /* Always stabilize the argument list. */
2121 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2122 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2123 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2124
2125 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2126 op1 = expand_normal (arg1);
2127 op2 = expand_normal (arg2);
2128
2129 start_sequence ();
2130
2131 /* Compute into RESULT.
2132 Set RESULT to wherever the result comes back. */
2133 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2134 result, 0);
2135
2136 /* If we were unable to expand via the builtin, stop the sequence
2137 (without outputting the insns) and call to the library function
2138 with the stabilized argument list. */
2139 if (result == 0)
2140 {
2141 end_sequence ();
2142 return expand_call (exp, target, target == const0_rtx);
2143 }
2144
2145 /* Output the entire sequence. */
2146 insns = get_insns ();
2147 end_sequence ();
2148 emit_insn (insns);
2149
2150 return result;
2151 }
2152
2153 /* Expand a call to the builtin sin and cos math functions.
2154 Return NULL_RTX if a normal call should be emitted rather than expanding the
2155 function in-line. EXP is the expression that is a call to the builtin
2156 function; if convenient, the result should be placed in TARGET.
2157 SUBTARGET may be used as the target for computing one of EXP's
2158 operands. */
2159
2160 static rtx
2161 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2162 {
2163 optab builtin_optab;
2164 rtx op0;
2165 rtx_insn *insns;
2166 tree fndecl = get_callee_fndecl (exp);
2167 machine_mode mode;
2168 tree arg;
2169
2170 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2171 return NULL_RTX;
2172
2173 arg = CALL_EXPR_ARG (exp, 0);
2174
2175 switch (DECL_FUNCTION_CODE (fndecl))
2176 {
2177 CASE_FLT_FN (BUILT_IN_SIN):
2178 CASE_FLT_FN (BUILT_IN_COS):
2179 builtin_optab = sincos_optab; break;
2180 default:
2181 gcc_unreachable ();
2182 }
2183
2184 /* Make a suitable register to place result in. */
2185 mode = TYPE_MODE (TREE_TYPE (exp));
2186
2187 /* Check if the sincos insn is available; otherwise fall back
2188 to the sin or cos insn. */
2189 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2190 switch (DECL_FUNCTION_CODE (fndecl))
2191 {
2192 CASE_FLT_FN (BUILT_IN_SIN):
2193 builtin_optab = sin_optab; break;
2194 CASE_FLT_FN (BUILT_IN_COS):
2195 builtin_optab = cos_optab; break;
2196 default:
2197 gcc_unreachable ();
2198 }
2199
2200 /* Before working hard, check whether the instruction is available. */
2201 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2202 {
2203 rtx result = gen_reg_rtx (mode);
2204
2205 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2206 need to expand the argument again. This way, we will not perform
2207 side-effects more than once. */
2208 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2209
2210 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2211
2212 start_sequence ();
2213
2214 /* Compute into RESULT.
2215 Set RESULT to wherever the result comes back. */
2216 if (builtin_optab == sincos_optab)
2217 {
2218 int ok;
2219
2220 switch (DECL_FUNCTION_CODE (fndecl))
2221 {
2222 CASE_FLT_FN (BUILT_IN_SIN):
2223 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2224 break;
2225 CASE_FLT_FN (BUILT_IN_COS):
2226 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2227 break;
2228 default:
2229 gcc_unreachable ();
2230 }
2231 gcc_assert (ok);
2232 }
2233 else
2234 result = expand_unop (mode, builtin_optab, op0, result, 0);
2235
2236 if (result != 0)
2237 {
2238 /* Output the entire sequence. */
2239 insns = get_insns ();
2240 end_sequence ();
2241 emit_insn (insns);
2242 return result;
2243 }
2244
2245 /* If we were unable to expand via the builtin, stop the sequence
2246 (without outputting the insns) and call to the library function
2247 with the stabilized argument list. */
2248 end_sequence ();
2249 }
2250
2251 return expand_call (exp, target, target == const0_rtx);
2252 }
2253
2254 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2255 return an RTL instruction code that implements the functionality.
2256 If that isn't possible or available return CODE_FOR_nothing. */
2257
2258 static enum insn_code
2259 interclass_mathfn_icode (tree arg, tree fndecl)
2260 {
2261 bool errno_set = false;
2262 optab builtin_optab = unknown_optab;
2263 machine_mode mode;
2264
2265 switch (DECL_FUNCTION_CODE (fndecl))
2266 {
2267 CASE_FLT_FN (BUILT_IN_ILOGB):
2268 errno_set = true; builtin_optab = ilogb_optab; break;
2269 CASE_FLT_FN (BUILT_IN_ISINF):
2270 builtin_optab = isinf_optab; break;
2271 case BUILT_IN_ISNORMAL:
2272 case BUILT_IN_ISFINITE:
2273 CASE_FLT_FN (BUILT_IN_FINITE):
2274 case BUILT_IN_FINITED32:
2275 case BUILT_IN_FINITED64:
2276 case BUILT_IN_FINITED128:
2277 case BUILT_IN_ISINFD32:
2278 case BUILT_IN_ISINFD64:
2279 case BUILT_IN_ISINFD128:
2280 /* These builtins have no optabs (yet). */
2281 break;
2282 default:
2283 gcc_unreachable ();
2284 }
2285
2286 /* There's no easy way to detect the case we need to set EDOM. */
2287 if (flag_errno_math && errno_set)
2288 return CODE_FOR_nothing;
2289
2290 /* Optab mode depends on the mode of the input argument. */
2291 mode = TYPE_MODE (TREE_TYPE (arg));
2292
2293 if (builtin_optab)
2294 return optab_handler (builtin_optab, mode);
2295 return CODE_FOR_nothing;
2296 }
2297
2298 /* Expand a call to one of the builtin math functions that operate on
2299 a floating point argument and output an integer result (ilogb, isinf,
2300 isnan, etc).
2301 Return 0 if a normal call should be emitted rather than expanding the
2302 function in-line. EXP is the expression that is a call to the builtin
2303 function; if convenient, the result should be placed in TARGET. */
2304
2305 static rtx
2306 expand_builtin_interclass_mathfn (tree exp, rtx target)
2307 {
2308 enum insn_code icode = CODE_FOR_nothing;
2309 rtx op0;
2310 tree fndecl = get_callee_fndecl (exp);
2311 machine_mode mode;
2312 tree arg;
2313
2314 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2315 return NULL_RTX;
2316
2317 arg = CALL_EXPR_ARG (exp, 0);
2318 icode = interclass_mathfn_icode (arg, fndecl);
2319 mode = TYPE_MODE (TREE_TYPE (arg));
2320
2321 if (icode != CODE_FOR_nothing)
2322 {
2323 struct expand_operand ops[1];
2324 rtx_insn *last = get_last_insn ();
2325 tree orig_arg = arg;
2326
2327 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2328 need to expand the argument again. This way, we will not perform
2329 side-effects more than once. */
2330 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2331
2332 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2333
2334 if (mode != GET_MODE (op0))
2335 op0 = convert_to_mode (mode, op0, 0);
2336
2337 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2338 if (maybe_legitimize_operands (icode, 0, 1, ops)
2339 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2340 return ops[0].value;
2341
2342 delete_insns_since (last);
2343 CALL_EXPR_ARG (exp, 0) = orig_arg;
2344 }
2345
2346 return NULL_RTX;
2347 }
2348
2349 /* Expand a call to the builtin sincos math function.
2350 Return NULL_RTX if a normal call should be emitted rather than expanding the
2351 function in-line. EXP is the expression that is a call to the builtin
2352 function. */
2353
2354 static rtx
2355 expand_builtin_sincos (tree exp)
2356 {
2357 rtx op0, op1, op2, target1, target2;
2358 machine_mode mode;
2359 tree arg, sinp, cosp;
2360 int result;
2361 location_t loc = EXPR_LOCATION (exp);
2362 tree alias_type, alias_off;
2363
2364 if (!validate_arglist (exp, REAL_TYPE,
2365 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2366 return NULL_RTX;
2367
2368 arg = CALL_EXPR_ARG (exp, 0);
2369 sinp = CALL_EXPR_ARG (exp, 1);
2370 cosp = CALL_EXPR_ARG (exp, 2);
2371
2372 /* Make a suitable register to place result in. */
2373 mode = TYPE_MODE (TREE_TYPE (arg));
2374
2375 /* Check if sincos insn is available, otherwise emit the call. */
2376 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2377 return NULL_RTX;
2378
2379 target1 = gen_reg_rtx (mode);
2380 target2 = gen_reg_rtx (mode);
2381
2382 op0 = expand_normal (arg);
2383 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2384 alias_off = build_int_cst (alias_type, 0);
2385 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2386 sinp, alias_off));
2387 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2388 cosp, alias_off));
2389
2390 /* Compute into target1 and target2.
2391 Set TARGET to wherever the result comes back. */
2392 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2393 gcc_assert (result);
2394
2395 /* Move target1 and target2 to the memory locations indicated
2396 by op1 and op2. */
2397 emit_move_insn (op1, target1);
2398 emit_move_insn (op2, target2);
2399
2400 return const0_rtx;
2401 }
2402
2403 /* Expand a call to the internal cexpi builtin via the sincos math function.
2404 EXP is the expression that is a call to the builtin function; if convenient,
2405 the result should be placed in TARGET. */
2406
2407 static rtx
2408 expand_builtin_cexpi (tree exp, rtx target)
2409 {
2410 tree fndecl = get_callee_fndecl (exp);
2411 tree arg, type;
2412 machine_mode mode;
2413 rtx op0, op1, op2;
2414 location_t loc = EXPR_LOCATION (exp);
2415
2416 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2417 return NULL_RTX;
2418
2419 arg = CALL_EXPR_ARG (exp, 0);
2420 type = TREE_TYPE (arg);
2421 mode = TYPE_MODE (TREE_TYPE (arg));
2422
2423 /* Try expanding via a sincos optab; fall back to emitting a libcall
2424 to sincos or cexp. We are sure to have one of them because cexpi
2425 is only generated from sincos or cexp, or when either of them is available. */
2426 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2427 {
2428 op1 = gen_reg_rtx (mode);
2429 op2 = gen_reg_rtx (mode);
2430
2431 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2432
2433 /* Compute into op1 and op2. */
2434 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2435 }
2436 else if (targetm.libc_has_function (function_sincos))
2437 {
2438 tree call, fn = NULL_TREE;
2439 tree top1, top2;
2440 rtx op1a, op2a;
2441
2442 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2443 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2444 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2445 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2446 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2447 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2448 else
2449 gcc_unreachable ();
2450
2451 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2452 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2453 op1a = copy_addr_to_reg (XEXP (op1, 0));
2454 op2a = copy_addr_to_reg (XEXP (op2, 0));
2455 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2456 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2457
2458 /* Make sure not to fold the sincos call again. */
2459 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2460 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2461 call, 3, arg, top1, top2));
2462 }
2463 else
2464 {
2465 tree call, fn = NULL_TREE, narg;
2466 tree ctype = build_complex_type (type);
2467
2468 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2469 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2470 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2471 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2472 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2473 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2474 else
2475 gcc_unreachable ();
2476
2477 /* If we don't have a decl for cexp, create one. This is the
2478 friendliest fallback if the user calls __builtin_cexpi
2479 on a target without full C99 function support. */
2480 if (fn == NULL_TREE)
2481 {
2482 tree fntype;
2483 const char *name = NULL;
2484
2485 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2486 name = "cexpf";
2487 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2488 name = "cexp";
2489 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2490 name = "cexpl";
2491
2492 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2493 fn = build_fn_decl (name, fntype);
2494 }
2495
2496 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2497 build_real (type, dconst0), arg);
2498
2499 /* Make sure not to fold the cexp call again. */
2500 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2501 return expand_expr (build_call_nary (ctype, call, 1, narg),
2502 target, VOIDmode, EXPAND_NORMAL);
2503 }
2504
2505 /* Now build the proper return type. */
2506 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2507 make_tree (TREE_TYPE (arg), op2),
2508 make_tree (TREE_TYPE (arg), op1)),
2509 target, VOIDmode, EXPAND_NORMAL);
2510 }
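/* Rough illustration of the expansion above, which relies on the identity
   cexpi (x) == cos (x) + i*sin (x).  With sincos available it behaves
   approximately like

     sincos (x, &s, &c);
     return CMPLX (c, s);

   and otherwise it falls back to computing cexp (CMPLX (0.0, x)).  */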
2511
2512 /* Conveniently construct a function call expression. FNDECL names the
2513 function to be called, N is the number of arguments, and the "..."
2514 parameters are the argument expressions. Unlike build_call_expr,
2515 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2516
2517 static tree
2518 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2519 {
2520 va_list ap;
2521 tree fntype = TREE_TYPE (fndecl);
2522 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2523
2524 va_start (ap, n);
2525 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2526 va_end (ap);
2527 SET_EXPR_LOCATION (fn, loc);
2528 return fn;
2529 }
2530
2531 /* Expand a call to one of the builtin rounding functions gcc defines
2532 as an extension (lfloor and lceil). As these are gcc extensions we
2533 do not need to worry about setting errno to EDOM.
2534 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2535 EXP is the expression that is a call to the builtin function;
2536 if convenient, the result should be placed in TARGET. */
2537
2538 static rtx
2539 expand_builtin_int_roundingfn (tree exp, rtx target)
2540 {
2541 convert_optab builtin_optab;
2542 rtx op0, tmp;
2543 rtx_insn *insns;
2544 tree fndecl = get_callee_fndecl (exp);
2545 enum built_in_function fallback_fn;
2546 tree fallback_fndecl;
2547 machine_mode mode;
2548 tree arg;
2549
2550 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2551 gcc_unreachable ();
2552
2553 arg = CALL_EXPR_ARG (exp, 0);
2554
2555 switch (DECL_FUNCTION_CODE (fndecl))
2556 {
2557 CASE_FLT_FN (BUILT_IN_ICEIL):
2558 CASE_FLT_FN (BUILT_IN_LCEIL):
2559 CASE_FLT_FN (BUILT_IN_LLCEIL):
2560 builtin_optab = lceil_optab;
2561 fallback_fn = BUILT_IN_CEIL;
2562 break;
2563
2564 CASE_FLT_FN (BUILT_IN_IFLOOR):
2565 CASE_FLT_FN (BUILT_IN_LFLOOR):
2566 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2567 builtin_optab = lfloor_optab;
2568 fallback_fn = BUILT_IN_FLOOR;
2569 break;
2570
2571 default:
2572 gcc_unreachable ();
2573 }
2574
2575 /* Make a suitable register to place result in. */
2576 mode = TYPE_MODE (TREE_TYPE (exp));
2577
2578 target = gen_reg_rtx (mode);
2579
2580 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2581 need to expand the argument again. This way, we will not perform
2582 side-effects more than once. */
2583 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2584
2585 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2586
2587 start_sequence ();
2588
2589 /* Compute into TARGET. */
2590 if (expand_sfix_optab (target, op0, builtin_optab))
2591 {
2592 /* Output the entire sequence. */
2593 insns = get_insns ();
2594 end_sequence ();
2595 emit_insn (insns);
2596 return target;
2597 }
2598
2599 /* If we were unable to expand via the builtin, stop the sequence
2600 (without outputting the insns). */
2601 end_sequence ();
2602
2603 /* Fall back to floating point rounding optab. */
2604 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2605
2606 /* For non-C99 targets we may end up without a fallback fndecl here
2607 if the user called __builtin_lfloor directly. In this case emit
2608 a call to the floor/ceil variants nevertheless. This should result
2609 in the best user experience for targets without full C99 support. */
2610 if (fallback_fndecl == NULL_TREE)
2611 {
2612 tree fntype;
2613 const char *name = NULL;
2614
2615 switch (DECL_FUNCTION_CODE (fndecl))
2616 {
2617 case BUILT_IN_ICEIL:
2618 case BUILT_IN_LCEIL:
2619 case BUILT_IN_LLCEIL:
2620 name = "ceil";
2621 break;
2622 case BUILT_IN_ICEILF:
2623 case BUILT_IN_LCEILF:
2624 case BUILT_IN_LLCEILF:
2625 name = "ceilf";
2626 break;
2627 case BUILT_IN_ICEILL:
2628 case BUILT_IN_LCEILL:
2629 case BUILT_IN_LLCEILL:
2630 name = "ceill";
2631 break;
2632 case BUILT_IN_IFLOOR:
2633 case BUILT_IN_LFLOOR:
2634 case BUILT_IN_LLFLOOR:
2635 name = "floor";
2636 break;
2637 case BUILT_IN_IFLOORF:
2638 case BUILT_IN_LFLOORF:
2639 case BUILT_IN_LLFLOORF:
2640 name = "floorf";
2641 break;
2642 case BUILT_IN_IFLOORL:
2643 case BUILT_IN_LFLOORL:
2644 case BUILT_IN_LLFLOORL:
2645 name = "floorl";
2646 break;
2647 default:
2648 gcc_unreachable ();
2649 }
2650
2651 fntype = build_function_type_list (TREE_TYPE (arg),
2652 TREE_TYPE (arg), NULL_TREE);
2653 fallback_fndecl = build_fn_decl (name, fntype);
2654 }
2655
2656 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2657
2658 tmp = expand_normal (exp);
2659 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2660
2661 /* Truncate the result of the floating point optab to an integer
2662 via expand_fix (). */
2663 target = gen_reg_rtx (mode);
2664 expand_fix (target, tmp, 0);
2665
2666 return target;
2667 }
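/* Rough illustration of the fallback path above: without an lfloor optab
   pattern,

     long l = __builtin_lfloor (x);

   is lowered to a call to floor followed by a float-to-integer conversion,
   i.e. approximately l = (long) floor (x).  */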
2668
2669 /* Expand a call to one of the builtin math functions doing integer
2670 conversion (lrint).
2671 Return 0 if a normal call should be emitted rather than expanding the
2672 function in-line. EXP is the expression that is a call to the builtin
2673 function; if convenient, the result should be placed in TARGET. */
2674
2675 static rtx
2676 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2677 {
2678 convert_optab builtin_optab;
2679 rtx op0;
2680 rtx_insn *insns;
2681 tree fndecl = get_callee_fndecl (exp);
2682 tree arg;
2683 machine_mode mode;
2684 enum built_in_function fallback_fn = BUILT_IN_NONE;
2685
2686 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2687 gcc_unreachable ();
2688
2689 arg = CALL_EXPR_ARG (exp, 0);
2690
2691 switch (DECL_FUNCTION_CODE (fndecl))
2692 {
2693 CASE_FLT_FN (BUILT_IN_IRINT):
2694 fallback_fn = BUILT_IN_LRINT;
2695 gcc_fallthrough ();
2696 CASE_FLT_FN (BUILT_IN_LRINT):
2697 CASE_FLT_FN (BUILT_IN_LLRINT):
2698 builtin_optab = lrint_optab;
2699 break;
2700
2701 CASE_FLT_FN (BUILT_IN_IROUND):
2702 fallback_fn = BUILT_IN_LROUND;
2703 gcc_fallthrough ();
2704 CASE_FLT_FN (BUILT_IN_LROUND):
2705 CASE_FLT_FN (BUILT_IN_LLROUND):
2706 builtin_optab = lround_optab;
2707 break;
2708
2709 default:
2710 gcc_unreachable ();
2711 }
2712
2713 /* There's no easy way to detect the case we need to set EDOM. */
2714 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2715 return NULL_RTX;
2716
2717 /* Make a suitable register to place result in. */
2718 mode = TYPE_MODE (TREE_TYPE (exp));
2719
2720 /* There's no easy way to detect the case we need to set EDOM. */
2721 if (!flag_errno_math)
2722 {
2723 rtx result = gen_reg_rtx (mode);
2724
2725 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2726 need to expand the argument again. This way, we will not perform
2727 side-effects more than once. */
2728 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2729
2730 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2731
2732 start_sequence ();
2733
2734 if (expand_sfix_optab (result, op0, builtin_optab))
2735 {
2736 /* Output the entire sequence. */
2737 insns = get_insns ();
2738 end_sequence ();
2739 emit_insn (insns);
2740 return result;
2741 }
2742
2743 /* If we were unable to expand via the builtin, stop the sequence
2744 (without outputting the insns) and call to the library function
2745 with the stabilized argument list. */
2746 end_sequence ();
2747 }
2748
2749 if (fallback_fn != BUILT_IN_NONE)
2750 {
2751 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2752 targets, (int) round (x) should never be transformed into
2753 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2754 a call to lround in the hope that the target provides at least some
2755 C99 functions. This should result in the best user experience for
2756 targets without full C99 support. */
2757 tree fallback_fndecl = mathfn_built_in_1
2758 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2759
2760 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2761 fallback_fndecl, 1, arg);
2762
2763 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2764 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2765 return convert_to_mode (mode, target, 0);
2766 }
2767
2768 return expand_call (exp, target, target == const0_rtx);
2769 }
2770
2771 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2772 a normal call should be emitted rather than expanding the function
2773 in-line. EXP is the expression that is a call to the builtin
2774 function; if convenient, the result should be placed in TARGET. */
2775
2776 static rtx
2777 expand_builtin_powi (tree exp, rtx target)
2778 {
2779 tree arg0, arg1;
2780 rtx op0, op1;
2781 machine_mode mode;
2782 machine_mode mode2;
2783
2784 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2785 return NULL_RTX;
2786
2787 arg0 = CALL_EXPR_ARG (exp, 0);
2788 arg1 = CALL_EXPR_ARG (exp, 1);
2789 mode = TYPE_MODE (TREE_TYPE (exp));
2790
2791 /* Emit a libcall to libgcc. */
2792
2793 /* Mode of the 2nd argument must match that of an int. */
2794 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2795
2796 if (target == NULL_RTX)
2797 target = gen_reg_rtx (mode);
2798
2799 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2800 if (GET_MODE (op0) != mode)
2801 op0 = convert_to_mode (mode, op0, 0);
2802 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2803 if (GET_MODE (op1) != mode2)
2804 op1 = convert_to_mode (mode2, op1, 0);
2805
2806 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2807 target, LCT_CONST, mode,
2808 op0, mode, op1, mode2);
2809
2810 return target;
2811 }
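/* For illustration: the expansion above always emits a libcall, so

     double d = __builtin_powi (x, n);

   becomes a call to the libgcc helper for the mode, typically __powidf2
   for double.  */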
2812
2813 /* Expand expression EXP which is a call to the strlen builtin. Return
2814 NULL_RTX if we failed, in which case the caller should emit a normal call;
2815 otherwise try to get the result in TARGET, if convenient. */
2816
2817 static rtx
2818 expand_builtin_strlen (tree exp, rtx target,
2819 machine_mode target_mode)
2820 {
2821 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2822 return NULL_RTX;
2823 else
2824 {
2825 struct expand_operand ops[4];
2826 rtx pat;
2827 tree len;
2828 tree src = CALL_EXPR_ARG (exp, 0);
2829 rtx src_reg;
2830 rtx_insn *before_strlen;
2831 machine_mode insn_mode;
2832 enum insn_code icode = CODE_FOR_nothing;
2833 unsigned int align;
2834
2835 /* If the length can be computed at compile-time, return it. */
2836 len = c_strlen (src, 0);
2837 if (len)
2838 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2839
2840 /* If the length can be computed at compile-time and is a constant
2841 integer, but there are side-effects in src, evaluate
2842 src for side-effects, then return len.
2843 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2844 can be optimized into: i++; x = 3; */
2845 len = c_strlen (src, 1);
2846 if (len && TREE_CODE (len) == INTEGER_CST)
2847 {
2848 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2849 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2850 }
2851
2852 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2853
2854 /* If SRC is not a pointer type, don't do this operation inline. */
2855 if (align == 0)
2856 return NULL_RTX;
2857
2858 /* Bail out if we can't compute strlen in the right mode. */
2859 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2860 {
2861 icode = optab_handler (strlen_optab, insn_mode);
2862 if (icode != CODE_FOR_nothing)
2863 break;
2864 }
2865 if (insn_mode == VOIDmode)
2866 return NULL_RTX;
2867
2868 /* Make a place to hold the source address. We will not expand
2869 the actual source until we are sure that the expansion will
2870 not fail -- there are trees that cannot be expanded twice. */
2871 src_reg = gen_reg_rtx (Pmode);
2872
2873 /* Mark the beginning of the strlen sequence so we can emit the
2874 source operand later. */
2875 before_strlen = get_last_insn ();
2876
2877 create_output_operand (&ops[0], target, insn_mode);
2878 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2879 create_integer_operand (&ops[2], 0);
2880 create_integer_operand (&ops[3], align);
2881 if (!maybe_expand_insn (icode, 4, ops))
2882 return NULL_RTX;
2883
2884 /* Check to see if the argument was declared attribute nonstring
2885 and if so, issue a warning since at this point it's not known
2886 to be nul-terminated. */
2887 maybe_warn_nonstring_arg (TREE_OPERAND (CALL_EXPR_FN (exp), 0), exp);
2888
2889 /* Now that we are assured of success, expand the source. */
2890 start_sequence ();
2891 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2892 if (pat != src_reg)
2893 {
2894 #ifdef POINTERS_EXTEND_UNSIGNED
2895 if (GET_MODE (pat) != Pmode)
2896 pat = convert_to_mode (Pmode, pat,
2897 POINTERS_EXTEND_UNSIGNED);
2898 #endif
2899 emit_move_insn (src_reg, pat);
2900 }
2901 pat = get_insns ();
2902 end_sequence ();
2903
2904 if (before_strlen)
2905 emit_insn_after (pat, before_strlen);
2906 else
2907 emit_insn_before (pat, get_insns ());
2908
2909 /* Return the value in the proper mode for this function. */
2910 if (GET_MODE (ops[0].value) == target_mode)
2911 target = ops[0].value;
2912 else if (target != 0)
2913 convert_move (target, ops[0].value, 0);
2914 else
2915 target = convert_to_mode (target_mode, ops[0].value, 0);
2916
2917 return target;
2918 }
2919 }
2920
2921 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2922 bytes from constant string DATA + OFFSET and return it as target
2923 constant. */
2924
2925 static rtx
2926 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2927 scalar_int_mode mode)
2928 {
2929 const char *str = (const char *) data;
2930
2931 gcc_assert (offset >= 0
2932 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2933 <= strlen (str) + 1));
2934
2935 return c_readstr (str + offset, mode);
2936 }
2937
2938 /* LEN specifies the length of the block for the memcpy/memset operation.
2939 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2940 In some cases we can make a very likely guess about the maximum size;
2941 in that case, store the guess in PROBABLE_MAX_SIZE. */
2942
2943 static void
2944 determine_block_size (tree len, rtx len_rtx,
2945 unsigned HOST_WIDE_INT *min_size,
2946 unsigned HOST_WIDE_INT *max_size,
2947 unsigned HOST_WIDE_INT *probable_max_size)
2948 {
2949 if (CONST_INT_P (len_rtx))
2950 {
2951 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2952 return;
2953 }
2954 else
2955 {
2956 wide_int min, max;
2957 enum value_range_type range_type = VR_UNDEFINED;
2958
2959 /* Determine bounds from the type. */
2960 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2961 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2962 else
2963 *min_size = 0;
2964 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2965 *probable_max_size = *max_size
2966 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2967 else
2968 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2969
2970 if (TREE_CODE (len) == SSA_NAME)
2971 range_type = get_range_info (len, &min, &max);
2972 if (range_type == VR_RANGE)
2973 {
2974 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2975 *min_size = min.to_uhwi ();
2976 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2977 *probable_max_size = *max_size = max.to_uhwi ();
2978 }
2979 else if (range_type == VR_ANTI_RANGE)
2980 {
2981 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2982 if (min == 0)
2983 {
2984 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2985 *min_size = max.to_uhwi () + 1;
2986 }
2987 /* Code like
2988
2989 int n;
2990 if (n < 100)
2991 memcpy (a, b, n)
2992
2993 produces an anti-range allowing negative values of N. We can
2994 still use that information and guess that N is not negative.
2995 */
2996 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2997 *probable_max_size = min.to_uhwi () - 1;
2998 }
2999 }
3000 gcc_checking_assert (*max_size <=
3001 (unsigned HOST_WIDE_INT)
3002 GET_MODE_MASK (GET_MODE (len_rtx)));
3003 }
3004
3005 /* Try to verify that the sizes and lengths of the arguments to a string
3006 manipulation function given by EXP are within valid bounds and that
3007 the operation does not lead to buffer overflow or read past the end.
3008 Arguments other than EXP may be null. When non-null, the arguments
3009 have the following meaning:
3010 DST is the destination of a copy call or NULL otherwise.
3011 SRC is the source of a copy call or NULL otherwise.
3012 DSTWRITE is the number of bytes written into the destination obtained
3013 from the user-supplied size argument to the function (such as in
3014 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3015 MAXREAD is the user-supplied bound on the length of the source sequence
3016 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3017 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3018 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3019 expression EXP is a string function call (as opposed to a memory call
3020 like memcpy). As an exception, SRCSTR can also be an integer denoting
3021 the precomputed size of the source string or object (for functions like
3022 memcpy).
3023 DSTSIZE is the size of the destination object specified by the last
3024 argument to the _chk builtins, typically resulting from the expansion
3025 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3026 DSTSIZE)).
3027
3028 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3029 SIZE_MAX.
3030
3031 If the call is successfully verified as safe, return true; otherwise
3032 return false. */
3033
3034 static bool
3035 check_access (tree exp, tree, tree, tree dstwrite,
3036 tree maxread, tree srcstr, tree dstsize)
3037 {
3038 int opt = OPT_Wstringop_overflow_;
3039
3040 /* The size of the largest object is half the address space, or
3041 PTRDIFF_MAX. (This is way too permissive.) */
3042 tree maxobjsize = max_object_size ();
3043
3044 /* Either the length of the source string for string functions or
3045 the size of the source object for raw memory functions. */
3046 tree slen = NULL_TREE;
3047
3048 tree range[2] = { NULL_TREE, NULL_TREE };
3049
3050 /* Set to true when the exact number of bytes written by a string
3051 function like strcpy is not known and the only thing that is
3052 known is that it must be at least one (for the terminating nul). */
3053 bool at_least_one = false;
3054 if (srcstr)
3055 {
3056 /* SRCSTR is normally a pointer to a string but as a special case
3057 it can be an integer denoting the length of a string. */
3058 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3059 {
3060 /* Try to determine the range of lengths the source string
3061 refers to. If it can be determined and is less than
3062 the upper bound given by MAXREAD add one to it for
3063 the terminating nul. Otherwise, set it to one for
3064 the same reason, or to MAXREAD as appropriate. */
3065 get_range_strlen (srcstr, range);
3066 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3067 {
3068 if (maxread && tree_int_cst_le (maxread, range[0]))
3069 range[0] = range[1] = maxread;
3070 else
3071 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3072 range[0], size_one_node);
3073
3074 if (maxread && tree_int_cst_le (maxread, range[1]))
3075 range[1] = maxread;
3076 else if (!integer_all_onesp (range[1]))
3077 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3078 range[1], size_one_node);
3079
3080 slen = range[0];
3081 }
3082 else
3083 {
3084 at_least_one = true;
3085 slen = size_one_node;
3086 }
3087 }
3088 else
3089 slen = srcstr;
3090 }
3091
3092 if (!dstwrite && !maxread)
3093 {
3094 /* When the only available piece of data is the object size
3095 there is nothing to do. */
3096 if (!slen)
3097 return true;
3098
3099 /* Otherwise, when the length of the source sequence is known
3100 (as with strlen), set DSTWRITE to it. */
3101 if (!range[0])
3102 dstwrite = slen;
3103 }
3104
3105 if (!dstsize)
3106 dstsize = maxobjsize;
3107
3108 if (dstwrite)
3109 get_size_range (dstwrite, range);
3110
3111 tree func = get_callee_fndecl (exp);
3112
3113 /* First check the number of bytes to be written against the maximum
3114 object size. */
3115 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3116 {
3117 location_t loc = tree_nonartificial_location (exp);
3118 loc = expansion_point_location_if_in_system_header (loc);
3119
3120 if (range[0] == range[1])
3121 warning_at (loc, opt,
3122 "%K%qD specified size %E "
3123 "exceeds maximum object size %E",
3124 exp, func, range[0], maxobjsize);
3125 else
3126 warning_at (loc, opt,
3127 "%K%qD specified size between %E and %E "
3128 "exceeds maximum object size %E",
3129 exp, func,
3130 range[0], range[1], maxobjsize);
3131 return false;
3132 }
3133
3134 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3135 constant, and in range of unsigned HOST_WIDE_INT. */
3136 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3137
3138 /* Next check the number of bytes to be written against the destination
3139 object size. */
3140 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3141 {
3142 if (range[0]
3143 && ((tree_fits_uhwi_p (dstsize)
3144 && tree_int_cst_lt (dstsize, range[0]))
3145 || (tree_fits_uhwi_p (dstwrite)
3146 && tree_int_cst_lt (dstwrite, range[0]))))
3147 {
3148 location_t loc = tree_nonartificial_location (exp);
3149 loc = expansion_point_location_if_in_system_header (loc);
3150
3151 if (dstwrite == slen && at_least_one)
3152 {
3153 /* This is a call to strcpy with a destination of 0 size
3154 and a source of unknown length. The call will write
3155 at least one byte past the end of the destination. */
3156 warning_at (loc, opt,
3157 "%K%qD writing %E or more bytes into a region "
3158 "of size %E overflows the destination",
3159 exp, func, range[0], dstsize);
3160 }
3161 else if (tree_int_cst_equal (range[0], range[1]))
3162 warning_at (loc, opt,
3163 (integer_onep (range[0])
3164 ? G_("%K%qD writing %E byte into a region "
3165 "of size %E overflows the destination")
3166 : G_("%K%qD writing %E bytes into a region "
3167 "of size %E overflows the destination")),
3168 exp, func, range[0], dstsize);
3169 else if (tree_int_cst_sign_bit (range[1]))
3170 {
3171 /* Avoid printing the upper bound if it's invalid. */
3172 warning_at (loc, opt,
3173 "%K%qD writing %E or more bytes into a region "
3174 "of size %E overflows the destination",
3175 exp, func, range[0], dstsize);
3176 }
3177 else
3178 warning_at (loc, opt,
3179 "%K%qD writing between %E and %E bytes into "
3180 "a region of size %E overflows the destination",
3181 exp, func, range[0], range[1],
3182 dstsize);
3183
3184 /* Return error when an overflow has been detected. */
3185 return false;
3186 }
3187 }
3188
3189 /* Check the maximum length of the source sequence against the size
3190 of the destination object if known, or against the maximum size
3191 of an object. */
3192 if (maxread)
3193 {
3194 get_size_range (maxread, range);
3195
3196 /* Use the lower end for MAXREAD from now on. */
3197 if (range[0])
3198 maxread = range[0];
3199
3200 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3201 {
3202 location_t loc = tree_nonartificial_location (exp);
3203 loc = expansion_point_location_if_in_system_header (loc);
3204
3205 if (tree_int_cst_lt (maxobjsize, range[0]))
3206 {
3207 /* Warn about crazy big sizes first since that's more
3208 likely to be meaningful than saying that the bound
3209 is greater than the object size if both are big. */
3210 if (range[0] == range[1])
3211 warning_at (loc, opt,
3212 "%K%qD specified bound %E "
3213 "exceeds maximum object size %E",
3214 exp, func,
3215 range[0], maxobjsize);
3216 else
3217 warning_at (loc, opt,
3218 "%K%qD specified bound between %E and %E "
3219 "exceeds maximum object size %E",
3220 exp, func,
3221 range[0], range[1], maxobjsize);
3222
3223 return false;
3224 }
3225
3226 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3227 {
3228 if (tree_int_cst_equal (range[0], range[1]))
3229 warning_at (loc, opt,
3230 "%K%qD specified bound %E "
3231 "exceeds destination size %E",
3232 exp, func,
3233 range[0], dstsize);
3234 else
3235 warning_at (loc, opt,
3236 "%K%qD specified bound between %E and %E "
3237 "exceeds destination size %E",
3238 exp, func,
3239 range[0], range[1], dstsize);
3240 return false;
3241 }
3242 }
3243 }
3244
3245 /* Check for reading past the end of SRC. */
3246 if (slen
3247 && slen == srcstr
3248 && dstwrite && range[0]
3249 && tree_int_cst_lt (slen, range[0]))
3250 {
3251 location_t loc = tree_nonartificial_location (exp);
3252
3253 if (tree_int_cst_equal (range[0], range[1]))
3254 warning_at (loc, opt,
3255 (tree_int_cst_equal (range[0], integer_one_node)
3256 ? G_("%K%qD reading %E byte from a region of size %E")
3257 : G_("%K%qD reading %E bytes from a region of size %E")),
3258 exp, func, range[0], slen);
3259 else if (tree_int_cst_sign_bit (range[1]))
3260 {
3261 /* Avoid printing the upper bound if it's invalid. */
3262 warning_at (loc, opt,
3263 "%K%qD reading %E or more bytes from a region "
3264 "of size %E",
3265 exp, func, range[0], slen);
3266 }
3267 else
3268 warning_at (loc, opt,
3269 "%K%qD reading between %E and %E bytes from a region "
3270 "of size %E",
3271 exp, func, range[0], range[1], slen);
3272 return false;
3273 }
3274
3275 return true;
3276 }
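/* Illustration of the kind of call the checks above diagnose when
   -Wstringop-overflow is enabled:

     char d[3];
     strcpy (d, "abcd");   // writes 5 bytes into a region of size 3

   which is reported as an overflow of the destination.  */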
3277
3278 /* Helper to compute the size of the object referenced by the DEST
3279 expression which must have pointer type, using Object Size type
3280 OSTYPE (only the least significant 2 bits are used). Return
3281 an estimate of the size of the object if successful or NULL when
3282 the size cannot be determined. When the referenced object involves
3283 a non-constant offset in some range the returned value represents
3284 the largest size given the smallest non-negative offset in the
3285 range. The function is intended for diagnostics and should not
3286 be used to influence code generation or optimization. */
3287
3288 tree
3289 compute_objsize (tree dest, int ostype)
3290 {
3291 unsigned HOST_WIDE_INT size;
3292
3293 /* Only the two least significant bits are meaningful. */
3294 ostype &= 3;
3295
3296 if (compute_builtin_object_size (dest, ostype, &size))
3297 return build_int_cst (sizetype, size);
3298
3299 if (TREE_CODE (dest) == SSA_NAME)
3300 {
3301 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3302 if (!is_gimple_assign (stmt))
3303 return NULL_TREE;
3304
3305 dest = gimple_assign_rhs1 (stmt);
3306
3307 tree_code code = gimple_assign_rhs_code (stmt);
3308 if (code == POINTER_PLUS_EXPR)
3309 {
3310 /* compute_builtin_object_size fails for addresses with
3311 non-constant offsets. Try to determine the range of
3312 such an offset here and use it to adjust the constant
3313 size. */
3314 tree off = gimple_assign_rhs2 (stmt);
3315 if (TREE_CODE (off) == SSA_NAME
3316 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3317 {
3318 wide_int min, max;
3319 enum value_range_type rng = get_range_info (off, &min, &max);
3320
3321 if (rng == VR_RANGE)
3322 {
3323 if (tree size = compute_objsize (dest, ostype))
3324 {
3325 wide_int wisiz = wi::to_wide (size);
3326
3327 /* Ignore negative offsets for now. For others,
3328 use the lower bound as the most optimistic
3329 estimate of the (remaining) size. */
3330 if (wi::sign_mask (min))
3331 ;
3332 else if (wi::ltu_p (min, wisiz))
3333 return wide_int_to_tree (TREE_TYPE (size),
3334 wi::sub (wisiz, min));
3335 else
3336 return size_zero_node;
3337 }
3338 }
3339 }
3340 }
3341 else if (code != ADDR_EXPR)
3342 return NULL_TREE;
3343 }
3344
3345 /* Unless computing the largest size (for memcpy and other raw memory
3346 functions), try to determine the size of the object from its type. */
3347 if (!ostype)
3348 return NULL_TREE;
3349
3350 if (TREE_CODE (dest) != ADDR_EXPR)
3351 return NULL_TREE;
3352
3353 tree type = TREE_TYPE (dest);
3354 if (TREE_CODE (type) == POINTER_TYPE)
3355 type = TREE_TYPE (type);
3356
3357 type = TYPE_MAIN_VARIANT (type);
3358
3359 if (TREE_CODE (type) == ARRAY_TYPE
3360 && !array_at_struct_end_p (dest))
3361 {
3362 /* Return the constant size unless it's zero (that's a zero-length
3363 array likely at the end of a struct). */
3364 tree size = TYPE_SIZE_UNIT (type);
3365 if (size && TREE_CODE (size) == INTEGER_CST
3366 && !integer_zerop (size))
3367 return size;
3368 }
3369
3370 return NULL_TREE;
3371 }
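/* Illustrative sketch (not part of GCC; names are hypothetical): for a
   pointer formed from a base object plus an offset with a known range,
   compute_objsize above reports the remaining size based on the range's
   lower bound, e.g.

     char buf[8];
     char *p = buf + i;   // with i known to be in [2, 5]

   the size reported for P is 8 - 2 == 6 bytes, the most optimistic
   (largest) remaining size.  A negative lower bound is ignored.  */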
3372
3373 /* Helper to determine and check the sizes of the source and the destination
3374 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3375 call expression, DEST is the destination argument, SRC is the source
3376 argument or null, and SIZE is the number of bytes to access. Use Object
3377 Size type-0 regardless of the OPT_Wstringop_overflow_ setting. Return
3378 true on success (no overflow or invalid sizes), false otherwise. */
3379
3380 static bool
3381 check_memop_access (tree exp, tree dest, tree src, tree size)
3382 {
3383 /* For functions like memset and memcpy that operate on raw memory
3384 try to determine the size of the largest source and destination
3385 object using type-0 Object Size regardless of the object size
3386 type specified by the option. */
3387 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3388 tree dstsize = compute_objsize (dest, 0);
3389
3390 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3391 srcsize, dstsize);
3392 }
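/* A minimal sketch of the kind of call check_memop_access is meant to
   diagnose (illustrative only, not part of GCC):

     char d[4];
     extern const char *s;
     __builtin_memcpy (d, s, 8);   // 8 bytes written into a 4-byte object

   With -Wstringop-overflow enabled this is expected to trigger a warning
   from check_access above; the expansion itself is unaffected.  */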
3393
3394 /* Validate memchr arguments without performing any expansion.
3395 Return NULL_RTX. */
3396
3397 static rtx
3398 expand_builtin_memchr (tree exp, rtx)
3399 {
3400 if (!validate_arglist (exp,
3401 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3402 return NULL_RTX;
3403
3404 tree arg1 = CALL_EXPR_ARG (exp, 0);
3405 tree len = CALL_EXPR_ARG (exp, 2);
3406
3407 /* Diagnose calls where the specified length exceeds the size
3408 of the object. */
3409 if (warn_stringop_overflow)
3410 {
3411 tree size = compute_objsize (arg1, 0);
3412 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3413 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3414 }
3415
3416 return NULL_RTX;
3417 }
3418
3419 /* Expand a call EXP to the memcpy builtin.
3420 Return NULL_RTX if we failed; the caller should emit a normal
3421 call. Otherwise try to get the result in TARGET, if that is
3422 convenient. */
3423
3424 static rtx
3425 expand_builtin_memcpy (tree exp, rtx target)
3426 {
3427 if (!validate_arglist (exp,
3428 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3429 return NULL_RTX;
3430
3431 tree dest = CALL_EXPR_ARG (exp, 0);
3432 tree src = CALL_EXPR_ARG (exp, 1);
3433 tree len = CALL_EXPR_ARG (exp, 2);
3434
3435 check_memop_access (exp, dest, src, len);
3436
3437 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3438 /*endp=*/ 0);
3439 }
3440
3441 /* Check a call EXP to the memmove built-in for validity.
3442 Return NULL_RTX on both success and failure. */
3443
3444 static rtx
3445 expand_builtin_memmove (tree exp, rtx)
3446 {
3447 if (!validate_arglist (exp,
3448 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3449 return NULL_RTX;
3450
3451 tree dest = CALL_EXPR_ARG (exp, 0);
3452 tree src = CALL_EXPR_ARG (exp, 1);
3453 tree len = CALL_EXPR_ARG (exp, 2);
3454
3455 check_memop_access (exp, dest, src, len);
3456
3457 return NULL_RTX;
3458 }
3459
3460 /* Expand an instrumented (Pointer Bounds Checker) call EXP to the
3461 memcpy builtin. Return NULL_RTX if we failed; the caller should
3462 emit a normal call. Otherwise try to get the result in TARGET,
3463 if that is convenient. */
3464
3465 static rtx
3466 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3467 {
3468 if (!validate_arglist (exp,
3469 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3470 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3471 INTEGER_TYPE, VOID_TYPE))
3472 return NULL_RTX;
3473 else
3474 {
3475 tree dest = CALL_EXPR_ARG (exp, 0);
3476 tree src = CALL_EXPR_ARG (exp, 2);
3477 tree len = CALL_EXPR_ARG (exp, 4);
3478 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3479 /*end_p=*/ 0);
3480
3481 /* Return src bounds with the result. */
3482 if (res)
3483 {
3484 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3485 expand_normal (CALL_EXPR_ARG (exp, 1)));
3486 res = chkp_join_splitted_slot (res, bnd);
3487 }
3488 return res;
3489 }
3490 }
3491
3492 /* Expand a call EXP to the mempcpy builtin.
3493 Return NULL_RTX if we failed; the caller should emit a normal call,
3494 otherwise try to get the result in TARGET, if convenient. Like the
3495 library function, mempcpy returns the destination pointer advanced
3496 by the number of bytes copied (the end pointer) rather than the
3497 destination pointer itself, i.e. the ENDP == 1 convention of
3498 expand_builtin_mempcpy_args. */
3499
3500 static rtx
3501 expand_builtin_mempcpy (tree exp, rtx target)
3502 {
3503 if (!validate_arglist (exp,
3504 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3505 return NULL_RTX;
3506
3507 tree dest = CALL_EXPR_ARG (exp, 0);
3508 tree src = CALL_EXPR_ARG (exp, 1);
3509 tree len = CALL_EXPR_ARG (exp, 2);
3510
3511 /* Policy does not generally allow using compute_objsize (which
3512 is used internally by check_memop_access) to change code generation
3513 or drive optimization decisions.
3514
3515 In this instance it is safe because the code we generate has
3516 the same semantics regardless of the return value of
3517 check_memop_access. Exactly the same amount of data is copied
3518 and the return value is exactly the same in both cases.
3519
3520 Furthermore, check_memop_access always uses mode 0 for the call to
3521 compute_objsize, so the imprecise nature of compute_objsize is
3522 avoided. */
3523
3524 /* Avoid expanding mempcpy into memcpy when the call is determined
3525 to overflow the buffer. This also prevents the same overflow
3526 from being diagnosed again when expanding memcpy. */
3527 if (!check_memop_access (exp, dest, src, len))
3528 return NULL_RTX;
3529
3530 return expand_builtin_mempcpy_args (dest, src, len,
3531 target, exp, /*endp=*/ 1);
3532 }
3533
3534 /* Expand an instrumented (Pointer Bounds Checker) call EXP to the
3535 mempcpy builtin. Return NULL_RTX if we failed; the caller should
3536 emit a normal call. Otherwise try to get the result in TARGET,
3537 if that is convenient. */
3538
3539 static rtx
3540 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3541 {
3542 if (!validate_arglist (exp,
3543 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3544 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3545 INTEGER_TYPE, VOID_TYPE))
3546 return NULL_RTX;
3547 else
3548 {
3549 tree dest = CALL_EXPR_ARG (exp, 0);
3550 tree src = CALL_EXPR_ARG (exp, 2);
3551 tree len = CALL_EXPR_ARG (exp, 4);
3552 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3553 exp, 1);
3554
3555 /* Return src bounds with the result. */
3556 if (res)
3557 {
3558 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3559 expand_normal (CALL_EXPR_ARG (exp, 1)));
3560 res = chkp_join_splitted_slot (res, bnd);
3561 }
3562 return res;
3563 }
3564 }
3565
3566 /* Helper function to do the actual work for expansion of the memory
3567 copy family of functions (memcpy, mempcpy, stpcpy). The expansion
3568 should copy LEN bytes of memory from SRC to DEST and assign the
3569 result to TARGET if convenient. If ENDP is 0, return the
3570 destination pointer; if ENDP is 1, return the end pointer as
3571 mempcpy does; and if ENDP is 2, return the end pointer minus one
3572 as stpcpy does. */
3573
3574 static rtx
3575 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3576 rtx target, tree exp, int endp)
3577 {
3578 const char *src_str;
3579 unsigned int src_align = get_pointer_alignment (src);
3580 unsigned int dest_align = get_pointer_alignment (dest);
3581 rtx dest_mem, src_mem, dest_addr, len_rtx;
3582 HOST_WIDE_INT expected_size = -1;
3583 unsigned int expected_align = 0;
3584 unsigned HOST_WIDE_INT min_size;
3585 unsigned HOST_WIDE_INT max_size;
3586 unsigned HOST_WIDE_INT probable_max_size;
3587
3588 /* If DEST is not a pointer type, call the normal function. */
3589 if (dest_align == 0)
3590 return NULL_RTX;
3591
3592 /* Likewise, if SRC is not a pointer type, don't do this
3593 operation in-line. */
3594 if (src_align == 0)
3595 return NULL_RTX;
3596
3597 if (currently_expanding_gimple_stmt)
3598 stringop_block_profile (currently_expanding_gimple_stmt,
3599 &expected_align, &expected_size);
3600
3601 if (expected_align < dest_align)
3602 expected_align = dest_align;
3603 dest_mem = get_memory_rtx (dest, len);
3604 set_mem_align (dest_mem, dest_align);
3605 len_rtx = expand_normal (len);
3606 determine_block_size (len, len_rtx, &min_size, &max_size,
3607 &probable_max_size);
3608 src_str = c_getstr (src);
3609
3610 /* If SRC is a string constant and block move would be done
3611 by pieces, we can avoid loading the string from memory
3612 and only store the computed constants. */
3613 if (src_str
3614 && CONST_INT_P (len_rtx)
3615 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3616 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3617 CONST_CAST (char *, src_str),
3618 dest_align, false))
3619 {
3620 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3621 builtin_memcpy_read_str,
3622 CONST_CAST (char *, src_str),
3623 dest_align, false, endp);
3624 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3625 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3626 return dest_mem;
3627 }
3628
3629 src_mem = get_memory_rtx (src, len);
3630 set_mem_align (src_mem, src_align);
3631
3632 /* Copy word part most expediently. */
3633 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3634 CALL_EXPR_TAILCALL (exp)
3635 && (endp == 0 || target == const0_rtx)
3636 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3637 expected_align, expected_size,
3638 min_size, max_size, probable_max_size);
3639
3640 if (dest_addr == 0)
3641 {
3642 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3643 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3644 }
3645
3646 if (endp && target != const0_rtx)
3647 {
3648 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3649 /* stpcpy returns a pointer to the last byte written (the NUL). */
3650 if (endp == 2)
3651 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3652 }
3653
3654 return dest_addr;
3655 }
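/* Illustrative sketch of the ENDP convention used above (user-level
   behavior, not part of GCC; mempcpy and stpcpy are assumed to be
   available, as in glibc):

     char buf[8];
     void *p0 = memcpy  (buf, "abc", 4);   // ENDP == 0: p0 == buf
     void *p1 = mempcpy (buf, "abc", 4);   // ENDP == 1: p1 == buf + 4
     char *p2 = stpcpy  (buf, "abc");      // ENDP == 2: p2 == buf + 3,
                                           // i.e. the terminating NUL  */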
3656
3657 static rtx
3658 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3659 rtx target, tree orig_exp, int endp)
3660 {
3661 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3662 endp);
3663 }
3664
3665 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3666 we failed, the caller should emit a normal call, otherwise try to
3667 get the result in TARGET, if convenient. If ENDP is 0 return the
3668 destination pointer, if ENDP is 1 return the end pointer ala
3669 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3670 stpcpy. */
3671
3672 static rtx
3673 expand_movstr (tree dest, tree src, rtx target, int endp)
3674 {
3675 struct expand_operand ops[3];
3676 rtx dest_mem;
3677 rtx src_mem;
3678
3679 if (!targetm.have_movstr ())
3680 return NULL_RTX;
3681
3682 dest_mem = get_memory_rtx (dest, NULL);
3683 src_mem = get_memory_rtx (src, NULL);
3684 if (!endp)
3685 {
3686 target = force_reg (Pmode, XEXP (dest_mem, 0));
3687 dest_mem = replace_equiv_address (dest_mem, target);
3688 }
3689
3690 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3691 create_fixed_operand (&ops[1], dest_mem);
3692 create_fixed_operand (&ops[2], src_mem);
3693 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3694 return NULL_RTX;
3695
3696 if (endp && target != const0_rtx)
3697 {
3698 target = ops[0].value;
3699 /* movstr is supposed to set end to the address of the NUL
3700 terminator. If the caller requested a mempcpy-like return value,
3701 adjust it. */
3702 if (endp == 1)
3703 {
3704 rtx tem = plus_constant (GET_MODE (target),
3705 gen_lowpart (GET_MODE (target), target), 1);
3706 emit_move_insn (target, force_operand (tem, NULL_RTX));
3707 }
3708 }
3709 return target;
3710 }
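/* A short sketch of the adjustment above (illustrative, not part of
   GCC): a movstr pattern leaves the output pointing at the NUL
   terminator, so for a mempcpy-style caller (ENDP == 1) the result is
   bumped by one.  E.g. after copying "abc" into BUF the raw result is
   BUF + 3 and the adjusted result is BUF + 4, one past the NUL.  */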
3711
3712 /* Do some very basic size validation of a call to the strcat builtin
3713 given by EXP. Return NULL_RTX to have the built-in expand to a call
3714 to the library function. */
3715
3716 static rtx
3717 expand_builtin_strcat (tree exp, rtx)
3718 {
3719 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3720 || !warn_stringop_overflow)
3721 return NULL_RTX;
3722
3723 tree dest = CALL_EXPR_ARG (exp, 0);
3724 tree src = CALL_EXPR_ARG (exp, 1);
3725
3726 /* There is no way here to determine the length of the string in
3727 the destination to which the SRC string is being appended so
3728 just diagnose cases when the source string is longer than
3729 the destination object. */
3730
3731 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3732
3733 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3734 destsize);
3735
3736 return NULL_RTX;
3737 }
3738
3739 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3740 NULL_RTX if we failed; the caller should emit a normal call.
3741 Otherwise try to get the result in TARGET, if that is
3742 convenient. */
3743
3744 static rtx
3745 expand_builtin_strcpy (tree exp, rtx target)
3746 {
3747 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3748 return NULL_RTX;
3749
3750 tree dest = CALL_EXPR_ARG (exp, 0);
3751 tree src = CALL_EXPR_ARG (exp, 1);
3752
3753 if (warn_stringop_overflow)
3754 {
3755 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3756 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3757 src, destsize);
3758 }
3759
3760 return expand_builtin_strcpy_args (dest, src, target);
3761 }
3762
3763 /* Helper function to do the actual work for expand_builtin_strcpy. The
3764 arguments to the builtin_strcpy call DEST and SRC are broken out
3765 so that this can also be called without constructing an actual CALL_EXPR.
3766 The other arguments and return value are the same as for
3767 expand_builtin_strcpy. */
3768
3769 static rtx
3770 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3771 {
3772 return expand_movstr (dest, src, target, /*endp=*/0);
3773 }
3774
3775 /* Expand a call EXP to the stpcpy builtin.
3776 Return NULL_RTX if we failed; the caller should emit a normal call,
3777 otherwise try to get the result in TARGET, if convenient (and in
3778 mode MODE if that's convenient). */
3779
3780 static rtx
3781 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3782 {
3783 tree dst, src;
3784 location_t loc = EXPR_LOCATION (exp);
3785
3786 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3787 return NULL_RTX;
3788
3789 dst = CALL_EXPR_ARG (exp, 0);
3790 src = CALL_EXPR_ARG (exp, 1);
3791
3792 if (warn_stringop_overflow)
3793 {
3794 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3795 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3796 src, destsize);
3797 }
3798
3799 /* If return value is ignored, transform stpcpy into strcpy. */
3800 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3801 {
3802 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3803 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3804 return expand_expr (result, target, mode, EXPAND_NORMAL);
3805 }
3806 else
3807 {
3808 tree len, lenp1;
3809 rtx ret;
3810
3811 /* Ensure we get an actual string whose length can be evaluated at
3812 compile-time, not an expression containing a string. This is
3813 because the latter will potentially produce pessimized code
3814 when used to produce the return value. */
3815 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3816 return expand_movstr (dst, src, target, /*endp=*/2);
3817
3818 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3819 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3820 target, exp, /*endp=*/2);
3821
3822 if (ret)
3823 return ret;
3824
3825 if (TREE_CODE (len) == INTEGER_CST)
3826 {
3827 rtx len_rtx = expand_normal (len);
3828
3829 if (CONST_INT_P (len_rtx))
3830 {
3831 ret = expand_builtin_strcpy_args (dst, src, target);
3832
3833 if (ret)
3834 {
3835 if (! target)
3836 {
3837 if (mode != VOIDmode)
3838 target = gen_reg_rtx (mode);
3839 else
3840 target = gen_reg_rtx (GET_MODE (ret));
3841 }
3842 if (GET_MODE (target) != GET_MODE (ret))
3843 ret = gen_lowpart (GET_MODE (target), ret);
3844
3845 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3846 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3847 gcc_assert (ret);
3848
3849 return target;
3850 }
3851 }
3852 }
3853
3854 return expand_movstr (dst, src, target, /*endp=*/2);
3855 }
3856 }
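/* Illustrative sketch (not part of GCC): when the result of stpcpy is
   unused, the expansion above degrades it to strcpy, e.g.

     stpcpy (d, s);          // value ignored
   behaves like
     strcpy (d, s);

   whereas a used result such as p = stpcpy (d, s) must yield
   d + strlen (s), which the mempcpy-based path computes.  */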
3857
3858 /* Check a call EXP to the stpncpy built-in for validity.
3859 Return NULL_RTX on both success and failure. */
3860
3861 static rtx
3862 expand_builtin_stpncpy (tree exp, rtx)
3863 {
3864 if (!validate_arglist (exp,
3865 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3866 || !warn_stringop_overflow)
3867 return NULL_RTX;
3868
3869 /* The source and destination of the call. */
3870 tree dest = CALL_EXPR_ARG (exp, 0);
3871 tree src = CALL_EXPR_ARG (exp, 1);
3872
3873 /* The exact number of bytes to write (not the maximum). */
3874 tree len = CALL_EXPR_ARG (exp, 2);
3875
3876 /* The size of the destination object. */
3877 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3878
3879 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3880
3881 return NULL_RTX;
3882 }
3883
3884 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3885 bytes from constant string DATA + OFFSET and return them as a target
3886 constant. */
3887
3888 rtx
3889 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3890 scalar_int_mode mode)
3891 {
3892 const char *str = (const char *) data;
3893
3894 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3895 return const0_rtx;
3896
3897 return c_readstr (str + offset, mode);
3898 }
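/* A small sketch of the callback's behavior (illustrative, not part of
   GCC): for DATA == "ab", any OFFSET beyond the length of the string
   (here, beyond 2) yields an all-zero constant, which is what supplies
   the zero padding that strncpy requires when the requested length
   exceeds strlen (src) + 1.  */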
3899
3900 /* Helper to check the sizes of sequences and the destination of calls
3901 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3902 success (no overflow or invalid sizes), false otherwise. */
3903
3904 static bool
3905 check_strncat_sizes (tree exp, tree objsize)
3906 {
3907 tree dest = CALL_EXPR_ARG (exp, 0);
3908 tree src = CALL_EXPR_ARG (exp, 1);
3909 tree maxread = CALL_EXPR_ARG (exp, 2);
3910
3911 /* Try to determine the range of lengths that the source expression
3912 refers to. */
3913 tree lenrange[2];
3914 get_range_strlen (src, lenrange);
3915
3916 /* Try to verify that the destination is big enough for the shortest
3917 string. */
3918
3919 if (!objsize && warn_stringop_overflow)
3920 {
3921 /* If it hasn't been provided by __strncat_chk, try to determine
3922 the size of the destination object into which the source is
3923 being copied. */
3924 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3925 }
3926
3927 /* Add one for the terminating nul. */
3928 tree srclen = (lenrange[0]
3929 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3930 size_one_node)
3931 : NULL_TREE);
3932
3933 /* The strncat function copies at most MAXREAD bytes and always appends
3934 the terminating nul so the specified upper bound should never be equal
3935 to (or greater than) the size of the destination. */
3936 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3937 && tree_int_cst_equal (objsize, maxread))
3938 {
3939 location_t loc = tree_nonartificial_location (exp);
3940 loc = expansion_point_location_if_in_system_header (loc);
3941
3942 warning_at (loc, OPT_Wstringop_overflow_,
3943 "%K%qD specified bound %E equals destination size",
3944 exp, get_callee_fndecl (exp), maxread);
3945
3946 return false;
3947 }
3948
3949 if (!srclen
3950 || (maxread && tree_fits_uhwi_p (maxread)
3951 && tree_fits_uhwi_p (srclen)
3952 && tree_int_cst_lt (maxread, srclen)))
3953 srclen = maxread;
3954
3955 /* The number of bytes to write is bounded by MAXREAD; check_access
3956 will also check SRCLEN if MAXREAD's value isn't known. */
3957 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
3958 objsize);
3959 }
3960
3961 /* Similar to expand_builtin_strcat, do some very basic size validation
3962 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3963 the built-in expand to a call to the library function. */
3964
3965 static rtx
3966 expand_builtin_strncat (tree exp, rtx)
3967 {
3968 if (!validate_arglist (exp,
3969 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3970 || !warn_stringop_overflow)
3971 return NULL_RTX;
3972
3973 tree dest = CALL_EXPR_ARG (exp, 0);
3974 tree src = CALL_EXPR_ARG (exp, 1);
3975 /* The upper bound on the number of bytes to write. */
3976 tree maxread = CALL_EXPR_ARG (exp, 2);
3977 /* The length of the source sequence. */
3978 tree slen = c_strlen (src, 1);
3979
3980 /* Try to determine the range of lengths that the source expression
3981 refers to. */
3982 tree lenrange[2];
3983 if (slen)
3984 lenrange[0] = lenrange[1] = slen;
3985 else
3986 get_range_strlen (src, lenrange);
3987
3988 /* Try to verify that the destination is big enough for the shortest
3989 string. First try to determine the size of the destination object
3990 into which the source is being copied. */
3991 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3992
3993 /* Add one for the terminating nul. */
3994 tree srclen = (lenrange[0]
3995 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3996 size_one_node)
3997 : NULL_TREE);
3998
3999 /* The strncat function copies at most MAXREAD bytes and always appends
4000 the terminating nul so the specified upper bound should never be equal
4001 to (or greater than) the size of the destination. */
4002 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4003 && tree_int_cst_equal (destsize, maxread))
4004 {
4005 location_t loc = tree_nonartificial_location (exp);
4006 loc = expansion_point_location_if_in_system_header (loc);
4007
4008 warning_at (loc, OPT_Wstringop_overflow_,
4009 "%K%qD specified bound %E equals destination size",
4010 exp, get_callee_fndecl (exp), maxread);
4011
4012 return NULL_RTX;
4013 }
4014
4015 if (!srclen
4016 || (maxread && tree_fits_uhwi_p (maxread)
4017 && tree_fits_uhwi_p (srclen)
4018 && tree_int_cst_lt (maxread, srclen)))
4019 srclen = maxread;
4020
4021 /* The number of bytes to write is SRCLEN. */
4022 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4023
4024 return NULL_RTX;
4025 }
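/* Illustrative sketch of the diagnostic above (not part of GCC):

     char d[8] = "";
     extern const char *s;
     strncat (d, s, sizeof d);   // bound equals destination size

   Because strncat always appends a terminating NUL after copying up to
   the bound, a bound equal to sizeof d can overflow D by one byte, so
   the call is diagnosed under -Wstringop-overflow.  */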
4026
4027 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4028 NULL_RTX if we failed; the caller should emit a normal call. */
4029
4030 static rtx
4031 expand_builtin_strncpy (tree exp, rtx target)
4032 {
4033 location_t loc = EXPR_LOCATION (exp);
4034
4035 if (validate_arglist (exp,
4036 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4037 {
4038 tree dest = CALL_EXPR_ARG (exp, 0);
4039 tree src = CALL_EXPR_ARG (exp, 1);
4040 /* The number of bytes to write (not the maximum). */
4041 tree len = CALL_EXPR_ARG (exp, 2);
4042 /* The length of the source sequence. */
4043 tree slen = c_strlen (src, 1);
4044
4045 if (warn_stringop_overflow)
4046 {
4047 tree destsize = compute_objsize (dest,
4048 warn_stringop_overflow - 1);
4049
4050 /* The number of bytes to write is LEN but check_access will also
4051 check SLEN if LEN's value isn't known. */
4052 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4053 destsize);
4054 }
4055
4056 /* We must be passed a constant len and src parameter. */
4057 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4058 return NULL_RTX;
4059
4060 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4061
4062 /* We're required to pad with trailing zeros if the requested
4063 len is greater than strlen(s2)+1. In that case try to
4064 use store_by_pieces; if that fails, punt. */
4065 if (tree_int_cst_lt (slen, len))
4066 {
4067 unsigned int dest_align = get_pointer_alignment (dest);
4068 const char *p = c_getstr (src);
4069 rtx dest_mem;
4070
4071 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4072 || !can_store_by_pieces (tree_to_uhwi (len),
4073 builtin_strncpy_read_str,
4074 CONST_CAST (char *, p),
4075 dest_align, false))
4076 return NULL_RTX;
4077
4078 dest_mem = get_memory_rtx (dest, len);
4079 store_by_pieces (dest_mem, tree_to_uhwi (len),
4080 builtin_strncpy_read_str,
4081 CONST_CAST (char *, p), dest_align, false, 0);
4082 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4083 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4084 return dest_mem;
4085 }
4086 }
4087 return NULL_RTX;
4088 }
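/* A sketch of the padding case handled above (illustrative, not part of
   GCC): for a constant source and length,

     char d[5];
     strncpy (d, "ab", 5);

   must store 'a', 'b' and three NULs; store_by_pieces emits those five
   bytes directly via builtin_strncpy_read_str instead of calling the
   library function.  */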
4089
4090 /* Callback routine for store_by_pieces. Return a target constant
4091 of mode MODE made up of GET_MODE_SIZE (MODE) copies of the single
4092 byte pointed to by DATA (OFFSET is ignored). */
4093
4094 rtx
4095 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4096 scalar_int_mode mode)
4097 {
4098 const char *c = (const char *) data;
4099 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4100
4101 memset (p, *c, GET_MODE_SIZE (mode));
4102
4103 return c_readstr (p, mode);
4104 }
4105
4106 /* Callback routine for store_by_pieces. Return the RTL of a register
4107 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4108 char value given in the RTL register data. For example, if mode is
4109 4 bytes wide, return the RTL for 0x01010101*data. */
4110
4111 static rtx
4112 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4113 scalar_int_mode mode)
4114 {
4115 rtx target, coeff;
4116 size_t size;
4117 char *p;
4118
4119 size = GET_MODE_SIZE (mode);
4120 if (size == 1)
4121 return (rtx) data;
4122
4123 p = XALLOCAVEC (char, size);
4124 memset (p, 1, size);
4125 coeff = c_readstr (p, mode);
4126
4127 target = convert_to_mode (mode, (rtx) data, 1);
4128 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4129 return force_reg (mode, target);
4130 }
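/* A short sketch of the multiplication trick above (illustrative, not
   part of GCC): for a 4-byte MODE the coefficient read from the buffer
   of 0x01 bytes is 0x01010101, so a runtime byte value V expands to
   V * 0x01010101, i.e. V replicated into every byte of the word.  */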
4131
4132 /* Expand expression EXP, which is a call to the memset builtin. Return
4133 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4134 try to get the result in TARGET, if convenient (and in mode MODE if that's
4135 convenient). */
4136
4137 static rtx
4138 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4139 {
4140 if (!validate_arglist (exp,
4141 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4142 return NULL_RTX;
4143
4144 tree dest = CALL_EXPR_ARG (exp, 0);
4145 tree val = CALL_EXPR_ARG (exp, 1);
4146 tree len = CALL_EXPR_ARG (exp, 2);
4147
4148 check_memop_access (exp, dest, NULL_TREE, len);
4149
4150 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4151 }
4152
4153 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4154 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4155 try to get the result in TARGET, if convenient (and in mode MODE if that's
4156 convenient). */
4157
4158 static rtx
4159 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4160 {
4161 if (!validate_arglist (exp,
4162 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4163 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4164 return NULL_RTX;
4165 else
4166 {
4167 tree dest = CALL_EXPR_ARG (exp, 0);
4168 tree val = CALL_EXPR_ARG (exp, 2);
4169 tree len = CALL_EXPR_ARG (exp, 3);
4170 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4171
4172 /* Return src bounds with the result. */
4173 if (res)
4174 {
4175 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4176 expand_normal (CALL_EXPR_ARG (exp, 1)));
4177 res = chkp_join_splitted_slot (res, bnd);
4178 }
4179 return res;
4180 }
4181 }
4182
4183 /* Helper function to do the actual work for expand_builtin_memset. The
4184 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4185 so that this can also be called without constructing an actual CALL_EXPR.
4186 The other arguments and return value are the same as for
4187 expand_builtin_memset. */
4188
4189 static rtx
4190 expand_builtin_memset_args (tree dest, tree val, tree len,
4191 rtx target, machine_mode mode, tree orig_exp)
4192 {
4193 tree fndecl, fn;
4194 enum built_in_function fcode;
4195 machine_mode val_mode;
4196 char c;
4197 unsigned int dest_align;
4198 rtx dest_mem, dest_addr, len_rtx;
4199 HOST_WIDE_INT expected_size = -1;
4200 unsigned int expected_align = 0;
4201 unsigned HOST_WIDE_INT min_size;
4202 unsigned HOST_WIDE_INT max_size;
4203 unsigned HOST_WIDE_INT probable_max_size;
4204
4205 dest_align = get_pointer_alignment (dest);
4206
4207 /* If DEST is not a pointer type, don't do this operation in-line. */
4208 if (dest_align == 0)
4209 return NULL_RTX;
4210
4211 if (currently_expanding_gimple_stmt)
4212 stringop_block_profile (currently_expanding_gimple_stmt,
4213 &expected_align, &expected_size);
4214
4215 if (expected_align < dest_align)
4216 expected_align = dest_align;
4217
4218 /* If the LEN parameter is zero, return DEST. */
4219 if (integer_zerop (len))
4220 {
4221 /* Evaluate and ignore VAL in case it has side-effects. */
4222 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4223 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4224 }
4225
4226 /* Stabilize the arguments in case we fail. */
4227 dest = builtin_save_expr (dest);
4228 val = builtin_save_expr (val);
4229 len = builtin_save_expr (len);
4230
4231 len_rtx = expand_normal (len);
4232 determine_block_size (len, len_rtx, &min_size, &max_size,
4233 &probable_max_size);
4234 dest_mem = get_memory_rtx (dest, len);
4235 val_mode = TYPE_MODE (unsigned_char_type_node);
4236
4237 if (TREE_CODE (val) != INTEGER_CST)
4238 {
4239 rtx val_rtx;
4240
4241 val_rtx = expand_normal (val);
4242 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4243
4244 /* Assume that we can memset by pieces if we can store
4245 the coefficients by pieces (in the required modes).
4246 We can't pass builtin_memset_gen_str as that emits RTL. */
4247 c = 1;
4248 if (tree_fits_uhwi_p (len)
4249 && can_store_by_pieces (tree_to_uhwi (len),
4250 builtin_memset_read_str, &c, dest_align,
4251 true))
4252 {
4253 val_rtx = force_reg (val_mode, val_rtx);
4254 store_by_pieces (dest_mem, tree_to_uhwi (len),
4255 builtin_memset_gen_str, val_rtx, dest_align,
4256 true, 0);
4257 }
4258 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4259 dest_align, expected_align,
4260 expected_size, min_size, max_size,
4261 probable_max_size))
4262 goto do_libcall;
4263
4264 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4265 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4266 return dest_mem;
4267 }
4268
4269 if (target_char_cast (val, &c))
4270 goto do_libcall;
4271
4272 if (c)
4273 {
4274 if (tree_fits_uhwi_p (len)
4275 && can_store_by_pieces (tree_to_uhwi (len),
4276 builtin_memset_read_str, &c, dest_align,
4277 true))
4278 store_by_pieces (dest_mem, tree_to_uhwi (len),
4279 builtin_memset_read_str, &c, dest_align, true, 0);
4280 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4281 gen_int_mode (c, val_mode),
4282 dest_align, expected_align,
4283 expected_size, min_size, max_size,
4284 probable_max_size))
4285 goto do_libcall;
4286
4287 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4288 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4289 return dest_mem;
4290 }
4291
4292 set_mem_align (dest_mem, dest_align);
4293 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4294 CALL_EXPR_TAILCALL (orig_exp)
4295 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4296 expected_align, expected_size,
4297 min_size, max_size,
4298 probable_max_size);
4299
4300 if (dest_addr == 0)
4301 {
4302 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4303 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4304 }
4305
4306 return dest_addr;
4307
4308 do_libcall:
4309 fndecl = get_callee_fndecl (orig_exp);
4310 fcode = DECL_FUNCTION_CODE (fndecl);
4311 if (fcode == BUILT_IN_MEMSET
4312 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4313 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4314 dest, val, len);
4315 else if (fcode == BUILT_IN_BZERO)
4316 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4317 dest, len);
4318 else
4319 gcc_unreachable ();
4320 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4321 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4322 return expand_call (fn, target, target == const0_rtx);
4323 }
4324
4325 /* Expand expression EXP, which is a call to the bzero builtin. Return
4326 NULL_RTX if we failed; the caller should emit a normal call. */
4327
4328 static rtx
4329 expand_builtin_bzero (tree exp)
4330 {
4331 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4332 return NULL_RTX;
4333
4334 tree dest = CALL_EXPR_ARG (exp, 0);
4335 tree size = CALL_EXPR_ARG (exp, 1);
4336
4337 check_memop_access (exp, dest, NULL_TREE, size);
4338
4339 /* New argument list transforming bzero(ptr x, int y) to
4340 memset(ptr x, int 0, size_t y). This is done this way
4341 so that if it isn't expanded inline, we fall back to
4342 calling bzero instead of memset. */
4343
4344 location_t loc = EXPR_LOCATION (exp);
4345
4346 return expand_builtin_memset_args (dest, integer_zero_node,
4347 fold_convert_loc (loc,
4348 size_type_node, size),
4349 const0_rtx, VOIDmode, exp);
4350 }
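/* Illustrative sketch (not part of GCC): the transformation above turns

     bzero (p, n);
   into the equivalent of
     memset (p, 0, (size_t) n);

   while arranging that a failed inline expansion still falls back to a
   library call to bzero rather than to memset.  */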
4351
4352 /* Try to expand cmpstr operation ICODE with the given operands.
4353 Return the result rtx on success, otherwise return null. */
4354
4355 static rtx
4356 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4357 HOST_WIDE_INT align)
4358 {
4359 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4360
4361 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4362 target = NULL_RTX;
4363
4364 struct expand_operand ops[4];
4365 create_output_operand (&ops[0], target, insn_mode);
4366 create_fixed_operand (&ops[1], arg1_rtx);
4367 create_fixed_operand (&ops[2], arg2_rtx);
4368 create_integer_operand (&ops[3], align);
4369 if (maybe_expand_insn (icode, 4, ops))
4370 return ops[0].value;
4371 return NULL_RTX;
4372 }
4373
4374 /* Expand expression EXP, which is a call to the memcmp built-in function.
4375 Return NULL_RTX if we failed and the caller should emit a normal call,
4376 otherwise try to get the result in TARGET, if convenient.
4377 RESULT_EQ is true if we can relax the returned value to be either zero
4378 or nonzero, without caring about the sign. */
4379
4380 static rtx
4381 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4382 {
4383 if (!validate_arglist (exp,
4384 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4385 return NULL_RTX;
4386
4387 tree arg1 = CALL_EXPR_ARG (exp, 0);
4388 tree arg2 = CALL_EXPR_ARG (exp, 1);
4389 tree len = CALL_EXPR_ARG (exp, 2);
4390
4391 /* Diagnose calls where the specified length exceeds the size of either
4392 object. */
4393 if (warn_stringop_overflow)
4394 {
4395 tree size = compute_objsize (arg1, 0);
4396 if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4397 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
4398 {
4399 size = compute_objsize (arg2, 0);
4400 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4401 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4402 }
4403 }
4404
4405 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4406 location_t loc = EXPR_LOCATION (exp);
4407
4408 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4409 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4410
4411 /* If we don't have POINTER_TYPE, call the function. */
4412 if (arg1_align == 0 || arg2_align == 0)
4413 return NULL_RTX;
4414
4415 rtx arg1_rtx = get_memory_rtx (arg1, len);
4416 rtx arg2_rtx = get_memory_rtx (arg2, len);
4417 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4418
4419 /* Set MEM_SIZE as appropriate. */
4420 if (CONST_INT_P (len_rtx))
4421 {
4422 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4423 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4424 }
4425
4426 by_pieces_constfn constfn = NULL;
4427
4428 const char *src_str = c_getstr (arg2);
4429 if (result_eq && src_str == NULL)
4430 {
4431 src_str = c_getstr (arg1);
4432 if (src_str != NULL)
4433 std::swap (arg1_rtx, arg2_rtx);
4434 }
4435
4436 /* If SRC is a string constant and block move would be done
4437 by pieces, we can avoid loading the string from memory
4438 and only stored the computed constants. */
4439 if (src_str
4440 && CONST_INT_P (len_rtx)
4441 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4442 constfn = builtin_memcpy_read_str;
4443
4444 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4445 TREE_TYPE (len), target,
4446 result_eq, constfn,
4447 CONST_CAST (char *, src_str));
4448
4449 if (result)
4450 {
4451 /* Return the value in the proper mode for this function. */
4452 if (GET_MODE (result) == mode)
4453 return result;
4454
4455 if (target != 0)
4456 {
4457 convert_move (target, result, 0);
4458 return target;
4459 }
4460
4461 return convert_to_mode (mode, result, 0);
4462 }
4463
4464 return NULL_RTX;
4465 }
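/* A brief sketch of the RESULT_EQ relaxation (illustrative, not part of
   GCC): for a use such as

     if (memcmp (a, b, n) == 0) ...

   only the zero/nonzero distinction matters, so the block-compare
   expansion is free to return any nonzero value on a mismatch instead
   of the sign-correct difference a three-way comparison would need.  */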
4466
4467 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4468 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4469 try to get the result in TARGET, if convenient. */
4470
4471 static rtx
4472 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4473 {
4474 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4475 return NULL_RTX;
4476
4477 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4478 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4479 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4480 {
4481 rtx arg1_rtx, arg2_rtx;
4482 tree fndecl, fn;
4483 tree arg1 = CALL_EXPR_ARG (exp, 0);
4484 tree arg2 = CALL_EXPR_ARG (exp, 1);
4485 rtx result = NULL_RTX;
4486
4487 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4488 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4489
4490 /* If we don't have POINTER_TYPE, call the function. */
4491 if (arg1_align == 0 || arg2_align == 0)
4492 return NULL_RTX;
4493
4494 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4495 arg1 = builtin_save_expr (arg1);
4496 arg2 = builtin_save_expr (arg2);
4497
4498 arg1_rtx = get_memory_rtx (arg1, NULL);
4499 arg2_rtx = get_memory_rtx (arg2, NULL);
4500
4501 /* Try to call cmpstrsi. */
4502 if (cmpstr_icode != CODE_FOR_nothing)
4503 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4504 MIN (arg1_align, arg2_align));
4505
4506 /* Try to determine at least one length and call cmpstrnsi. */
4507 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4508 {
4509 tree len;
4510 rtx arg3_rtx;
4511
4512 tree len1 = c_strlen (arg1, 1);
4513 tree len2 = c_strlen (arg2, 1);
4514
4515 if (len1)
4516 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4517 if (len2)
4518 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4519
4520 /* If we don't have a constant length for the first, use the length
4521 of the second, if we know it. We don't require a constant for
4522 this case; some cost analysis could be done if both are available
4523 but neither is constant. For now, assume they're equally cheap,
4524 unless one has side effects. If both strings have constant lengths,
4525 use the smaller. */
4526
4527 if (!len1)
4528 len = len2;
4529 else if (!len2)
4530 len = len1;
4531 else if (TREE_SIDE_EFFECTS (len1))
4532 len = len2;
4533 else if (TREE_SIDE_EFFECTS (len2))
4534 len = len1;
4535 else if (TREE_CODE (len1) != INTEGER_CST)
4536 len = len2;
4537 else if (TREE_CODE (len2) != INTEGER_CST)
4538 len = len1;
4539 else if (tree_int_cst_lt (len1, len2))
4540 len = len1;
4541 else
4542 len = len2;
4543
4544 /* If both arguments have side effects, we cannot optimize. */
4545 if (len && !TREE_SIDE_EFFECTS (len))
4546 {
4547 arg3_rtx = expand_normal (len);
4548 result = expand_cmpstrn_or_cmpmem
4549 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4550 arg3_rtx, MIN (arg1_align, arg2_align));
4551 }
4552 }
4553
4554 if (result)
4555 {
4556 /* Return the value in the proper mode for this function. */
4557 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4558 if (GET_MODE (result) == mode)
4559 return result;
4560 if (target == 0)
4561 return convert_to_mode (mode, result, 0);
4562 convert_move (target, result, 0);
4563 return target;
4564 }
4565
4566 /* Expand the library call ourselves using a stabilized argument
4567 list to avoid re-evaluating the function's arguments twice. */
4568 fndecl = get_callee_fndecl (exp);
4569 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4570 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4571 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4572 return expand_call (fn, target, target == const0_rtx);
4573 }
4574 return NULL_RTX;
4575 }
4576
4577 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4578 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4579 try to get the result in TARGET, if convenient. */
4580
4581 static rtx
4582 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4583 ATTRIBUTE_UNUSED machine_mode mode)
4584 {
4585 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4586
4587 if (!validate_arglist (exp,
4588 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4589 return NULL_RTX;
4590
4591 /* If c_strlen can determine an expression for one of the string
4592 lengths, and it doesn't have side effects, then emit cmpstrnsi
4593 using length MIN(strlen(string)+1, arg3). */
4594 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4595 if (cmpstrn_icode != CODE_FOR_nothing)
4596 {
4597 tree len, len1, len2, len3;
4598 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4599 rtx result;
4600 tree fndecl, fn;
4601 tree arg1 = CALL_EXPR_ARG (exp, 0);
4602 tree arg2 = CALL_EXPR_ARG (exp, 1);
4603 tree arg3 = CALL_EXPR_ARG (exp, 2);
4604
4605 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4606 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4607
4608 len1 = c_strlen (arg1, 1);
4609 len2 = c_strlen (arg2, 1);
4610
4611 if (len1)
4612 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4613 if (len2)
4614 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4615
4616 len3 = fold_convert_loc (loc, sizetype, arg3);
4617
4618 /* If we don't have a constant length for the first, use the length
4619 of the second, if we know it. If neither string is constant length,
4620 use the given length argument. We don't require a constant for
4621 this case; some cost analysis could be done if both are available
4622 but neither is constant. For now, assume they're equally cheap,
4623 unless one has side effects. If both strings have constant lengths,
4624 use the smaller. */
4625
4626 if (!len1 && !len2)
4627 len = len3;
4628 else if (!len1)
4629 len = len2;
4630 else if (!len2)
4631 len = len1;
4632 else if (TREE_SIDE_EFFECTS (len1))
4633 len = len2;
4634 else if (TREE_SIDE_EFFECTS (len2))
4635 len = len1;
4636 else if (TREE_CODE (len1) != INTEGER_CST)
4637 len = len2;
4638 else if (TREE_CODE (len2) != INTEGER_CST)
4639 len = len1;
4640 else if (tree_int_cst_lt (len1, len2))
4641 len = len1;
4642 else
4643 len = len2;
4644
4645 /* If we are not using the given length, we must incorporate it here.
4646 The actual new length parameter will be MIN(len,arg3) in this case. */
4647 if (len != len3)
4648 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4649 arg1_rtx = get_memory_rtx (arg1, len);
4650 arg2_rtx = get_memory_rtx (arg2, len);
4651 arg3_rtx = expand_normal (len);
4652 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4653 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4654 MIN (arg1_align, arg2_align));
4655 if (result)
4656 {
4657 /* Return the value in the proper mode for this function. */
4658 mode = TYPE_MODE (TREE_TYPE (exp));
4659 if (GET_MODE (result) == mode)
4660 return result;
4661 if (target == 0)
4662 return convert_to_mode (mode, result, 0);
4663 convert_move (target, result, 0);
4664 return target;
4665 }
4666
4667 /* Expand the library call ourselves using a stabilized argument
4668 list to avoid re-evaluating the function's arguments twice. */
4669 fndecl = get_callee_fndecl (exp);
4670 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4671 arg1, arg2, len);
4672 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4673 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4674 return expand_call (fn, target, target == const0_rtx);
4675 }
4676 return NULL_RTX;
4677 }
4678
4679 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4680 if that's convenient. */
4681
4682 rtx
4683 expand_builtin_saveregs (void)
4684 {
4685 rtx val;
4686 rtx_insn *seq;
4687
4688 /* Don't do __builtin_saveregs more than once in a function.
4689 Save the result of the first call and reuse it. */
4690 if (saveregs_value != 0)
4691 return saveregs_value;
4692
4693 /* When this function is called, it means that registers must be
4694 saved on entry to this function. So we migrate the call to the
4695 first insn of this function. */
4696
4697 start_sequence ();
4698
4699 /* Do whatever the machine needs done in this case. */
4700 val = targetm.calls.expand_builtin_saveregs ();
4701
4702 seq = get_insns ();
4703 end_sequence ();
4704
4705 saveregs_value = val;
4706
4707 /* Put the insns after the NOTE that starts the function. If this
4708 is inside a start_sequence, make the outer-level insn chain current, so
4709 the code is placed at the start of the function. */
4710 push_topmost_sequence ();
4711 emit_insn_after (seq, entry_of_function ());
4712 pop_topmost_sequence ();
4713
4714 return val;
4715 }
4716
4717 /* Expand a call to __builtin_next_arg. */
4718
4719 static rtx
4720 expand_builtin_next_arg (void)
4721 {
4722 /* Checking the arguments is already done in fold_builtin_next_arg,
4723 which must be called before this function. */
4724 return expand_binop (ptr_mode, add_optab,
4725 crtl->args.internal_arg_pointer,
4726 crtl->args.arg_offset_rtx,
4727 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4728 }
4729
4730 /* Make it easier for the backends by protecting the valist argument
4731 from multiple evaluations. */
4732
4733 static tree
4734 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4735 {
4736 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4737
4738 /* The current way of determining the type of valist is completely
4739 bogus. We should have the information on the va builtin instead. */
4740 if (!vatype)
4741 vatype = targetm.fn_abi_va_list (cfun->decl);
4742
4743 if (TREE_CODE (vatype) == ARRAY_TYPE)
4744 {
4745 if (TREE_SIDE_EFFECTS (valist))
4746 valist = save_expr (valist);
4747
4748 /* For this case, the backends will be expecting a pointer to
4749 vatype, but it's possible we've actually been given an array
4750 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4751 So fix it. */
4752 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4753 {
4754 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4755 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4756 }
4757 }
4758 else
4759 {
4760 tree pt = build_pointer_type (vatype);
4761
4762 if (! needs_lvalue)
4763 {
4764 if (! TREE_SIDE_EFFECTS (valist))
4765 return valist;
4766
4767 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4768 TREE_SIDE_EFFECTS (valist) = 1;
4769 }
4770
4771 if (TREE_SIDE_EFFECTS (valist))
4772 valist = save_expr (valist);
4773 valist = fold_build2_loc (loc, MEM_REF,
4774 vatype, valist, build_int_cst (pt, 0));
4775 }
4776
4777 return valist;
4778 }
4779
4780 /* The "standard" definition of va_list is void*. */
4781
4782 tree
4783 std_build_builtin_va_list (void)
4784 {
4785 return ptr_type_node;
4786 }
4787
4788 /* The "standard" abi va_list is va_list_type_node. */
4789
4790 tree
4791 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4792 {
4793 return va_list_type_node;
4794 }
4795
4796 /* The "standard" type of va_list is va_list_type_node. */
4797
4798 tree
4799 std_canonical_va_list_type (tree type)
4800 {
4801 tree wtype, htype;
4802
4803 wtype = va_list_type_node;
4804 htype = type;
4805
4806 if (TREE_CODE (wtype) == ARRAY_TYPE)
4807 {
4808 /* If va_list is an array type, the argument may have decayed
4809 to a pointer type, e.g. by being passed to another function.
4810 In that case, unwrap both types so that we can compare the
4811 underlying records. */
4812 if (TREE_CODE (htype) == ARRAY_TYPE
4813 || POINTER_TYPE_P (htype))
4814 {
4815 wtype = TREE_TYPE (wtype);
4816 htype = TREE_TYPE (htype);
4817 }
4818 }
4819 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4820 return va_list_type_node;
4821
4822 return NULL_TREE;
4823 }
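/* Illustrative sketch (not part of GCC; the declaration below is a
   hypothetical example of an array-type va_list, similar in spirit to
   the x86-64 ABI):

     typedef struct __va_list_tag va_list[1];

   A va_list object passed to another function decays to a pointer to
   the element type, so both the array form and the decayed pointer form
   are unwrapped above before comparing the underlying record types.  */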
4824
4825 /* The "standard" implementation of va_start: just assign `nextarg' to
4826 the variable. */
4827
4828 void
4829 std_expand_builtin_va_start (tree valist, rtx nextarg)
4830 {
4831 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4832 convert_move (va_r, nextarg, 0);
4833
4834 /* We do not have any valid bounds for the pointer, so
4835 just store zero bounds for it. */
4836 if (chkp_function_instrumented_p (current_function_decl))
4837 chkp_expand_bounds_reset_for_mem (valist,
4838 make_tree (TREE_TYPE (valist),
4839 nextarg));
4840 }
4841
4842 /* Expand EXP, a call to __builtin_va_start. */
4843
4844 static rtx
4845 expand_builtin_va_start (tree exp)
4846 {
4847 rtx nextarg;
4848 tree valist;
4849 location_t loc = EXPR_LOCATION (exp);
4850
4851 if (call_expr_nargs (exp) < 2)
4852 {
4853 error_at (loc, "too few arguments to function %<va_start%>");
4854 return const0_rtx;
4855 }
4856
4857 if (fold_builtin_next_arg (exp, true))
4858 return const0_rtx;
4859
4860 nextarg = expand_builtin_next_arg ();
4861 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4862
4863 if (targetm.expand_builtin_va_start)
4864 targetm.expand_builtin_va_start (valist, nextarg);
4865 else
4866 std_expand_builtin_va_start (valist, nextarg);
4867
4868 return const0_rtx;
4869 }
4870
4871 /* Expand EXP, a call to __builtin_va_end. */
4872
4873 static rtx
4874 expand_builtin_va_end (tree exp)
4875 {
4876 tree valist = CALL_EXPR_ARG (exp, 0);
4877
4878 /* Evaluate for side effects, if needed. I hate macros that don't
4879 do that. */
4880 if (TREE_SIDE_EFFECTS (valist))
4881 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4882
4883 return const0_rtx;
4884 }
4885
4886 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4887 builtin rather than just as an assignment in stdarg.h because of the
4888 nastiness of array-type va_list types. */
4889
4890 static rtx
4891 expand_builtin_va_copy (tree exp)
4892 {
4893 tree dst, src, t;
4894 location_t loc = EXPR_LOCATION (exp);
4895
4896 dst = CALL_EXPR_ARG (exp, 0);
4897 src = CALL_EXPR_ARG (exp, 1);
4898
4899 dst = stabilize_va_list_loc (loc, dst, 1);
4900 src = stabilize_va_list_loc (loc, src, 0);
4901
4902 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4903
4904 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4905 {
4906 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4907 TREE_SIDE_EFFECTS (t) = 1;
4908 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4909 }
4910 else
4911 {
4912 rtx dstb, srcb, size;
4913
4914 /* Evaluate to pointers. */
4915 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4916 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4917 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4918 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4919
4920 dstb = convert_memory_address (Pmode, dstb);
4921 srcb = convert_memory_address (Pmode, srcb);
4922
4923 /* "Dereference" to BLKmode memories. */
4924 dstb = gen_rtx_MEM (BLKmode, dstb);
4925 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4926 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4927 srcb = gen_rtx_MEM (BLKmode, srcb);
4928 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4929 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4930
4931 /* Copy. */
4932 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4933 }
4934
4935 return const0_rtx;
4936 }
4937
4938 /* Expand a call to one of the builtin functions __builtin_frame_address or
4939 __builtin_return_address. */
4940
4941 static rtx
4942 expand_builtin_frame_address (tree fndecl, tree exp)
4943 {
4944 /* The argument must be a nonnegative integer constant.
4945 It counts the number of frames to scan up the stack.
4946 The value is either the frame pointer value or the return
4947 address saved in that frame. */
4948 if (call_expr_nargs (exp) == 0)
4949 /* Warning about missing arg was already issued. */
4950 return const0_rtx;
4951 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4952 {
4953 error ("invalid argument to %qD", fndecl);
4954 return const0_rtx;
4955 }
4956 else
4957 {
4958 /* Number of frames to scan up the stack. */
4959 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4960
4961 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4962
4963 /* Some ports cannot access arbitrary stack frames. */
4964 if (tem == NULL)
4965 {
4966 warning (0, "unsupported argument to %qD", fndecl);
4967 return const0_rtx;
4968 }
4969
4970 if (count)
4971 {
4972 /* Warn since no effort is made to ensure that any frame
4973 beyond the current one exists or can be safely reached. */
4974 warning (OPT_Wframe_address, "calling %qD with "
4975 "a nonzero argument is unsafe", fndecl);
4976 }
4977
4978 /* For __builtin_frame_address, return what we've got. */
4979 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4980 return tem;
4981
4982 if (!REG_P (tem)
4983 && ! CONSTANT_P (tem))
4984 tem = copy_addr_to_reg (tem);
4985 return tem;
4986 }
4987 }
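/* Illustrative sketch (not part of GCC):

     void *fp = __builtin_frame_address (0);    // current frame, safe
     void *ra = __builtin_return_address (1);   // caller's caller; warns
                                                // under -Wframe-address

   Nonzero arguments walk up the stack with no guarantee that the
   requested frame exists, hence the warning above.  */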
4988
4989 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4990 failed and the caller should emit a normal call. */
4991
4992 static rtx
4993 expand_builtin_alloca (tree exp)
4994 {
4995 rtx op0;
4996 rtx result;
4997 unsigned int align;
4998 tree fndecl = get_callee_fndecl (exp);
4999 HOST_WIDE_INT max_size;
5000 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5001 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5002 bool valid_arglist
5003 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5004 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5005 VOID_TYPE)
5006 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5007 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5008 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5009
5010 if (!valid_arglist)
5011 return NULL_RTX;
5012
5013 if ((alloca_for_var && !warn_vla_limit)
5014 || (!alloca_for_var && !warn_alloca_limit))
5015 {
5016 /* -Walloca-larger-than and -Wvla-larger-than settings override
5017 the more general -Walloc-size-larger-than, so unless either of
5018 the former options is specified, check the alloca arguments for
5019 overflow. */
5020 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5021 int idx[] = { 0, -1 };
5022 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5023 }
5024
5025 /* Compute the argument. */
5026 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5027
5028 /* Compute the alignment. */
5029 align = (fcode == BUILT_IN_ALLOCA
5030 ? BIGGEST_ALIGNMENT
5031 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5032
5033 /* Compute the maximum size. */
5034 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5035 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5036 : -1);
5037
5038 /* Allocate the desired space. If the allocation stems from the declaration
5039 of a variable-sized object, it cannot accumulate. */
5040 result
5041 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5042 result = convert_memory_address (ptr_mode, result);
5043
5044 return result;
5045 }
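/* For illustration (argument names are illustrative), the forms handled
   above correspond to calls such as

     void *p = __builtin_alloca (n);
     void *q = __builtin_alloca_with_align (n, 64);

   where the second argument of the _with_align form is the requested
   alignment in bits and the _with_align_and_max form takes an additional
   constant maximum size.  VLA declarations are lowered to the _with_align
   form with CALL_ALLOCA_FOR_VAR_P set, which is what distinguishes the
   -Wvla-larger-than and -Walloca-larger-than handling above.  */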
5046
5047 /* Emit a call to __asan_allocas_unpoison for the call in EXP. Replace the
5048 second argument of that call with virtual_stack_dynamic_rtx, because the
5049 asan pass emits a dummy value for this parameter and relies on this
5050 function to perform the change. See the motivation for this in the
5051 comment for handle_builtin_stack_restore. */
5052
5053 static rtx
5054 expand_asan_emit_allocas_unpoison (tree exp)
5055 {
5056 tree arg0 = CALL_EXPR_ARG (exp, 0);
5057 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5058 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
5059 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5060 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5061 top, ptr_mode, bot, ptr_mode);
5062 return ret;
5063 }
5064
5065 /* Expand a call to bswap builtin in EXP.
5066 Return NULL_RTX if a normal call should be emitted rather than expanding the
5067 function in-line. If convenient, the result should be placed in TARGET.
5068 SUBTARGET may be used as the target for computing one of EXP's operands. */
5069
5070 static rtx
5071 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5072 rtx subtarget)
5073 {
5074 tree arg;
5075 rtx op0;
5076
5077 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5078 return NULL_RTX;
5079
5080 arg = CALL_EXPR_ARG (exp, 0);
5081 op0 = expand_expr (arg,
5082 subtarget && GET_MODE (subtarget) == target_mode
5083 ? subtarget : NULL_RTX,
5084 target_mode, EXPAND_NORMAL);
5085 if (GET_MODE (op0) != target_mode)
5086 op0 = convert_to_mode (target_mode, op0, 1);
5087
5088 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5089
5090 gcc_assert (target);
5091
5092 return convert_to_mode (target_mode, target, 1);
5093 }
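/* For illustration, a call such as

     y = __builtin_bswap32 (x);

   reverses the byte order of its argument; the expansion above simply
   funnels the operand into the target's bswap_optab pattern.  */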
5094
5095 /* Expand a call to a unary builtin in EXP.
5096 Return NULL_RTX if a normal call should be emitted rather than expanding the
5097 function in-line. If convenient, the result should be placed in TARGET.
5098 SUBTARGET may be used as the target for computing one of EXP's operands. */
5099
5100 static rtx
5101 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5102 rtx subtarget, optab op_optab)
5103 {
5104 rtx op0;
5105
5106 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5107 return NULL_RTX;
5108
5109 /* Compute the argument. */
5110 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5111 (subtarget
5112 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5113 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5114 VOIDmode, EXPAND_NORMAL);
5115 /* Compute op, into TARGET if possible.
5116 Set TARGET to wherever the result comes back. */
5117 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5118 op_optab, op0, target, op_optab != clrsb_optab);
5119 gcc_assert (target);
5120
5121 return convert_to_mode (target_mode, target, 0);
5122 }
5123
5124 /* Expand a call to __builtin_expect. We just return our argument
5125 as the builtin_expect semantics should have already been handled by
5126 the tree branch prediction pass. */
5127
5128 static rtx
5129 expand_builtin_expect (tree exp, rtx target)
5130 {
5131 tree arg;
5132
5133 if (call_expr_nargs (exp) < 2)
5134 return const0_rtx;
5135 arg = CALL_EXPR_ARG (exp, 0);
5136
5137 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5138 /* When guessing was done, the hints should already have been stripped away. */
5139 gcc_assert (!flag_guess_branch_prob
5140 || optimize == 0 || seen_error ());
5141 return target;
5142 }
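/* For illustration, by this point a use such as

     if (__builtin_expect (ptr != NULL, 1))
       ...

   has already had its hint consumed by branch prediction, so only the
   first argument needs to be materialized here.  */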
5143
5144 /* Expand a call to __builtin_assume_aligned. We just return our first
5145 argument as the builtin_assume_aligned semantics should have already
5146 been handled by CCP. */
5147
5148 static rtx
5149 expand_builtin_assume_aligned (tree exp, rtx target)
5150 {
5151 if (call_expr_nargs (exp) < 2)
5152 return const0_rtx;
5153 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5154 EXPAND_NORMAL);
5155 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5156 && (call_expr_nargs (exp) < 3
5157 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5158 return target;
5159 }
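/* For illustration, a call such as

     p = __builtin_assume_aligned (p, 32);

   simply yields its first argument here; the alignment promise itself was
   recorded earlier by CCP, and the assertion above merely checks that the
   extra arguments have no side effects.  */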
5160
5161 void
5162 expand_builtin_trap (void)
5163 {
5164 if (targetm.have_trap ())
5165 {
5166 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5167 /* For trap insns when not accumulating outgoing args force
5168 REG_ARGS_SIZE note to prevent crossjumping of calls with
5169 different args sizes. */
5170 if (!ACCUMULATE_OUTGOING_ARGS)
5171 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5172 }
5173 else
5174 {
5175 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5176 tree call_expr = build_call_expr (fn, 0);
5177 expand_call (call_expr, NULL_RTX, false);
5178 }
5179
5180 emit_barrier ();
5181 }
5182
5183 /* Expand a call to __builtin_unreachable. We do nothing except emit
5184 a barrier saying that control flow will not pass here.
5185
5186 It is the responsibility of the program being compiled to ensure
5187 that control flow never reaches __builtin_unreachable. */
5188 static void
5189 expand_builtin_unreachable (void)
5190 {
5191 emit_barrier ();
5192 }
5193
5194 /* Expand EXP, a call to fabs, fabsf or fabsl.
5195 Return NULL_RTX if a normal call should be emitted rather than expanding
5196 the function inline. If convenient, the result should be placed
5197 in TARGET. SUBTARGET may be used as the target for computing
5198 the operand. */
5199
5200 static rtx
5201 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5202 {
5203 machine_mode mode;
5204 tree arg;
5205 rtx op0;
5206
5207 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5208 return NULL_RTX;
5209
5210 arg = CALL_EXPR_ARG (exp, 0);
5211 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5212 mode = TYPE_MODE (TREE_TYPE (arg));
5213 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5214 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5215 }
5216
5217 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5218 Return NULL if a normal call should be emitted rather than expanding the
5219 function inline. If convenient, the result should be placed in TARGET.
5220 SUBTARGET may be used as the target for computing the operand. */
5221
5222 static rtx
5223 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5224 {
5225 rtx op0, op1;
5226 tree arg;
5227
5228 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5229 return NULL_RTX;
5230
5231 arg = CALL_EXPR_ARG (exp, 0);
5232 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5233
5234 arg = CALL_EXPR_ARG (exp, 1);
5235 op1 = expand_normal (arg);
5236
5237 return expand_copysign (op0, op1, target);
5238 }
5239
5240 /* Expand a call to __builtin___clear_cache. */
5241
5242 static rtx
5243 expand_builtin___clear_cache (tree exp)
5244 {
5245 if (!targetm.code_for_clear_cache)
5246 {
5247 #ifdef CLEAR_INSN_CACHE
5248 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5249 does something. Just do the default expansion to a call to
5250 __clear_cache(). */
5251 return NULL_RTX;
5252 #else
5253 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5254 does nothing. There is no need to call it. Do nothing. */
5255 return const0_rtx;
5256 #endif /* CLEAR_INSN_CACHE */
5257 }
5258
5259 /* We have a "clear_cache" insn, and it will handle everything. */
5260 tree begin, end;
5261 rtx begin_rtx, end_rtx;
5262
5263 /* We must not expand to a library call. If we did, any
5264 fallback library function in libgcc that might contain a call to
5265 __builtin___clear_cache() would recurse infinitely. */
5266 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5267 {
5268 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5269 return const0_rtx;
5270 }
5271
5272 if (targetm.have_clear_cache ())
5273 {
5274 struct expand_operand ops[2];
5275
5276 begin = CALL_EXPR_ARG (exp, 0);
5277 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5278
5279 end = CALL_EXPR_ARG (exp, 1);
5280 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5281
5282 create_address_operand (&ops[0], begin_rtx);
5283 create_address_operand (&ops[1], end_rtx);
5284 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5285 return const0_rtx;
5286 }
5287 return const0_rtx;
5288 }
5289
5290 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5291
5292 static rtx
5293 round_trampoline_addr (rtx tramp)
5294 {
5295 rtx temp, addend, mask;
5296
5297 /* If we don't need too much alignment, we'll have been guaranteed
5298 proper alignment by get_trampoline_type. */
5299 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5300 return tramp;
5301
5302 /* Round address up to desired boundary. */
5303 temp = gen_reg_rtx (Pmode);
5304 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5305 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5306
5307 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5308 temp, 0, OPTAB_LIB_WIDEN);
5309 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5310 temp, 0, OPTAB_LIB_WIDEN);
5311
5312 return tramp;
5313 }
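/* For illustration, with a (hypothetical) TRAMPOLINE_ALIGNMENT of 64 bits
   the code above computes (tramp + 7) & -8, the usual round-up-to-a-
   multiple-of-the-alignment idiom, carried out in Pmode.  */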
5314
5315 static rtx
5316 expand_builtin_init_trampoline (tree exp, bool onstack)
5317 {
5318 tree t_tramp, t_func, t_chain;
5319 rtx m_tramp, r_tramp, r_chain, tmp;
5320
5321 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5322 POINTER_TYPE, VOID_TYPE))
5323 return NULL_RTX;
5324
5325 t_tramp = CALL_EXPR_ARG (exp, 0);
5326 t_func = CALL_EXPR_ARG (exp, 1);
5327 t_chain = CALL_EXPR_ARG (exp, 2);
5328
5329 r_tramp = expand_normal (t_tramp);
5330 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5331 MEM_NOTRAP_P (m_tramp) = 1;
5332
5333 /* If ONSTACK, the TRAMP argument should be the address of a field
5334 within the local function's FRAME decl. Either way, let's see if
5335 we can fill in the MEM_ATTRs for this memory. */
5336 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5337 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5338
5339 /* Creator of a heap trampoline is responsible for making sure the
5340 address is aligned to at least STACK_BOUNDARY. Normally malloc
5341 will ensure this anyhow. */
5342 tmp = round_trampoline_addr (r_tramp);
5343 if (tmp != r_tramp)
5344 {
5345 m_tramp = change_address (m_tramp, BLKmode, tmp);
5346 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5347 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5348 }
5349
5350 /* The FUNC argument should be the address of the nested function.
5351 Extract the actual function decl to pass to the hook. */
5352 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5353 t_func = TREE_OPERAND (t_func, 0);
5354 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5355
5356 r_chain = expand_normal (t_chain);
5357
5358 /* Generate insns to initialize the trampoline. */
5359 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5360
5361 if (onstack)
5362 {
5363 trampolines_created = 1;
5364
5365 if (targetm.calls.custom_function_descriptors != 0)
5366 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5367 "trampoline generated for nested function %qD", t_func);
5368 }
5369
5370 return const0_rtx;
5371 }
5372
5373 static rtx
5374 expand_builtin_adjust_trampoline (tree exp)
5375 {
5376 rtx tramp;
5377
5378 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5379 return NULL_RTX;
5380
5381 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5382 tramp = round_trampoline_addr (tramp);
5383 if (targetm.calls.trampoline_adjust_address)
5384 tramp = targetm.calls.trampoline_adjust_address (tramp);
5385
5386 return tramp;
5387 }
5388
5389 /* Expand a call to the builtin descriptor initialization routine.
5390 A descriptor is made up of a pair of pointers: the static
5391 chain and the code entry, in that order. */
5392
5393 static rtx
5394 expand_builtin_init_descriptor (tree exp)
5395 {
5396 tree t_descr, t_func, t_chain;
5397 rtx m_descr, r_descr, r_func, r_chain;
5398
5399 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5400 VOID_TYPE))
5401 return NULL_RTX;
5402
5403 t_descr = CALL_EXPR_ARG (exp, 0);
5404 t_func = CALL_EXPR_ARG (exp, 1);
5405 t_chain = CALL_EXPR_ARG (exp, 2);
5406
5407 r_descr = expand_normal (t_descr);
5408 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5409 MEM_NOTRAP_P (m_descr) = 1;
5410
5411 r_func = expand_normal (t_func);
5412 r_chain = expand_normal (t_chain);
5413
5414 /* Generate insns to initialize the descriptor. */
5415 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5416 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5417 POINTER_SIZE / BITS_PER_UNIT), r_func);
5418
5419 return const0_rtx;
5420 }
5421
5422 /* Expand a call to the builtin descriptor adjustment routine. */
5423
5424 static rtx
5425 expand_builtin_adjust_descriptor (tree exp)
5426 {
5427 rtx tramp;
5428
5429 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5430 return NULL_RTX;
5431
5432 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5433
5434 /* Unalign the descriptor to allow runtime identification. */
5435 tramp = plus_constant (ptr_mode, tramp,
5436 targetm.calls.custom_function_descriptors);
5437
5438 return force_operand (tramp, NULL_RTX);
5439 }
5440
5441 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5442 function. The function first checks whether the back end provides
5443 an insn to implement signbit for the respective mode. If not, it
5444 checks whether the floating point format of the value is such that
5445 the sign bit can be extracted. If that is not the case, error out.
5446 EXP is the expression that is a call to the builtin function; if
5447 convenient, the result should be placed in TARGET. */
5448 static rtx
5449 expand_builtin_signbit (tree exp, rtx target)
5450 {
5451 const struct real_format *fmt;
5452 scalar_float_mode fmode;
5453 scalar_int_mode rmode, imode;
5454 tree arg;
5455 int word, bitpos;
5456 enum insn_code icode;
5457 rtx temp;
5458 location_t loc = EXPR_LOCATION (exp);
5459
5460 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5461 return NULL_RTX;
5462
5463 arg = CALL_EXPR_ARG (exp, 0);
5464 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5465 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5466 fmt = REAL_MODE_FORMAT (fmode);
5467
5468 arg = builtin_save_expr (arg);
5469
5470 /* Expand the argument yielding a RTX expression. */
5471 temp = expand_normal (arg);
5472
5473 /* Check if the back end provides an insn that handles signbit for the
5474 argument's mode. */
5475 icode = optab_handler (signbit_optab, fmode);
5476 if (icode != CODE_FOR_nothing)
5477 {
5478 rtx_insn *last = get_last_insn ();
5479 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5480 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5481 return target;
5482 delete_insns_since (last);
5483 }
5484
5485 /* For floating point formats without a sign bit, implement signbit
5486 as "ARG < 0.0". */
5487 bitpos = fmt->signbit_ro;
5488 if (bitpos < 0)
5489 {
5490 /* But we can't do this if the format supports signed zero. */
5491 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5492
5493 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5494 build_real (TREE_TYPE (arg), dconst0));
5495 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5496 }
5497
5498 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5499 {
5500 imode = int_mode_for_mode (fmode).require ();
5501 temp = gen_lowpart (imode, temp);
5502 }
5503 else
5504 {
5505 imode = word_mode;
5506 /* Handle targets with different FP word orders. */
5507 if (FLOAT_WORDS_BIG_ENDIAN)
5508 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5509 else
5510 word = bitpos / BITS_PER_WORD;
5511 temp = operand_subword_force (temp, word, fmode);
5512 bitpos = bitpos % BITS_PER_WORD;
5513 }
5514
5515 /* Force the intermediate word_mode (or narrower) result into a
5516 register. This avoids attempting to create paradoxical SUBREGs
5517 of floating point modes below. */
5518 temp = force_reg (imode, temp);
5519
5520 /* If the bitpos is within the "result mode" lowpart, the operation
5521 can be implemented with a single bitwise AND. Otherwise, we need
5522 a right shift and an AND. */
5523
5524 if (bitpos < GET_MODE_BITSIZE (rmode))
5525 {
5526 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5527
5528 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5529 temp = gen_lowpart (rmode, temp);
5530 temp = expand_binop (rmode, and_optab, temp,
5531 immed_wide_int_const (mask, rmode),
5532 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5533 }
5534 else
5535 {
5536 /* Perform a logical right shift to place the signbit in the least
5537 significant bit, then truncate the result to the desired mode
5538 and mask just this bit. */
5539 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5540 temp = gen_lowpart (rmode, temp);
5541 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5542 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5543 }
5544
5545 return temp;
5546 }
5547
5548 /* Expand fork or exec calls. TARGET is the desired target of the
5549 call. EXP is the call. FN is the identifier of the actual
5550 function. IGNORE is nonzero if the value is to be
5551 ignored. */
5552
5553 static rtx
5554 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5555 {
5556 tree id, decl;
5557 tree call;
5558
5559 /* If we are not profiling, just call the function. */
5560 if (!profile_arc_flag)
5561 return NULL_RTX;
5562
5563 /* Otherwise call the wrapper. This should be equivalent for the rest of
5564 compiler, so the code does not diverge, and the wrapper may run the
5565 code necessary for keeping the profiling sane. */
5566
5567 switch (DECL_FUNCTION_CODE (fn))
5568 {
5569 case BUILT_IN_FORK:
5570 id = get_identifier ("__gcov_fork");
5571 break;
5572
5573 case BUILT_IN_EXECL:
5574 id = get_identifier ("__gcov_execl");
5575 break;
5576
5577 case BUILT_IN_EXECV:
5578 id = get_identifier ("__gcov_execv");
5579 break;
5580
5581 case BUILT_IN_EXECLP:
5582 id = get_identifier ("__gcov_execlp");
5583 break;
5584
5585 case BUILT_IN_EXECLE:
5586 id = get_identifier ("__gcov_execle");
5587 break;
5588
5589 case BUILT_IN_EXECVP:
5590 id = get_identifier ("__gcov_execvp");
5591 break;
5592
5593 case BUILT_IN_EXECVE:
5594 id = get_identifier ("__gcov_execve");
5595 break;
5596
5597 default:
5598 gcc_unreachable ();
5599 }
5600
5601 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5602 FUNCTION_DECL, id, TREE_TYPE (fn));
5603 DECL_EXTERNAL (decl) = 1;
5604 TREE_PUBLIC (decl) = 1;
5605 DECL_ARTIFICIAL (decl) = 1;
5606 TREE_NOTHROW (decl) = 1;
5607 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5608 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5609 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5610 return expand_call (call, target, ignore);
5611 }
5612
5613
5614 \f
5615 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5616 the pointer in these functions is void*, the tree optimizers may remove
5617 casts. The mode computed in expand_builtin isn't reliable either, due
5618 to __sync_bool_compare_and_swap.
5619
5620 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5621 group of builtins. This gives us log2 of the mode size. */
5622
5623 static inline machine_mode
5624 get_builtin_sync_mode (int fcode_diff)
5625 {
5626 /* The size is not negotiable, so ask not to get BLKmode in return
5627 if the target indicates that a smaller size would be better. */
5628 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5629 }
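/* For illustration, FCODE_DIFF is 0, 1, 2, 3 or 4 for the _1, _2, _4, _8
   and _16 variants, so BITS_PER_UNIT << FCODE_DIFF gives 8, 16, 32, 64
   or 128 bits, i.e. QImode through TImode on typical targets.  */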
5630
5631 /* Expand the memory expression LOC and return the appropriate memory operand
5632 for the builtin_sync operations. */
5633
5634 static rtx
5635 get_builtin_sync_mem (tree loc, machine_mode mode)
5636 {
5637 rtx addr, mem;
5638
5639 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5640 addr = convert_memory_address (Pmode, addr);
5641
5642 /* Note that we explicitly do not want any alias information for this
5643 memory, so that we kill all other live memories. Otherwise we don't
5644 satisfy the full barrier semantics of the intrinsic. */
5645 mem = validize_mem (gen_rtx_MEM (mode, addr));
5646
5647 /* The alignment needs to be at least that of the mode. */
5648 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5649 get_pointer_alignment (loc)));
5650 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5651 MEM_VOLATILE_P (mem) = 1;
5652
5653 return mem;
5654 }
5655
5656 /* Make sure an argument is in the right mode.
5657 EXP is the tree argument.
5658 MODE is the mode it should be in. */
5659
5660 static rtx
5661 expand_expr_force_mode (tree exp, machine_mode mode)
5662 {
5663 rtx val;
5664 machine_mode old_mode;
5665
5666 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5667 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5668 of CONST_INTs, where we know the old_mode only from the call argument. */
5669
5670 old_mode = GET_MODE (val);
5671 if (old_mode == VOIDmode)
5672 old_mode = TYPE_MODE (TREE_TYPE (exp));
5673 val = convert_modes (mode, old_mode, val, 1);
5674 return val;
5675 }
5676
5677
5678 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5679 EXP is the CALL_EXPR. CODE is the rtx code
5680 that corresponds to the arithmetic or logical operation from the name;
5681 an exception here is that NOT actually means NAND. TARGET is an optional
5682 place for us to store the results; AFTER is true if this is the
5683 xxx_and_fetch form, i.e. the value after the operation is returned. */
5684
5685 static rtx
5686 expand_builtin_sync_operation (machine_mode mode, tree exp,
5687 enum rtx_code code, bool after,
5688 rtx target)
5689 {
5690 rtx val, mem;
5691 location_t loc = EXPR_LOCATION (exp);
5692
5693 if (code == NOT && warn_sync_nand)
5694 {
5695 tree fndecl = get_callee_fndecl (exp);
5696 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5697
5698 static bool warned_f_a_n, warned_n_a_f;
5699
5700 switch (fcode)
5701 {
5702 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5703 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5704 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5705 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5706 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5707 if (warned_f_a_n)
5708 break;
5709
5710 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5711 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5712 warned_f_a_n = true;
5713 break;
5714
5715 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5716 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5717 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5718 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5719 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5720 if (warned_n_a_f)
5721 break;
5722
5723 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5724 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5725 warned_n_a_f = true;
5726 break;
5727
5728 default:
5729 gcc_unreachable ();
5730 }
5731 }
5732
5733 /* Expand the operands. */
5734 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5735 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5736
5737 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5738 after);
5739 }
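/* For illustration (variable names are illustrative), the legacy
   intrinsics expanded above look like

     old = __sync_fetch_and_add (&counter, 1);   -- AFTER is false
     cur = __sync_add_and_fetch (&counter, 1);   -- AFTER is true

   and both map onto expand_atomic_fetch_op with MEMMODEL_SYNC_SEQ_CST.  */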
5740
5741 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5742 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5743 true if this is the boolean form. TARGET is a place for us to store the
5744 results; this is NOT optional if IS_BOOL is true. */
5745
5746 static rtx
5747 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5748 bool is_bool, rtx target)
5749 {
5750 rtx old_val, new_val, mem;
5751 rtx *pbool, *poval;
5752
5753 /* Expand the operands. */
5754 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5755 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5756 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5757
5758 pbool = poval = NULL;
5759 if (target != const0_rtx)
5760 {
5761 if (is_bool)
5762 pbool = &target;
5763 else
5764 poval = &target;
5765 }
5766 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5767 false, MEMMODEL_SYNC_SEQ_CST,
5768 MEMMODEL_SYNC_SEQ_CST))
5769 return NULL_RTX;
5770
5771 return target;
5772 }
5773
5774 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5775 general form is actually an atomic exchange, and some targets only
5776 support a reduced form with the second argument being a constant 1.
5777 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5778 the results. */
5779
5780 static rtx
5781 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5782 rtx target)
5783 {
5784 rtx val, mem;
5785
5786 /* Expand the operands. */
5787 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5788 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5789
5790 return expand_sync_lock_test_and_set (target, mem, val);
5791 }
5792
5793 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5794
5795 static void
5796 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5797 {
5798 rtx mem;
5799
5800 /* Expand the operands. */
5801 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5802
5803 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5804 }
5805
5806 /* Given an integer representing an ``enum memmodel'', verify its
5807 correctness and return the memory model enum. */
5808
5809 static enum memmodel
5810 get_memmodel (tree exp)
5811 {
5812 rtx op;
5813 unsigned HOST_WIDE_INT val;
5814 source_location loc
5815 = expansion_point_location_if_in_system_header (input_location);
5816
5817 /* If the parameter is not a constant, it's a run time value so we'll just
5818 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5819 if (TREE_CODE (exp) != INTEGER_CST)
5820 return MEMMODEL_SEQ_CST;
5821
5822 op = expand_normal (exp);
5823
5824 val = INTVAL (op);
5825 if (targetm.memmodel_check)
5826 val = targetm.memmodel_check (val);
5827 else if (val & ~MEMMODEL_MASK)
5828 {
5829 warning_at (loc, OPT_Winvalid_memory_model,
5830 "unknown architecture specifier in memory model to builtin");
5831 return MEMMODEL_SEQ_CST;
5832 }
5833
5834 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5835 if (memmodel_base (val) >= MEMMODEL_LAST)
5836 {
5837 warning_at (loc, OPT_Winvalid_memory_model,
5838 "invalid memory model argument to builtin");
5839 return MEMMODEL_SEQ_CST;
5840 }
5841
5842 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5843 be conservative and promote consume to acquire. */
5844 if (val == MEMMODEL_CONSUME)
5845 val = MEMMODEL_ACQUIRE;
5846
5847 return (enum memmodel) val;
5848 }
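/* For illustration, the memory-model argument normally comes from one of
   the __ATOMIC_RELAXED ... __ATOMIC_SEQ_CST macros, e.g.

     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);

   A non-constant value, or an out-of-range constant (after the
   -Winvalid-memory-model warning above), is treated as MEMMODEL_SEQ_CST.  */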
5849
5850 /* Expand the __atomic_exchange intrinsic:
5851 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5852 EXP is the CALL_EXPR.
5853 TARGET is an optional place for us to store the results. */
5854
5855 static rtx
5856 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5857 {
5858 rtx val, mem;
5859 enum memmodel model;
5860
5861 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5862
5863 if (!flag_inline_atomics)
5864 return NULL_RTX;
5865
5866 /* Expand the operands. */
5867 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5868 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5869
5870 return expand_atomic_exchange (target, mem, val, model);
5871 }
5872
5873 /* Expand the __atomic_compare_exchange intrinsic:
5874 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5875 TYPE desired, BOOL weak,
5876 enum memmodel success,
5877 enum memmodel failure)
5878 EXP is the CALL_EXPR.
5879 TARGET is an optional place for us to store the results. */
5880
5881 static rtx
5882 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5883 rtx target)
5884 {
5885 rtx expect, desired, mem, oldval;
5886 rtx_code_label *label;
5887 enum memmodel success, failure;
5888 tree weak;
5889 bool is_weak;
5890 source_location loc
5891 = expansion_point_location_if_in_system_header (input_location);
5892
5893 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5894 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5895
5896 if (failure > success)
5897 {
5898 warning_at (loc, OPT_Winvalid_memory_model,
5899 "failure memory model cannot be stronger than success "
5900 "memory model for %<__atomic_compare_exchange%>");
5901 success = MEMMODEL_SEQ_CST;
5902 }
5903
5904 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5905 {
5906 warning_at (loc, OPT_Winvalid_memory_model,
5907 "invalid failure memory model for "
5908 "%<__atomic_compare_exchange%>");
5909 failure = MEMMODEL_SEQ_CST;
5910 success = MEMMODEL_SEQ_CST;
5911 }
5912
5913
5914 if (!flag_inline_atomics)
5915 return NULL_RTX;
5916
5917 /* Expand the operands. */
5918 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5919
5920 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5921 expect = convert_memory_address (Pmode, expect);
5922 expect = gen_rtx_MEM (mode, expect);
5923 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5924
5925 weak = CALL_EXPR_ARG (exp, 3);
5926 is_weak = false;
5927 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5928 is_weak = true;
5929
5930 if (target == const0_rtx)
5931 target = NULL;
5932
5933 /* Lest the rtl backend create a race condition with an improper store
5934 to memory, always create a new pseudo for OLDVAL. */
5935 oldval = NULL;
5936
5937 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5938 is_weak, success, failure))
5939 return NULL_RTX;
5940
5941 /* Conditionally store back to EXPECT, lest we create a race condition
5942 with an improper store to memory. */
5943 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5944 the normal case where EXPECT is totally private, i.e. a register. At
5945 which point the store can be unconditional. */
5946 label = gen_label_rtx ();
5947 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5948 GET_MODE (target), 1, label);
5949 emit_move_insn (expect, oldval);
5950 emit_label (label);
5951
5952 return target;
5953 }
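/* For illustration (variable names are illustrative), a source-level call
   such as

     ok = __atomic_compare_exchange_n (&v, &expected, desired, 0,
                                       __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);

   reaches this point with EXPECT being the address of `expected'; the
   conditional store above writes the observed value back into it only
   when the exchange failed.  */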
5954
5955 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5956 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5957 call. The weak parameter must be dropped to match the expected parameter
5958 list and the expected argument changed from value to pointer to memory
5959 slot. */
5960
5961 static void
5962 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5963 {
5964 unsigned int z;
5965 vec<tree, va_gc> *vec;
5966
5967 vec_alloc (vec, 5);
5968 vec->quick_push (gimple_call_arg (call, 0));
5969 tree expected = gimple_call_arg (call, 1);
5970 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5971 TREE_TYPE (expected));
5972 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5973 if (expd != x)
5974 emit_move_insn (x, expd);
5975 tree v = make_tree (TREE_TYPE (expected), x);
5976 vec->quick_push (build1 (ADDR_EXPR,
5977 build_pointer_type (TREE_TYPE (expected)), v));
5978 vec->quick_push (gimple_call_arg (call, 2));
5979 /* Skip the boolean weak parameter. */
5980 for (z = 4; z < 6; z++)
5981 vec->quick_push (gimple_call_arg (call, z));
5982 built_in_function fncode
5983 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5984 + exact_log2 (GET_MODE_SIZE (mode)));
5985 tree fndecl = builtin_decl_explicit (fncode);
5986 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5987 fndecl);
5988 tree exp = build_call_vec (boolean_type_node, fn, vec);
5989 tree lhs = gimple_call_lhs (call);
5990 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5991 if (lhs)
5992 {
5993 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5994 if (GET_MODE (boolret) != mode)
5995 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5996 x = force_reg (mode, x);
5997 write_complex_part (target, boolret, true);
5998 write_complex_part (target, x, false);
5999 }
6000 }
6001
6002 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6003
6004 void
6005 expand_ifn_atomic_compare_exchange (gcall *call)
6006 {
6007 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6008 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6009 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6010 rtx expect, desired, mem, oldval, boolret;
6011 enum memmodel success, failure;
6012 tree lhs;
6013 bool is_weak;
6014 source_location loc
6015 = expansion_point_location_if_in_system_header (gimple_location (call));
6016
6017 success = get_memmodel (gimple_call_arg (call, 4));
6018 failure = get_memmodel (gimple_call_arg (call, 5));
6019
6020 if (failure > success)
6021 {
6022 warning_at (loc, OPT_Winvalid_memory_model,
6023 "failure memory model cannot be stronger than success "
6024 "memory model for %<__atomic_compare_exchange%>");
6025 success = MEMMODEL_SEQ_CST;
6026 }
6027
6028 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6029 {
6030 warning_at (loc, OPT_Winvalid_memory_model,
6031 "invalid failure memory model for "
6032 "%<__atomic_compare_exchange%>");
6033 failure = MEMMODEL_SEQ_CST;
6034 success = MEMMODEL_SEQ_CST;
6035 }
6036
6037 if (!flag_inline_atomics)
6038 {
6039 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6040 return;
6041 }
6042
6043 /* Expand the operands. */
6044 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6045
6046 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6047 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6048
6049 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6050
6051 boolret = NULL;
6052 oldval = NULL;
6053
6054 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6055 is_weak, success, failure))
6056 {
6057 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6058 return;
6059 }
6060
6061 lhs = gimple_call_lhs (call);
6062 if (lhs)
6063 {
6064 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6065 if (GET_MODE (boolret) != mode)
6066 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6067 write_complex_part (target, boolret, true);
6068 write_complex_part (target, oldval, false);
6069 }
6070 }
6071
6072 /* Expand the __atomic_load intrinsic:
6073 TYPE __atomic_load (TYPE *object, enum memmodel)
6074 EXP is the CALL_EXPR.
6075 TARGET is an optional place for us to store the results. */
6076
6077 static rtx
6078 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6079 {
6080 rtx mem;
6081 enum memmodel model;
6082
6083 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6084 if (is_mm_release (model) || is_mm_acq_rel (model))
6085 {
6086 source_location loc
6087 = expansion_point_location_if_in_system_header (input_location);
6088 warning_at (loc, OPT_Winvalid_memory_model,
6089 "invalid memory model for %<__atomic_load%>");
6090 model = MEMMODEL_SEQ_CST;
6091 }
6092
6093 if (!flag_inline_atomics)
6094 return NULL_RTX;
6095
6096 /* Expand the operand. */
6097 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6098
6099 return expand_atomic_load (target, mem, model);
6100 }
6101
6102
6103 /* Expand the __atomic_store intrinsic:
6104 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6105 EXP is the CALL_EXPR.
6106 TARGET is an optional place for us to store the results. */
6107
6108 static rtx
6109 expand_builtin_atomic_store (machine_mode mode, tree exp)
6110 {
6111 rtx mem, val;
6112 enum memmodel model;
6113
6114 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6115 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6116 || is_mm_release (model)))
6117 {
6118 source_location loc
6119 = expansion_point_location_if_in_system_header (input_location);
6120 warning_at (loc, OPT_Winvalid_memory_model,
6121 "invalid memory model for %<__atomic_store%>");
6122 model = MEMMODEL_SEQ_CST;
6123 }
6124
6125 if (!flag_inline_atomics)
6126 return NULL_RTX;
6127
6128 /* Expand the operands. */
6129 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6130 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6131
6132 return expand_atomic_store (mem, val, model, false);
6133 }
6134
6135 /* Expand the __atomic_fetch_XXX intrinsic:
6136 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6137 EXP is the CALL_EXPR.
6138 TARGET is an optional place for us to store the results.
6139 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR; NOT means NAND.
6140 FETCH_AFTER is true if returning the result of the operation.
6141 FETCH_AFTER is false if returning the value before the operation.
6142 IGNORE is true if the result is not used.
6143 EXT_CALL is the correct builtin for an external call if this cannot be
6144 resolved to an instruction sequence. */
6145
6146 static rtx
6147 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6148 enum rtx_code code, bool fetch_after,
6149 bool ignore, enum built_in_function ext_call)
6150 {
6151 rtx val, mem, ret;
6152 enum memmodel model;
6153 tree fndecl;
6154 tree addr;
6155
6156 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6157
6158 /* Expand the operands. */
6159 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6160 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6161
6162 /* Only try generating instructions if inlining is turned on. */
6163 if (flag_inline_atomics)
6164 {
6165 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6166 if (ret)
6167 return ret;
6168 }
6169
6170 /* Return if a different routine isn't needed for the library call. */
6171 if (ext_call == BUILT_IN_NONE)
6172 return NULL_RTX;
6173
6174 /* Change the call to the specified function. */
6175 fndecl = get_callee_fndecl (exp);
6176 addr = CALL_EXPR_FN (exp);
6177 STRIP_NOPS (addr);
6178
6179 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6180 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6181
6182 /* If we will emit code after the call, the call cannot be a tail call.
6183 If it is emitted as a tail call, a barrier is emitted after it, and
6184 then all trailing code is removed. */
6185 if (!ignore)
6186 CALL_EXPR_TAILCALL (exp) = 0;
6187
6188 /* Expand the call here so we can emit trailing code. */
6189 ret = expand_call (exp, target, ignore);
6190
6191 /* Replace the original function just in case it matters. */
6192 TREE_OPERAND (addr, 0) = fndecl;
6193
6194 /* Then issue the arithmetic correction to return the right result. */
6195 if (!ignore)
6196 {
6197 if (code == NOT)
6198 {
6199 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6200 OPTAB_LIB_WIDEN);
6201 ret = expand_simple_unop (mode, NOT, ret, target, true);
6202 }
6203 else
6204 ret = expand_simple_binop (mode, code, ret, val, target, true,
6205 OPTAB_LIB_WIDEN);
6206 }
6207 return ret;
6208 }
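/* For illustration, when the operation cannot be inlined a call such as

     __atomic_add_fetch (&x, n, __ATOMIC_SEQ_CST)

   is redirected above to the EXT_CALL library entry (__atomic_fetch_add),
   and the trailing correction then adds N to the returned value to
   recover the "after" result; for NAND the correction is ~(ret & val).  */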
6209
6210 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6211
6212 void
6213 expand_ifn_atomic_bit_test_and (gcall *call)
6214 {
6215 tree ptr = gimple_call_arg (call, 0);
6216 tree bit = gimple_call_arg (call, 1);
6217 tree flag = gimple_call_arg (call, 2);
6218 tree lhs = gimple_call_lhs (call);
6219 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6220 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6221 enum rtx_code code;
6222 optab optab;
6223 struct expand_operand ops[5];
6224
6225 gcc_assert (flag_inline_atomics);
6226
6227 if (gimple_call_num_args (call) == 4)
6228 model = get_memmodel (gimple_call_arg (call, 3));
6229
6230 rtx mem = get_builtin_sync_mem (ptr, mode);
6231 rtx val = expand_expr_force_mode (bit, mode);
6232
6233 switch (gimple_call_internal_fn (call))
6234 {
6235 case IFN_ATOMIC_BIT_TEST_AND_SET:
6236 code = IOR;
6237 optab = atomic_bit_test_and_set_optab;
6238 break;
6239 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6240 code = XOR;
6241 optab = atomic_bit_test_and_complement_optab;
6242 break;
6243 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6244 code = AND;
6245 optab = atomic_bit_test_and_reset_optab;
6246 break;
6247 default:
6248 gcc_unreachable ();
6249 }
6250
6251 if (lhs == NULL_TREE)
6252 {
6253 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6254 val, NULL_RTX, true, OPTAB_DIRECT);
6255 if (code == AND)
6256 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6257 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6258 return;
6259 }
6260
6261 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6262 enum insn_code icode = direct_optab_handler (optab, mode);
6263 gcc_assert (icode != CODE_FOR_nothing);
6264 create_output_operand (&ops[0], target, mode);
6265 create_fixed_operand (&ops[1], mem);
6266 create_convert_operand_to (&ops[2], val, mode, true);
6267 create_integer_operand (&ops[3], model);
6268 create_integer_operand (&ops[4], integer_onep (flag));
6269 if (maybe_expand_insn (icode, 5, ops))
6270 return;
6271
6272 rtx bitval = val;
6273 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6274 val, NULL_RTX, true, OPTAB_DIRECT);
6275 rtx maskval = val;
6276 if (code == AND)
6277 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6278 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6279 code, model, false);
6280 if (integer_onep (flag))
6281 {
6282 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6283 NULL_RTX, true, OPTAB_DIRECT);
6284 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6285 true, OPTAB_DIRECT);
6286 }
6287 else
6288 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6289 OPTAB_DIRECT);
6290 if (result != target)
6291 emit_move_insn (target, result);
6292 }
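/* For illustration, this internal function is created during GIMPLE-level
   folding from patterns such as

     if (__atomic_fetch_or (&word, 1 << bit, __ATOMIC_SEQ_CST) & (1 << bit))
       ...

   so that targets providing atomic_bit_test_and_set (and friends) can use
   a single bit-test-and-modify instruction instead of a full fetch_op.  */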
6293
6294 /* Expand an atomic clear operation.
6295 void _atomic_clear (BOOL *obj, enum memmodel)
6296 EXP is the call expression. */
6297
6298 static rtx
6299 expand_builtin_atomic_clear (tree exp)
6300 {
6301 machine_mode mode;
6302 rtx mem, ret;
6303 enum memmodel model;
6304
6305 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6306 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6307 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6308
6309 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6310 {
6311 source_location loc
6312 = expansion_point_location_if_in_system_header (input_location);
6313 warning_at (loc, OPT_Winvalid_memory_model,
6314 "invalid memory model for %<__atomic_store%>");
6315 model = MEMMODEL_SEQ_CST;
6316 }
6317
6318 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6319 Failing that, a store is issued by __atomic_store. The only way this can
6320 fail is if the bool type is larger than a word size. Unlikely, but
6321 handle it anyway for completeness. Assume a single threaded model since
6322 there is no atomic support in this case, and no barriers are required. */
6323 ret = expand_atomic_store (mem, const0_rtx, model, true);
6324 if (!ret)
6325 emit_move_insn (mem, const0_rtx);
6326 return const0_rtx;
6327 }
6328
6329 /* Expand an atomic test_and_set operation.
6330 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6331 EXP is the call expression. */
6332
6333 static rtx
6334 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6335 {
6336 rtx mem;
6337 enum memmodel model;
6338 machine_mode mode;
6339
6340 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6341 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6342 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6343
6344 return expand_atomic_test_and_set (target, mem, model);
6345 }
6346
6347
6348 /* Return true if an object of size ARG0, optionally pointed to by ARG1, is
6349 always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6350
6351 static tree
6352 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6353 {
6354 int size;
6355 machine_mode mode;
6356 unsigned int mode_align, type_align;
6357
6358 if (TREE_CODE (arg0) != INTEGER_CST)
6359 return NULL_TREE;
6360
6361 /* We need a corresponding integer mode for the access to be lock-free. */
6362 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6363 if (!int_mode_for_size (size, 0).exists (&mode))
6364 return boolean_false_node;
6365
6366 mode_align = GET_MODE_ALIGNMENT (mode);
6367
6368 if (TREE_CODE (arg1) == INTEGER_CST)
6369 {
6370 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6371
6372 /* Either this argument is null, or it's a fake pointer encoding
6373 the alignment of the object. */
6374 val = least_bit_hwi (val);
6375 val *= BITS_PER_UNIT;
6376
6377 if (val == 0 || mode_align < val)
6378 type_align = mode_align;
6379 else
6380 type_align = val;
6381 }
6382 else
6383 {
6384 tree ttype = TREE_TYPE (arg1);
6385
6386 /* This function is usually invoked and folded immediately by the front
6387 end before anything else has a chance to look at it. The pointer
6388 parameter at this point is usually cast to a void *, so check for that
6389 and look past the cast. */
6390 if (CONVERT_EXPR_P (arg1)
6391 && POINTER_TYPE_P (ttype)
6392 && VOID_TYPE_P (TREE_TYPE (ttype))
6393 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6394 arg1 = TREE_OPERAND (arg1, 0);
6395
6396 ttype = TREE_TYPE (arg1);
6397 gcc_assert (POINTER_TYPE_P (ttype));
6398
6399 /* Get the underlying type of the object. */
6400 ttype = TREE_TYPE (ttype);
6401 type_align = TYPE_ALIGN (ttype);
6402 }
6403
6404 /* If the object has smaller alignment, the lock free routines cannot
6405 be used. */
6406 if (type_align < mode_align)
6407 return boolean_false_node;
6408
6409 /* Check if a compare_and_swap pattern exists for the mode which represents
6410 the required size. The pattern is not allowed to fail, so the existence
6411 of the pattern indicates support is present. Also require that an
6412 atomic load exists for the required size. */
6413 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6414 return boolean_true_node;
6415 else
6416 return boolean_false_node;
6417 }
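/* For illustration, the folding above answers calls such as

     __atomic_always_lock_free (sizeof (long), 0)

   where a null second argument means "assume the typical alignment for an
   object of that size", while a nonzero constant is the fake pointer
   described above, encoding the actual alignment in its low bits.  */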
6418
6419 /* Return true if the parameters to call EXP represent an object which will
6420 always generate lock free instructions. The first argument represents the
6421 size of the object, and the second parameter is a pointer to the object
6422 itself. If NULL is passed for the object, then the result is based on
6423 typical alignment for an object of the specified size. Otherwise return
6424 false. */
6425
6426 static rtx
6427 expand_builtin_atomic_always_lock_free (tree exp)
6428 {
6429 tree size;
6430 tree arg0 = CALL_EXPR_ARG (exp, 0);
6431 tree arg1 = CALL_EXPR_ARG (exp, 1);
6432
6433 if (TREE_CODE (arg0) != INTEGER_CST)
6434 {
6435 error ("non-constant argument 1 to __atomic_always_lock_free");
6436 return const0_rtx;
6437 }
6438
6439 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6440 if (size == boolean_true_node)
6441 return const1_rtx;
6442 return const0_rtx;
6443 }
6444
6445 /* Return one or zero if it can be determined that the object ARG1 of size ARG0
6446 is lock free on this architecture. */
6447
6448 static tree
6449 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6450 {
6451 if (!flag_inline_atomics)
6452 return NULL_TREE;
6453
6454 /* If it isn't always lock free, don't generate a result. */
6455 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6456 return boolean_true_node;
6457
6458 return NULL_TREE;
6459 }
6460
6461 /* Return true if the parameters to call EXP represent an object which will
6462 always generate lock free instructions. The first argument represents the
6463 size of the object, and the second parameter is a pointer to the object
6464 itself. If NULL is passed for the object, then the result is based on
6465 typical alignment for an object of the specified size. Otherwise return
6466 NULL. */
6467
6468 static rtx
6469 expand_builtin_atomic_is_lock_free (tree exp)
6470 {
6471 tree size;
6472 tree arg0 = CALL_EXPR_ARG (exp, 0);
6473 tree arg1 = CALL_EXPR_ARG (exp, 1);
6474
6475 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6476 {
6477 error ("non-integer argument 1 to __atomic_is_lock_free");
6478 return NULL_RTX;
6479 }
6480
6481 if (!flag_inline_atomics)
6482 return NULL_RTX;
6483
6484 /* If the value is known at compile time, return the RTX for it. */
6485 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6486 if (size == boolean_true_node)
6487 return const1_rtx;
6488
6489 return NULL_RTX;
6490 }
6491
6492 /* Expand the __atomic_thread_fence intrinsic:
6493 void __atomic_thread_fence (enum memmodel)
6494 EXP is the CALL_EXPR. */
6495
6496 static void
6497 expand_builtin_atomic_thread_fence (tree exp)
6498 {
6499 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6500 expand_mem_thread_fence (model);
6501 }
6502
6503 /* Expand the __atomic_signal_fence intrinsic:
6504 void __atomic_signal_fence (enum memmodel)
6505 EXP is the CALL_EXPR. */
6506
6507 static void
6508 expand_builtin_atomic_signal_fence (tree exp)
6509 {
6510 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6511 expand_mem_signal_fence (model);
6512 }
6513
6514 /* Expand the __sync_synchronize intrinsic. */
6515
6516 static void
6517 expand_builtin_sync_synchronize (void)
6518 {
6519 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6520 }
6521
6522 static rtx
6523 expand_builtin_thread_pointer (tree exp, rtx target)
6524 {
6525 enum insn_code icode;
6526 if (!validate_arglist (exp, VOID_TYPE))
6527 return const0_rtx;
6528 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6529 if (icode != CODE_FOR_nothing)
6530 {
6531 struct expand_operand op;
6532 /* If the target is not suitable then create a new target. */
6533 if (target == NULL_RTX
6534 || !REG_P (target)
6535 || GET_MODE (target) != Pmode)
6536 target = gen_reg_rtx (Pmode);
6537 create_output_operand (&op, target, Pmode);
6538 expand_insn (icode, 1, &op);
6539 return target;
6540 }
6541 error ("__builtin_thread_pointer is not supported on this target");
6542 return const0_rtx;
6543 }
6544
6545 static void
6546 expand_builtin_set_thread_pointer (tree exp)
6547 {
6548 enum insn_code icode;
6549 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6550 return;
6551 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6552 if (icode != CODE_FOR_nothing)
6553 {
6554 struct expand_operand op;
6555 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6556 Pmode, EXPAND_NORMAL);
6557 create_input_operand (&op, val, Pmode);
6558 expand_insn (icode, 1, &op);
6559 return;
6560 }
6561 error ("__builtin_set_thread_pointer is not supported on this target");
6562 }
6563
6564 \f
6565 /* Emit code to restore the current value of stack. */
6566
6567 static void
6568 expand_stack_restore (tree var)
6569 {
6570 rtx_insn *prev;
6571 rtx sa = expand_normal (var);
6572
6573 sa = convert_memory_address (Pmode, sa);
6574
6575 prev = get_last_insn ();
6576 emit_stack_restore (SAVE_BLOCK, sa);
6577
6578 record_new_stack_level ();
6579
6580 fixup_args_size_notes (prev, get_last_insn (), 0);
6581 }
6582
6583 /* Emit code to save the current value of stack. */
6584
6585 static rtx
6586 expand_stack_save (void)
6587 {
6588 rtx ret = NULL_RTX;
6589
6590 emit_stack_save (SAVE_BLOCK, &ret);
6591 return ret;
6592 }
6593
6594
6595 /* Expand an expression EXP that calls a built-in function,
6596 with result going to TARGET if that's convenient
6597 (and in mode MODE if that's convenient).
6598 SUBTARGET may be used as the target for computing one of EXP's operands.
6599 IGNORE is nonzero if the value is to be ignored. */
6600
6601 rtx
6602 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6603 int ignore)
6604 {
6605 tree fndecl = get_callee_fndecl (exp);
6606 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6607 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6608 int flags;
6609
6610 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6611 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6612
6613 /* When ASan is enabled, we don't want to expand some memory/string
6614 builtins and rely on libsanitizer's hooks. This allows us to avoid
6615 redundant checks and be sure that a possible overflow will be detected
6616 by ASan. */
6617
6618 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6619 return expand_call (exp, target, ignore);
6620
6621 /* When not optimizing, generate calls to library functions for a certain
6622 set of builtins. */
6623 if (!optimize
6624 && !called_as_built_in (fndecl)
6625 && fcode != BUILT_IN_FORK
6626 && fcode != BUILT_IN_EXECL
6627 && fcode != BUILT_IN_EXECV
6628 && fcode != BUILT_IN_EXECLP
6629 && fcode != BUILT_IN_EXECLE
6630 && fcode != BUILT_IN_EXECVP
6631 && fcode != BUILT_IN_EXECVE
6632 && !ALLOCA_FUNCTION_CODE_P (fcode)
6633 && fcode != BUILT_IN_FREE
6634 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6635 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6636 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6637 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6638 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6639 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6640 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6641 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6642 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6643 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6644 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6645 && fcode != BUILT_IN_CHKP_BNDRET)
6646 return expand_call (exp, target, ignore);
6647
6648 /* The built-in function expanders test for target == const0_rtx
6649 to determine whether the function's result will be ignored. */
6650 if (ignore)
6651 target = const0_rtx;
6652
6653 /* If the result of a pure or const built-in function is ignored, and
6654 none of its arguments are volatile, we can avoid expanding the
6655 built-in call and just evaluate the arguments for side-effects. */
6656 if (target == const0_rtx
6657 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6658 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6659 {
6660 bool volatilep = false;
6661 tree arg;
6662 call_expr_arg_iterator iter;
6663
6664 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6665 if (TREE_THIS_VOLATILE (arg))
6666 {
6667 volatilep = true;
6668 break;
6669 }
6670
6671 if (! volatilep)
6672 {
6673 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6674 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6675 return const0_rtx;
6676 }
6677 }
6678
6679 /* expand_builtin_with_bounds is supposed to be used for
6680 instrumented builtin calls. */
6681 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6682
6683 switch (fcode)
6684 {
6685 CASE_FLT_FN (BUILT_IN_FABS):
6686 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6687 case BUILT_IN_FABSD32:
6688 case BUILT_IN_FABSD64:
6689 case BUILT_IN_FABSD128:
6690 target = expand_builtin_fabs (exp, target, subtarget);
6691 if (target)
6692 return target;
6693 break;
6694
6695 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6696 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6697 target = expand_builtin_copysign (exp, target, subtarget);
6698 if (target)
6699 return target;
6700 break;
6701
6702 /* Just do a normal library call if we were unable to fold
6703 the values. */
6704 CASE_FLT_FN (BUILT_IN_CABS):
6705 break;
6706
6707 CASE_FLT_FN (BUILT_IN_FMA):
6708 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6709 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6710 if (target)
6711 return target;
6712 break;
6713
6714 CASE_FLT_FN (BUILT_IN_ILOGB):
6715 if (! flag_unsafe_math_optimizations)
6716 break;
6717 gcc_fallthrough ();
6718 CASE_FLT_FN (BUILT_IN_ISINF):
6719 CASE_FLT_FN (BUILT_IN_FINITE):
6720 case BUILT_IN_ISFINITE:
6721 case BUILT_IN_ISNORMAL:
6722 target = expand_builtin_interclass_mathfn (exp, target);
6723 if (target)
6724 return target;
6725 break;
6726
6727 CASE_FLT_FN (BUILT_IN_ICEIL):
6728 CASE_FLT_FN (BUILT_IN_LCEIL):
6729 CASE_FLT_FN (BUILT_IN_LLCEIL):
6730 CASE_FLT_FN (BUILT_IN_LFLOOR):
6731 CASE_FLT_FN (BUILT_IN_IFLOOR):
6732 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6733 target = expand_builtin_int_roundingfn (exp, target);
6734 if (target)
6735 return target;
6736 break;
6737
6738 CASE_FLT_FN (BUILT_IN_IRINT):
6739 CASE_FLT_FN (BUILT_IN_LRINT):
6740 CASE_FLT_FN (BUILT_IN_LLRINT):
6741 CASE_FLT_FN (BUILT_IN_IROUND):
6742 CASE_FLT_FN (BUILT_IN_LROUND):
6743 CASE_FLT_FN (BUILT_IN_LLROUND):
6744 target = expand_builtin_int_roundingfn_2 (exp, target);
6745 if (target)
6746 return target;
6747 break;
6748
6749 CASE_FLT_FN (BUILT_IN_POWI):
6750 target = expand_builtin_powi (exp, target);
6751 if (target)
6752 return target;
6753 break;
6754
6755 CASE_FLT_FN (BUILT_IN_CEXPI):
6756 target = expand_builtin_cexpi (exp, target);
6757 gcc_assert (target);
6758 return target;
6759
6760 CASE_FLT_FN (BUILT_IN_SIN):
6761 CASE_FLT_FN (BUILT_IN_COS):
6762 if (! flag_unsafe_math_optimizations)
6763 break;
6764 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6765 if (target)
6766 return target;
6767 break;
6768
6769 CASE_FLT_FN (BUILT_IN_SINCOS):
6770 if (! flag_unsafe_math_optimizations)
6771 break;
6772 target = expand_builtin_sincos (exp);
6773 if (target)
6774 return target;
6775 break;
6776
6777 case BUILT_IN_APPLY_ARGS:
6778 return expand_builtin_apply_args ();
6779
6780 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6781 FUNCTION with a copy of the parameters described by
6782 ARGUMENTS, and ARGSIZE. It returns a block of memory
6783 allocated on the stack into which is stored all the registers
6784 that might possibly be used for returning the result of a
6785 function. ARGUMENTS is the value returned by
6786 __builtin_apply_args. ARGSIZE is the number of bytes of
6787 arguments that must be copied. ??? How should this value be
6788 computed? We'll also need a safe worst case value for varargs
6789 functions. */
6790 case BUILT_IN_APPLY:
6791 if (!validate_arglist (exp, POINTER_TYPE,
6792 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6793 && !validate_arglist (exp, REFERENCE_TYPE,
6794 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6795 return const0_rtx;
6796 else
6797 {
6798 rtx ops[3];
6799
6800 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6801 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6802 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6803
6804 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6805 }
6806
6807 /* __builtin_return (RESULT) causes the function to return the
6808 value described by RESULT. RESULT is address of the block of
6809 memory returned by __builtin_apply. */
6810 case BUILT_IN_RETURN:
6811 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6812 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6813 return const0_rtx;
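/* Illustrative sketch (not in the original source): the apply_args, apply
   and return builtins handled above are typically combined to forward a
   call, as in this hypothetical wrapper (TARGET_FN is a made-up callee and
   64 an arbitrary guess at the argument block size):
     int wrapper (int a, int b)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }
   Control never falls off the end; __builtin_return transfers the saved
   result registers back to WRAPPER's caller.  */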
6814
6815 case BUILT_IN_SAVEREGS:
6816 return expand_builtin_saveregs ();
6817
6818 case BUILT_IN_VA_ARG_PACK:
6819 /* All valid uses of __builtin_va_arg_pack () are removed during
6820 inlining. */
6821 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6822 return const0_rtx;
6823
6824 case BUILT_IN_VA_ARG_PACK_LEN:
6825 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6826 inlining. */
6827 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6828 return const0_rtx;
6829
6830 /* Return the address of the first anonymous stack arg. */
6831 case BUILT_IN_NEXT_ARG:
6832 if (fold_builtin_next_arg (exp, false))
6833 return const0_rtx;
6834 return expand_builtin_next_arg ();
6835
6836 case BUILT_IN_CLEAR_CACHE:
6837 target = expand_builtin___clear_cache (exp);
6838 if (target)
6839 return target;
6840 break;
6841
6842 case BUILT_IN_CLASSIFY_TYPE:
6843 return expand_builtin_classify_type (exp);
6844
6845 case BUILT_IN_CONSTANT_P:
6846 return const0_rtx;
6847
6848 case BUILT_IN_FRAME_ADDRESS:
6849 case BUILT_IN_RETURN_ADDRESS:
6850 return expand_builtin_frame_address (fndecl, exp);
6851
6852 /* Returns the address of the area where the structure is returned.
6853 0 otherwise. */
6854 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6855 if (call_expr_nargs (exp) != 0
6856 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6857 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6858 return const0_rtx;
6859 else
6860 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6861
6862 CASE_BUILT_IN_ALLOCA:
6863 target = expand_builtin_alloca (exp);
6864 if (target)
6865 return target;
6866 break;
6867
6868 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6869 return expand_asan_emit_allocas_unpoison (exp);
6870
6871 case BUILT_IN_STACK_SAVE:
6872 return expand_stack_save ();
6873
6874 case BUILT_IN_STACK_RESTORE:
6875 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6876 return const0_rtx;
6877
6878 case BUILT_IN_BSWAP16:
6879 case BUILT_IN_BSWAP32:
6880 case BUILT_IN_BSWAP64:
6881 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6882 if (target)
6883 return target;
6884 break;
6885
6886 CASE_INT_FN (BUILT_IN_FFS):
6887 target = expand_builtin_unop (target_mode, exp, target,
6888 subtarget, ffs_optab);
6889 if (target)
6890 return target;
6891 break;
6892
6893 CASE_INT_FN (BUILT_IN_CLZ):
6894 target = expand_builtin_unop (target_mode, exp, target,
6895 subtarget, clz_optab);
6896 if (target)
6897 return target;
6898 break;
6899
6900 CASE_INT_FN (BUILT_IN_CTZ):
6901 target = expand_builtin_unop (target_mode, exp, target,
6902 subtarget, ctz_optab);
6903 if (target)
6904 return target;
6905 break;
6906
6907 CASE_INT_FN (BUILT_IN_CLRSB):
6908 target = expand_builtin_unop (target_mode, exp, target,
6909 subtarget, clrsb_optab);
6910 if (target)
6911 return target;
6912 break;
6913
6914 CASE_INT_FN (BUILT_IN_POPCOUNT):
6915 target = expand_builtin_unop (target_mode, exp, target,
6916 subtarget, popcount_optab);
6917 if (target)
6918 return target;
6919 break;
6920
6921 CASE_INT_FN (BUILT_IN_PARITY):
6922 target = expand_builtin_unop (target_mode, exp, target,
6923 subtarget, parity_optab);
6924 if (target)
6925 return target;
6926 break;
6927
6928 case BUILT_IN_STRLEN:
6929 target = expand_builtin_strlen (exp, target, target_mode);
6930 if (target)
6931 return target;
6932 break;
6933
6934 case BUILT_IN_STRCAT:
6935 target = expand_builtin_strcat (exp, target);
6936 if (target)
6937 return target;
6938 break;
6939
6940 case BUILT_IN_STRCPY:
6941 target = expand_builtin_strcpy (exp, target);
6942 if (target)
6943 return target;
6944 break;
6945
6946 case BUILT_IN_STRNCAT:
6947 target = expand_builtin_strncat (exp, target);
6948 if (target)
6949 return target;
6950 break;
6951
6952 case BUILT_IN_STRNCPY:
6953 target = expand_builtin_strncpy (exp, target);
6954 if (target)
6955 return target;
6956 break;
6957
6958 case BUILT_IN_STPCPY:
6959 target = expand_builtin_stpcpy (exp, target, mode);
6960 if (target)
6961 return target;
6962 break;
6963
6964 case BUILT_IN_STPNCPY:
6965 target = expand_builtin_stpncpy (exp, target);
6966 if (target)
6967 return target;
6968 break;
6969
6970 case BUILT_IN_MEMCHR:
6971 target = expand_builtin_memchr (exp, target);
6972 if (target)
6973 return target;
6974 break;
6975
6976 case BUILT_IN_MEMCPY:
6977 target = expand_builtin_memcpy (exp, target);
6978 if (target)
6979 return target;
6980 break;
6981
6982 case BUILT_IN_MEMMOVE:
6983 target = expand_builtin_memmove (exp, target);
6984 if (target)
6985 return target;
6986 break;
6987
6988 case BUILT_IN_MEMPCPY:
6989 target = expand_builtin_mempcpy (exp, target);
6990 if (target)
6991 return target;
6992 break;
6993
6994 case BUILT_IN_MEMSET:
6995 target = expand_builtin_memset (exp, target, mode);
6996 if (target)
6997 return target;
6998 break;
6999
7000 case BUILT_IN_BZERO:
7001 target = expand_builtin_bzero (exp);
7002 if (target)
7003 return target;
7004 break;
7005
7006 case BUILT_IN_STRCMP:
7007 target = expand_builtin_strcmp (exp, target);
7008 if (target)
7009 return target;
7010 break;
7011
7012 case BUILT_IN_STRNCMP:
7013 target = expand_builtin_strncmp (exp, target, mode);
7014 if (target)
7015 return target;
7016 break;
7017
7018 case BUILT_IN_BCMP:
7019 case BUILT_IN_MEMCMP:
7020 case BUILT_IN_MEMCMP_EQ:
7021 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7022 if (target)
7023 return target;
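/* Note added for clarity: __builtin_memcmp_eq has no libc counterpart, so
   if inline expansion failed, point the call at the ordinary memcmp decl
   before dropping through to the library-call path below.  */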
7024 if (fcode == BUILT_IN_MEMCMP_EQ)
7025 {
7026 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7027 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7028 }
7029 break;
7030
7031 case BUILT_IN_SETJMP:
7032 /* This should have been lowered to the builtins below. */
7033 gcc_unreachable ();
7034
7035 case BUILT_IN_SETJMP_SETUP:
7036 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7037 and the receiver label. */
7038 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7039 {
7040 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7041 VOIDmode, EXPAND_NORMAL);
7042 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7043 rtx_insn *label_r = label_rtx (label);
7044
7045 /* This is copied from the handling of non-local gotos. */
7046 expand_builtin_setjmp_setup (buf_addr, label_r);
7047 nonlocal_goto_handler_labels
7048 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7049 nonlocal_goto_handler_labels);
7050 /* ??? Do not let expand_label treat us as such since we would
7051 not want to be both on the list of non-local labels and on
7052 the list of forced labels. */
7053 FORCED_LABEL (label) = 0;
7054 return const0_rtx;
7055 }
7056 break;
7057
7058 case BUILT_IN_SETJMP_RECEIVER:
7059 /* __builtin_setjmp_receiver is passed the receiver label. */
7060 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7061 {
7062 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7063 rtx_insn *label_r = label_rtx (label);
7064
7065 expand_builtin_setjmp_receiver (label_r);
7066 return const0_rtx;
7067 }
7068 break;
7069
7070 /* __builtin_longjmp is passed a pointer to an array of five words.
7071 It's similar to the C library longjmp function but works with
7072 __builtin_setjmp above. */
7073 case BUILT_IN_LONGJMP:
7074 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7075 {
7076 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7077 VOIDmode, EXPAND_NORMAL);
7078 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7079
7080 if (value != const1_rtx)
7081 {
7082 error ("%<__builtin_longjmp%> second argument must be 1");
7083 return const0_rtx;
7084 }
7085
7086 expand_builtin_longjmp (buf_addr, value);
7087 return const0_rtx;
7088 }
7089 break;
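/* Illustrative note (not in the original source): a minimal, hedged use of
   the builtin pair handled above.  Unlike the libc longjmp, the second
   argument must be the literal 1:
     intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);
   The longjmp transfers control back to the setjmp, which then yields a
   nonzero value.  */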
7090
7091 case BUILT_IN_NONLOCAL_GOTO:
7092 target = expand_builtin_nonlocal_goto (exp);
7093 if (target)
7094 return target;
7095 break;
7096
7097 /* This updates the setjmp buffer that is its argument with the value
7098 of the current stack pointer. */
7099 case BUILT_IN_UPDATE_SETJMP_BUF:
7100 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7101 {
7102 rtx buf_addr
7103 = expand_normal (CALL_EXPR_ARG (exp, 0));
7104
7105 expand_builtin_update_setjmp_buf (buf_addr);
7106 return const0_rtx;
7107 }
7108 break;
7109
7110 case BUILT_IN_TRAP:
7111 expand_builtin_trap ();
7112 return const0_rtx;
7113
7114 case BUILT_IN_UNREACHABLE:
7115 expand_builtin_unreachable ();
7116 return const0_rtx;
7117
7118 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7119 case BUILT_IN_SIGNBITD32:
7120 case BUILT_IN_SIGNBITD64:
7121 case BUILT_IN_SIGNBITD128:
7122 target = expand_builtin_signbit (exp, target);
7123 if (target)
7124 return target;
7125 break;
7126
7127 /* Various hooks for the DWARF 2 __throw routine. */
7128 case BUILT_IN_UNWIND_INIT:
7129 expand_builtin_unwind_init ();
7130 return const0_rtx;
7131 case BUILT_IN_DWARF_CFA:
7132 return virtual_cfa_rtx;
7133 #ifdef DWARF2_UNWIND_INFO
7134 case BUILT_IN_DWARF_SP_COLUMN:
7135 return expand_builtin_dwarf_sp_column ();
7136 case BUILT_IN_INIT_DWARF_REG_SIZES:
7137 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7138 return const0_rtx;
7139 #endif
7140 case BUILT_IN_FROB_RETURN_ADDR:
7141 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7142 case BUILT_IN_EXTRACT_RETURN_ADDR:
7143 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7144 case BUILT_IN_EH_RETURN:
7145 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7146 CALL_EXPR_ARG (exp, 1));
7147 return const0_rtx;
7148 case BUILT_IN_EH_RETURN_DATA_REGNO:
7149 return expand_builtin_eh_return_data_regno (exp);
7150 case BUILT_IN_EXTEND_POINTER:
7151 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7152 case BUILT_IN_EH_POINTER:
7153 return expand_builtin_eh_pointer (exp);
7154 case BUILT_IN_EH_FILTER:
7155 return expand_builtin_eh_filter (exp);
7156 case BUILT_IN_EH_COPY_VALUES:
7157 return expand_builtin_eh_copy_values (exp);
7158
7159 case BUILT_IN_VA_START:
7160 return expand_builtin_va_start (exp);
7161 case BUILT_IN_VA_END:
7162 return expand_builtin_va_end (exp);
7163 case BUILT_IN_VA_COPY:
7164 return expand_builtin_va_copy (exp);
7165 case BUILT_IN_EXPECT:
7166 return expand_builtin_expect (exp, target);
7167 case BUILT_IN_ASSUME_ALIGNED:
7168 return expand_builtin_assume_aligned (exp, target);
7169 case BUILT_IN_PREFETCH:
7170 expand_builtin_prefetch (exp);
7171 return const0_rtx;
7172
7173 case BUILT_IN_INIT_TRAMPOLINE:
7174 return expand_builtin_init_trampoline (exp, true);
7175 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7176 return expand_builtin_init_trampoline (exp, false);
7177 case BUILT_IN_ADJUST_TRAMPOLINE:
7178 return expand_builtin_adjust_trampoline (exp);
7179
7180 case BUILT_IN_INIT_DESCRIPTOR:
7181 return expand_builtin_init_descriptor (exp);
7182 case BUILT_IN_ADJUST_DESCRIPTOR:
7183 return expand_builtin_adjust_descriptor (exp);
7184
7185 case BUILT_IN_FORK:
7186 case BUILT_IN_EXECL:
7187 case BUILT_IN_EXECV:
7188 case BUILT_IN_EXECLP:
7189 case BUILT_IN_EXECLE:
7190 case BUILT_IN_EXECVP:
7191 case BUILT_IN_EXECVE:
7192 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7193 if (target)
7194 return target;
7195 break;
7196
7197 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7198 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7199 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7200 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7201 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7202 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7203 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7204 if (target)
7205 return target;
7206 break;
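/* Illustrative note (not in the original source): the _1/_2/_4/_8/_16
   variants are enumerated consecutively, so the access size follows from
   the offset from the _1 code and picks the matching integer mode.  For a
   hypothetical int COUNTER (4 bytes on typical targets),
     int old = __sync_fetch_and_add (&counter, 5);
   maps to the 4-byte case above and returns the value COUNTER held before
   the atomic addition.  */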
7207
7208 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7209 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7210 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7211 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7212 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7213 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7214 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7215 if (target)
7216 return target;
7217 break;
7218
7219 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7220 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7221 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7222 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7223 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7224 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7225 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7226 if (target)
7227 return target;
7228 break;
7229
7230 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7231 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7232 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7233 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7234 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7235 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7236 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7237 if (target)
7238 return target;
7239 break;
7240
7241 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7242 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7243 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7244 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7245 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7246 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7247 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7248 if (target)
7249 return target;
7250 break;
7251
7252 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7253 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7254 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7255 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7256 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7257 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7258 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7259 if (target)
7260 return target;
7261 break;
7262
7263 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7264 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7265 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7266 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7267 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7268 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7269 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7270 if (target)
7271 return target;
7272 break;
7273
7274 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7275 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7276 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7277 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7278 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7279 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7280 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7281 if (target)
7282 return target;
7283 break;
7284
7285 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7286 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7287 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7288 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7289 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7290 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7291 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7292 if (target)
7293 return target;
7294 break;
7295
7296 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7297 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7298 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7299 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7300 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7301 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7302 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7303 if (target)
7304 return target;
7305 break;
7306
7307 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7308 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7309 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7310 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7311 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7312 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7313 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7314 if (target)
7315 return target;
7316 break;
7317
7318 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7319 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7320 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7321 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7322 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7323 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7324 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7325 if (target)
7326 return target;
7327 break;
7328
7329 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7330 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7331 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7332 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7333 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7334 if (mode == VOIDmode)
7335 mode = TYPE_MODE (boolean_type_node);
7336 if (!target || !register_operand (target, mode))
7337 target = gen_reg_rtx (mode);
7338
7339 mode = get_builtin_sync_mode
7340 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7341 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7342 if (target)
7343 return target;
7344 break;
7345
7346 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7347 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7348 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7349 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7350 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7351 mode = get_builtin_sync_mode
7352 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7353 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7354 if (target)
7355 return target;
7356 break;
7357
7358 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7359 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7360 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7361 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7362 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7363 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7364 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7365 if (target)
7366 return target;
7367 break;
7368
7369 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7370 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7371 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7372 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7373 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7374 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7375 expand_builtin_sync_lock_release (mode, exp);
7376 return const0_rtx;
7377
7378 case BUILT_IN_SYNC_SYNCHRONIZE:
7379 expand_builtin_sync_synchronize ();
7380 return const0_rtx;
7381
7382 case BUILT_IN_ATOMIC_EXCHANGE_1:
7383 case BUILT_IN_ATOMIC_EXCHANGE_2:
7384 case BUILT_IN_ATOMIC_EXCHANGE_4:
7385 case BUILT_IN_ATOMIC_EXCHANGE_8:
7386 case BUILT_IN_ATOMIC_EXCHANGE_16:
7387 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7388 target = expand_builtin_atomic_exchange (mode, exp, target);
7389 if (target)
7390 return target;
7391 break;
7392
7393 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7394 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7395 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7396 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7397 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7398 {
7399 unsigned int nargs, z;
7400 vec<tree, va_gc> *vec;
7401
7402 mode =
7403 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7404 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7405 if (target)
7406 return target;
7407
7408 /* If this is turned into an external library call, the weak parameter
7409 must be dropped to match the expected parameter list. */
7410 nargs = call_expr_nargs (exp);
7411 vec_alloc (vec, nargs - 1);
7412 for (z = 0; z < 3; z++)
7413 vec->quick_push (CALL_EXPR_ARG (exp, z));
7414 /* Skip the boolean weak parameter. */
7415 for (z = 4; z < 6; z++)
7416 vec->quick_push (CALL_EXPR_ARG (exp, z));
7417 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7418 break;
7419 }
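/* Illustrative note (not in the original source): at the source level the
   builtin takes six arguments, e.g. for hypothetical V, EXPECTED, DESIRED
     __atomic_compare_exchange_n (&v, &expected, desired, 0,
                                  __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
   but the out-of-line library routine takes no weak flag, which is why
   argument 3 is dropped above before the call is rebuilt.  */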
7420
7421 case BUILT_IN_ATOMIC_LOAD_1:
7422 case BUILT_IN_ATOMIC_LOAD_2:
7423 case BUILT_IN_ATOMIC_LOAD_4:
7424 case BUILT_IN_ATOMIC_LOAD_8:
7425 case BUILT_IN_ATOMIC_LOAD_16:
7426 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7427 target = expand_builtin_atomic_load (mode, exp, target);
7428 if (target)
7429 return target;
7430 break;
7431
7432 case BUILT_IN_ATOMIC_STORE_1:
7433 case BUILT_IN_ATOMIC_STORE_2:
7434 case BUILT_IN_ATOMIC_STORE_4:
7435 case BUILT_IN_ATOMIC_STORE_8:
7436 case BUILT_IN_ATOMIC_STORE_16:
7437 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7438 target = expand_builtin_atomic_store (mode, exp);
7439 if (target)
7440 return const0_rtx;
7441 break;
7442
7443 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7444 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7445 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7446 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7447 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7448 {
7449 enum built_in_function lib;
7450 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7451 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7452 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7453 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7454 ignore, lib);
7455 if (target)
7456 return target;
7457 break;
7458 }
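/* Illustrative note (not in the original source): the library fallback
   above relies on the identity
     __atomic_add_fetch (p, v, order) == __atomic_fetch_add (p, v, order) + v
   so when only the fetch-and-add entry point exists, the post-operation
   value is recovered by redoing the addition on the returned result (the
   'true' argument asks for the value after the operation).  */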
7459 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7460 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7461 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7462 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7463 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7464 {
7465 enum built_in_function lib;
7466 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7467 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7468 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7469 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7470 ignore, lib);
7471 if (target)
7472 return target;
7473 break;
7474 }
7475 case BUILT_IN_ATOMIC_AND_FETCH_1:
7476 case BUILT_IN_ATOMIC_AND_FETCH_2:
7477 case BUILT_IN_ATOMIC_AND_FETCH_4:
7478 case BUILT_IN_ATOMIC_AND_FETCH_8:
7479 case BUILT_IN_ATOMIC_AND_FETCH_16:
7480 {
7481 enum built_in_function lib;
7482 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7483 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7484 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7485 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7486 ignore, lib);
7487 if (target)
7488 return target;
7489 break;
7490 }
7491 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7492 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7493 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7494 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7495 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7496 {
7497 enum built_in_function lib;
7498 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7499 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7500 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7501 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7502 ignore, lib);
7503 if (target)
7504 return target;
7505 break;
7506 }
7507 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7508 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7509 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7510 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7511 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7512 {
7513 enum built_in_function lib;
7514 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7515 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7516 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7517 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7518 ignore, lib);
7519 if (target)
7520 return target;
7521 break;
7522 }
7523 case BUILT_IN_ATOMIC_OR_FETCH_1:
7524 case BUILT_IN_ATOMIC_OR_FETCH_2:
7525 case BUILT_IN_ATOMIC_OR_FETCH_4:
7526 case BUILT_IN_ATOMIC_OR_FETCH_8:
7527 case BUILT_IN_ATOMIC_OR_FETCH_16:
7528 {
7529 enum built_in_function lib;
7530 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7531 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7532 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7533 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7534 ignore, lib);
7535 if (target)
7536 return target;
7537 break;
7538 }
7539 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7540 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7541 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7542 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7543 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7544 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7545 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7546 ignore, BUILT_IN_NONE);
7547 if (target)
7548 return target;
7549 break;
7550
7551 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7552 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7553 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7554 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7555 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7556 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7557 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7558 ignore, BUILT_IN_NONE);
7559 if (target)
7560 return target;
7561 break;
7562
7563 case BUILT_IN_ATOMIC_FETCH_AND_1:
7564 case BUILT_IN_ATOMIC_FETCH_AND_2:
7565 case BUILT_IN_ATOMIC_FETCH_AND_4:
7566 case BUILT_IN_ATOMIC_FETCH_AND_8:
7567 case BUILT_IN_ATOMIC_FETCH_AND_16:
7568 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7569 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7570 ignore, BUILT_IN_NONE);
7571 if (target)
7572 return target;
7573 break;
7574
7575 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7576 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7577 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7578 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7579 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7580 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7581 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7582 ignore, BUILT_IN_NONE);
7583 if (target)
7584 return target;
7585 break;
7586
7587 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7588 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7589 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7590 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7591 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7592 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7593 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7594 ignore, BUILT_IN_NONE);
7595 if (target)
7596 return target;
7597 break;
7598
7599 case BUILT_IN_ATOMIC_FETCH_OR_1:
7600 case BUILT_IN_ATOMIC_FETCH_OR_2:
7601 case BUILT_IN_ATOMIC_FETCH_OR_4:
7602 case BUILT_IN_ATOMIC_FETCH_OR_8:
7603 case BUILT_IN_ATOMIC_FETCH_OR_16:
7604 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7605 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7606 ignore, BUILT_IN_NONE);
7607 if (target)
7608 return target;
7609 break;
7610
7611 case BUILT_IN_ATOMIC_TEST_AND_SET:
7612 return expand_builtin_atomic_test_and_set (exp, target);
7613
7614 case BUILT_IN_ATOMIC_CLEAR:
7615 return expand_builtin_atomic_clear (exp);
7616
7617 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7618 return expand_builtin_atomic_always_lock_free (exp);
7619
7620 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7621 target = expand_builtin_atomic_is_lock_free (exp);
7622 if (target)
7623 return target;
7624 break;
7625
7626 case BUILT_IN_ATOMIC_THREAD_FENCE:
7627 expand_builtin_atomic_thread_fence (exp);
7628 return const0_rtx;
7629
7630 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7631 expand_builtin_atomic_signal_fence (exp);
7632 return const0_rtx;
7633
7634 case BUILT_IN_OBJECT_SIZE:
7635 return expand_builtin_object_size (exp);
7636
7637 case BUILT_IN_MEMCPY_CHK:
7638 case BUILT_IN_MEMPCPY_CHK:
7639 case BUILT_IN_MEMMOVE_CHK:
7640 case BUILT_IN_MEMSET_CHK:
7641 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7642 if (target)
7643 return target;
7644 break;
7645
7646 case BUILT_IN_STRCPY_CHK:
7647 case BUILT_IN_STPCPY_CHK:
7648 case BUILT_IN_STRNCPY_CHK:
7649 case BUILT_IN_STPNCPY_CHK:
7650 case BUILT_IN_STRCAT_CHK:
7651 case BUILT_IN_STRNCAT_CHK:
7652 case BUILT_IN_SNPRINTF_CHK:
7653 case BUILT_IN_VSNPRINTF_CHK:
7654 maybe_emit_chk_warning (exp, fcode);
7655 break;
7656
7657 case BUILT_IN_SPRINTF_CHK:
7658 case BUILT_IN_VSPRINTF_CHK:
7659 maybe_emit_sprintf_chk_warning (exp, fcode);
7660 break;
7661
7662 case BUILT_IN_FREE:
7663 if (warn_free_nonheap_object)
7664 maybe_emit_free_warning (exp);
7665 break;
7666
7667 case BUILT_IN_THREAD_POINTER:
7668 return expand_builtin_thread_pointer (exp, target);
7669
7670 case BUILT_IN_SET_THREAD_POINTER:
7671 expand_builtin_set_thread_pointer (exp);
7672 return const0_rtx;
7673
7674 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7675 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7676 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7677 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7678 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7679 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7680 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7681 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7682 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7683 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7684 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7685 /* We allow user CHKP builtins if Pointer Bounds
7686 Checker is off. */
7687 if (!chkp_function_instrumented_p (current_function_decl))
7688 {
7689 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7690 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7691 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7692 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7693 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7694 return expand_normal (CALL_EXPR_ARG (exp, 0));
7695 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7696 return expand_normal (size_zero_node);
7697 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7698 return expand_normal (size_int (-1));
7699 else
7700 return const0_rtx;
7701 }
7702 /* FALLTHROUGH */
7703
7704 case BUILT_IN_CHKP_BNDMK:
7705 case BUILT_IN_CHKP_BNDSTX:
7706 case BUILT_IN_CHKP_BNDCL:
7707 case BUILT_IN_CHKP_BNDCU:
7708 case BUILT_IN_CHKP_BNDLDX:
7709 case BUILT_IN_CHKP_BNDRET:
7710 case BUILT_IN_CHKP_INTERSECT:
7711 case BUILT_IN_CHKP_NARROW:
7712 case BUILT_IN_CHKP_EXTRACT_LOWER:
7713 case BUILT_IN_CHKP_EXTRACT_UPPER:
7714 /* A software implementation of Pointer Bounds Checker is not yet
7715 implemented; target support is required. */
7716 error ("Your target platform does not support -fcheck-pointer-bounds");
7717 break;
7718
7719 case BUILT_IN_ACC_ON_DEVICE:
7720 /* Do library call, if we failed to expand the builtin when
7721 folding. */
7722 break;
7723
7724 default: /* just do library call, if unknown builtin */
7725 break;
7726 }
7727
7728 /* The switch statement above can drop through to cause the function
7729 to be called normally. */
7730 return expand_call (exp, target, ignore);
7731 }
7732
7733 /* Similar to expand_builtin but is used for instrumented calls. */
7734
7735 rtx
7736 expand_builtin_with_bounds (tree exp, rtx target,
7737 rtx subtarget ATTRIBUTE_UNUSED,
7738 machine_mode mode, int ignore)
7739 {
7740 tree fndecl = get_callee_fndecl (exp);
7741 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7742
7743 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7744
7745 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7746 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7747
7748 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7749 && fcode < END_CHKP_BUILTINS);
7750
7751 switch (fcode)
7752 {
7753 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7754 target = expand_builtin_memcpy_with_bounds (exp, target);
7755 if (target)
7756 return target;
7757 break;
7758
7759 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7760 target = expand_builtin_mempcpy_with_bounds (exp, target);
7761 if (target)
7762 return target;
7763 break;
7764
7765 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7766 target = expand_builtin_memset_with_bounds (exp, target, mode);
7767 if (target)
7768 return target;
7769 break;
7770
7771 case BUILT_IN_MEMCPY_CHKP:
7772 case BUILT_IN_MEMMOVE_CHKP:
7773 case BUILT_IN_MEMPCPY_CHKP:
7774 if (call_expr_nargs (exp) > 3)
7775 {
7776 /* memcpy_chkp (void *dst, size_t dstbnd,
7777 const void *src, size_t srcbnd, size_t n)
7778 and others take a pointer bound argument just after each
7779 pointer argument. */
7780 tree dest = CALL_EXPR_ARG (exp, 0);
7781 tree src = CALL_EXPR_ARG (exp, 2);
7782 tree len = CALL_EXPR_ARG (exp, 4);
7783
7784 check_memop_access (exp, dest, src, len);
7785 break;
7786 }
7787
7788 default:
7789 break;
7790 }
7791
7792 /* The switch statement above can drop through to cause the function
7793 to be called normally. */
7794 return expand_call (exp, target, ignore);
7795 }
7796
7797 /* Determine whether a tree node represents a call to a built-in
7798 function. If the tree T is a call to a built-in function with
7799 the right number of arguments of the appropriate types, return
7800 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7801 Otherwise the return value is END_BUILTINS. */
7802
7803 enum built_in_function
7804 builtin_mathfn_code (const_tree t)
7805 {
7806 const_tree fndecl, arg, parmlist;
7807 const_tree argtype, parmtype;
7808 const_call_expr_arg_iterator iter;
7809
7810 if (TREE_CODE (t) != CALL_EXPR
7811 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7812 return END_BUILTINS;
7813
7814 fndecl = get_callee_fndecl (t);
7815 if (fndecl == NULL_TREE
7816 || TREE_CODE (fndecl) != FUNCTION_DECL
7817 || ! DECL_BUILT_IN (fndecl)
7818 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7819 return END_BUILTINS;
7820
7821 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7822 init_const_call_expr_arg_iterator (t, &iter);
7823 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7824 {
7825 /* If a function doesn't take a variable number of arguments,
7826 the last element in the list will have type `void'. */
7827 parmtype = TREE_VALUE (parmlist);
7828 if (VOID_TYPE_P (parmtype))
7829 {
7830 if (more_const_call_expr_args_p (&iter))
7831 return END_BUILTINS;
7832 return DECL_FUNCTION_CODE (fndecl);
7833 }
7834
7835 if (! more_const_call_expr_args_p (&iter))
7836 return END_BUILTINS;
7837
7838 arg = next_const_call_expr_arg (&iter);
7839 argtype = TREE_TYPE (arg);
7840
7841 if (SCALAR_FLOAT_TYPE_P (parmtype))
7842 {
7843 if (! SCALAR_FLOAT_TYPE_P (argtype))
7844 return END_BUILTINS;
7845 }
7846 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7847 {
7848 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7849 return END_BUILTINS;
7850 }
7851 else if (POINTER_TYPE_P (parmtype))
7852 {
7853 if (! POINTER_TYPE_P (argtype))
7854 return END_BUILTINS;
7855 }
7856 else if (INTEGRAL_TYPE_P (parmtype))
7857 {
7858 if (! INTEGRAL_TYPE_P (argtype))
7859 return END_BUILTINS;
7860 }
7861 else
7862 return END_BUILTINS;
7863 }
7864
7865 /* Variable-length argument list. */
7866 return DECL_FUNCTION_CODE (fndecl);
7867 }
7868
7869 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7870 evaluate to a constant. */
7871
7872 static tree
7873 fold_builtin_constant_p (tree arg)
7874 {
7875 /* We return 1 for a numeric type that's known to be a constant
7876 value at compile-time or for an aggregate type that's a
7877 literal constant. */
7878 STRIP_NOPS (arg);
7879
7880 /* If we know this is a constant, return the constant one. */
7881 if (CONSTANT_CLASS_P (arg)
7882 || (TREE_CODE (arg) == CONSTRUCTOR
7883 && TREE_CONSTANT (arg)))
7884 return integer_one_node;
7885 if (TREE_CODE (arg) == ADDR_EXPR)
7886 {
7887 tree op = TREE_OPERAND (arg, 0);
7888 if (TREE_CODE (op) == STRING_CST
7889 || (TREE_CODE (op) == ARRAY_REF
7890 && integer_zerop (TREE_OPERAND (op, 1))
7891 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7892 return integer_one_node;
7893 }
7894
7895 /* If this expression has side effects, show we don't know it to be a
7896 constant. Likewise if it's a pointer or aggregate type since in
7897 those cases we only want literals, since those are only optimized
7898 when generating RTL, not later.
7899 And finally, if we are compiling an initializer, not code, we
7900 need to return a definite result now; there's not going to be any
7901 more optimization done. */
7902 if (TREE_SIDE_EFFECTS (arg)
7903 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7904 || POINTER_TYPE_P (TREE_TYPE (arg))
7905 || cfun == 0
7906 || folding_initializer
7907 || force_folding_builtin_constant_p)
7908 return integer_zero_node;
7909
7910 return NULL_TREE;
7911 }
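/* Illustrative note (not in the original source): a few hedged examples of
   the folding above, for a hypothetical non-volatile int X:
     __builtin_constant_p (3)       folds to 1 (a constant class node)
     __builtin_constant_p ("abc")   folds to 1 (address of a STRING_CST)
     __builtin_constant_p (x++)     folds to 0 (the argument has side effects)
     __builtin_constant_p (x)       is left alone (NULL_TREE) so that later
                                    optimizations may still prove X constant.  */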
7912
7913 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7914 return it as a truthvalue. */
7915
7916 static tree
7917 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7918 tree predictor)
7919 {
7920 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7921
7922 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7923 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7924 ret_type = TREE_TYPE (TREE_TYPE (fn));
7925 pred_type = TREE_VALUE (arg_types);
7926 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7927
7928 pred = fold_convert_loc (loc, pred_type, pred);
7929 expected = fold_convert_loc (loc, expected_type, expected);
7930 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7931 predictor);
7932
7933 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7934 build_int_cst (ret_type, 0));
7935 }
7936
7937 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2 (the
7938 optional predictor).  Return NULL_TREE if no simplification is possible. */
7939
7940 tree
7941 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7942 {
7943 tree inner, fndecl, inner_arg0;
7944 enum tree_code code;
7945
7946 /* Distribute the expected value over short-circuiting operators.
7947 See through the cast from truthvalue_type_node to long. */
7948 inner_arg0 = arg0;
7949 while (CONVERT_EXPR_P (inner_arg0)
7950 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7951 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7952 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7953
7954 /* If this is a builtin_expect within a builtin_expect keep the
7955 inner one. See through a comparison against a constant. It
7956 might have been added to create a truthvalue. */
7957 inner = inner_arg0;
7958
7959 if (COMPARISON_CLASS_P (inner)
7960 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7961 inner = TREE_OPERAND (inner, 0);
7962
7963 if (TREE_CODE (inner) == CALL_EXPR
7964 && (fndecl = get_callee_fndecl (inner))
7965 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7966 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7967 return arg0;
7968
7969 inner = inner_arg0;
7970 code = TREE_CODE (inner);
7971 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7972 {
7973 tree op0 = TREE_OPERAND (inner, 0);
7974 tree op1 = TREE_OPERAND (inner, 1);
7975
7976 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7977 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7978 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7979
7980 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7981 }
7982
7983 /* If the argument isn't invariant then there's nothing else we can do. */
7984 if (!TREE_CONSTANT (inner_arg0))
7985 return NULL_TREE;
7986
7987 /* If we expect that a comparison against the argument will fold to
7988 a constant, return the constant. In practice, this means a true
7989 constant or the address of a non-weak symbol. */
7990 inner = inner_arg0;
7991 STRIP_NOPS (inner);
7992 if (TREE_CODE (inner) == ADDR_EXPR)
7993 {
7994 do
7995 {
7996 inner = TREE_OPERAND (inner, 0);
7997 }
7998 while (TREE_CODE (inner) == COMPONENT_REF
7999 || TREE_CODE (inner) == ARRAY_REF);
8000 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8001 return NULL_TREE;
8002 }
8003
8004 /* Otherwise, ARG0 already has the proper type for the return value. */
8005 return arg0;
8006 }
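/* Illustrative note (not in the original source): the distribution over
   short-circuit operators above means that, roughly, for hypothetical
   conditions A and B
     __builtin_expect (a && b, 1)
   is rewritten as if it were
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
   so each arm of the short-circuit carries its own prediction.  */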
8007
8008 /* Fold a call to __builtin_classify_type with argument ARG. */
8009
8010 static tree
8011 fold_builtin_classify_type (tree arg)
8012 {
8013 if (arg == 0)
8014 return build_int_cst (integer_type_node, no_type_class);
8015
8016 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8017 }
8018
8019 /* Fold a call to __builtin_strlen with argument ARG. */
8020
8021 static tree
8022 fold_builtin_strlen (location_t loc, tree type, tree arg)
8023 {
8024 if (!validate_arg (arg, POINTER_TYPE))
8025 return NULL_TREE;
8026 else
8027 {
8028 tree len = c_strlen (arg, 0);
8029
8030 if (len)
8031 return fold_convert_loc (loc, type, len);
8032
8033 return NULL_TREE;
8034 }
8035 }
8036
8037 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8038
8039 static tree
8040 fold_builtin_inf (location_t loc, tree type, int warn)
8041 {
8042 REAL_VALUE_TYPE real;
8043
8044 /* __builtin_inff is intended to be usable to define INFINITY on all
8045 targets. If an infinity is not available, INFINITY expands "to a
8046 positive constant of type float that overflows at translation
8047 time", footnote "In this case, using INFINITY will violate the
8048 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8049 Thus we pedwarn to ensure this constraint violation is
8050 diagnosed. */
8051 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8052 pedwarn (loc, 0, "target format does not support infinity");
8053
8054 real_inf (&real);
8055 return build_real (type, real);
8056 }
8057
8058 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8059 NULL_TREE if no simplification can be made. */
8060
8061 static tree
8062 fold_builtin_sincos (location_t loc,
8063 tree arg0, tree arg1, tree arg2)
8064 {
8065 tree type;
8066 tree fndecl, call = NULL_TREE;
8067
8068 if (!validate_arg (arg0, REAL_TYPE)
8069 || !validate_arg (arg1, POINTER_TYPE)
8070 || !validate_arg (arg2, POINTER_TYPE))
8071 return NULL_TREE;
8072
8073 type = TREE_TYPE (arg0);
8074
8075 /* Calculate the result when the argument is a constant. */
8076 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8077 if (fn == END_BUILTINS)
8078 return NULL_TREE;
8079
8080 /* Canonicalize sincos to cexpi. */
8081 if (TREE_CODE (arg0) == REAL_CST)
8082 {
8083 tree complex_type = build_complex_type (type);
8084 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8085 }
8086 if (!call)
8087 {
8088 if (!targetm.libc_has_function (function_c99_math_complex)
8089 || !builtin_decl_implicit_p (fn))
8090 return NULL_TREE;
8091 fndecl = builtin_decl_explicit (fn);
8092 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8093 call = builtin_save_expr (call);
8094 }
8095
8096 return build2 (COMPOUND_EXPR, void_type_node,
8097 build2 (MODIFY_EXPR, void_type_node,
8098 build_fold_indirect_ref_loc (loc, arg1),
8099 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8100 build2 (MODIFY_EXPR, void_type_node,
8101 build_fold_indirect_ref_loc (loc, arg2),
8102 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8103 }
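/* Illustrative note (not in the original source): the canonicalization
   above turns, in effect,
     sincos (x, &s, &c);
   into the equivalent of
     _Complex double t = cexpi (x);   -- cexpi is a GCC-internal builtin
     s = __imag__ t;                  -- sine part
     c = __real__ t;                  -- cosine part
   since cexpi (x) == cos (x) + i * sin (x).  */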
8104
8105 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8106 Return NULL_TREE if no simplification can be made. */
8107
8108 static tree
8109 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8110 {
8111 if (!validate_arg (arg1, POINTER_TYPE)
8112 || !validate_arg (arg2, POINTER_TYPE)
8113 || !validate_arg (len, INTEGER_TYPE))
8114 return NULL_TREE;
8115
8116 /* If the LEN parameter is zero, return zero. */
8117 if (integer_zerop (len))
8118 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8119 arg1, arg2);
8120
8121 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8122 if (operand_equal_p (arg1, arg2, 0))
8123 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8124
8125 /* If the LEN parameter is one, return an expression corresponding to
8126 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8127 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8128 {
8129 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8130 tree cst_uchar_ptr_node
8131 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8132
8133 tree ind1
8134 = fold_convert_loc (loc, integer_type_node,
8135 build1 (INDIRECT_REF, cst_uchar_node,
8136 fold_convert_loc (loc,
8137 cst_uchar_ptr_node,
8138 arg1)));
8139 tree ind2
8140 = fold_convert_loc (loc, integer_type_node,
8141 build1 (INDIRECT_REF, cst_uchar_node,
8142 fold_convert_loc (loc,
8143 cst_uchar_ptr_node,
8144 arg2)));
8145 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8146 }
8147
8148 return NULL_TREE;
8149 }
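/* Illustrative note (not in the original source): with LEN equal to one the
   fold above turns a hypothetical
     memcmp (p, q, 1)
   into the byte difference
     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q
   which has the sign behaviour memcmp requires for single-byte compares.  */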
8150
8151 /* Fold a call to builtin isascii with argument ARG. */
8152
8153 static tree
8154 fold_builtin_isascii (location_t loc, tree arg)
8155 {
8156 if (!validate_arg (arg, INTEGER_TYPE))
8157 return NULL_TREE;
8158 else
8159 {
8160 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8161 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8162 build_int_cst (integer_type_node,
8163 ~ (unsigned HOST_WIDE_INT) 0x7f));
8164 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8165 arg, integer_zero_node);
8166 }
8167 }
8168
8169 /* Fold a call to builtin toascii with argument ARG. */
8170
8171 static tree
8172 fold_builtin_toascii (location_t loc, tree arg)
8173 {
8174 if (!validate_arg (arg, INTEGER_TYPE))
8175 return NULL_TREE;
8176
8177 /* Transform toascii(c) -> (c & 0x7f). */
8178 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8179 build_int_cst (integer_type_node, 0x7f));
8180 }
8181
8182 /* Fold a call to builtin isdigit with argument ARG. */
8183
8184 static tree
8185 fold_builtin_isdigit (location_t loc, tree arg)
8186 {
8187 if (!validate_arg (arg, INTEGER_TYPE))
8188 return NULL_TREE;
8189 else
8190 {
8191 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8192 /* According to the C standard, isdigit is unaffected by locale.
8193 However, it definitely is affected by the target character set. */
8194 unsigned HOST_WIDE_INT target_digit0
8195 = lang_hooks.to_target_charset ('0');
8196
8197 if (target_digit0 == 0)
8198 return NULL_TREE;
8199
8200 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8201 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8202 build_int_cst (unsigned_type_node, target_digit0));
8203 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8204 build_int_cst (unsigned_type_node, 9));
8205 }
8206 }
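/* Illustrative note (not in the original source): the unsigned subtraction
   above performs both bounds checks at once.  For c == '5' the value
   (unsigned) c - '0' is 5, which passes the <= 9 test; for c == '/' (one
   below '0') the subtraction wraps around to a huge unsigned value and the
   test fails.  */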
8207
8208 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8209
8210 static tree
8211 fold_builtin_fabs (location_t loc, tree arg, tree type)
8212 {
8213 if (!validate_arg (arg, REAL_TYPE))
8214 return NULL_TREE;
8215
8216 arg = fold_convert_loc (loc, type, arg);
8217 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8218 }
8219
8220 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8221
8222 static tree
8223 fold_builtin_abs (location_t loc, tree arg, tree type)
8224 {
8225 if (!validate_arg (arg, INTEGER_TYPE))
8226 return NULL_TREE;
8227
8228 arg = fold_convert_loc (loc, type, arg);
8229 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8230 }
8231
8232 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8233
8234 static tree
8235 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8236 {
8237 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8238 if (validate_arg (arg0, REAL_TYPE)
8239 && validate_arg (arg1, REAL_TYPE)
8240 && validate_arg (arg2, REAL_TYPE)
8241 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8242 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8243
8244 return NULL_TREE;
8245 }
8246
8247 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8248
8249 static tree
8250 fold_builtin_carg (location_t loc, tree arg, tree type)
8251 {
8252 if (validate_arg (arg, COMPLEX_TYPE)
8253 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8254 {
8255 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8256
8257 if (atan2_fn)
8258 {
8259 tree new_arg = builtin_save_expr (arg);
8260 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8261 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8262 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8263 }
8264 }
8265
8266 return NULL_TREE;
8267 }
8268
8269 /* Fold a call to builtin frexp, we can assume the base is 2. */
8270
8271 static tree
8272 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8273 {
8274 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8275 return NULL_TREE;
8276
8277 STRIP_NOPS (arg0);
8278
8279 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8280 return NULL_TREE;
8281
8282 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8283
8284 /* Proceed if a valid pointer type was passed in. */
8285 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8286 {
8287 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8288 tree frac, exp;
8289
8290 switch (value->cl)
8291 {
8292 case rvc_zero:
8293 /* For +-0, return (*exp = 0, +-0). */
8294 exp = integer_zero_node;
8295 frac = arg0;
8296 break;
8297 case rvc_nan:
8298 case rvc_inf:
8299 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8300 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8301 case rvc_normal:
8302 {
8303 /* Since the frexp function always expects base 2, and in
8304 GCC normalized significands are already in the range
8305 [0.5, 1.0), we have exactly what frexp wants. */
8306 REAL_VALUE_TYPE frac_rvt = *value;
8307 SET_REAL_EXP (&frac_rvt, 0);
8308 frac = build_real (rettype, frac_rvt);
8309 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8310 }
8311 break;
8312 default:
8313 gcc_unreachable ();
8314 }
8315
8316 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8317 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8318 TREE_SIDE_EFFECTS (arg1) = 1;
8319 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8320 }
8321
8322 return NULL_TREE;
8323 }
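/* Illustrative note (not in the original source): for a constant argument
   the fold above computes both outputs at compile time.  For a hypothetical
   int E,
     double f = frexp (4.0, &e);
   folds to the equivalent of (e = 3, 0.5), because 4.0 == 0.5 * 2**3 and
   GCC keeps normalized significands in [0.5, 1.0).  */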
8324
8325 /* Fold a call to builtin modf. */
8326
8327 static tree
8328 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8329 {
8330 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8331 return NULL_TREE;
8332
8333 STRIP_NOPS (arg0);
8334
8335 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8336 return NULL_TREE;
8337
8338 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8339
8340 /* Proceed if a valid pointer type was passed in. */
8341 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8342 {
8343 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8344 REAL_VALUE_TYPE trunc, frac;
8345
8346 switch (value->cl)
8347 {
8348 case rvc_nan:
8349 case rvc_zero:
8350 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8351 trunc = frac = *value;
8352 break;
8353 case rvc_inf:
8354 /* For +-Inf, return (*arg1 = arg0, +-0). */
8355 frac = dconst0;
8356 frac.sign = value->sign;
8357 trunc = *value;
8358 break;
8359 case rvc_normal:
8360 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8361 real_trunc (&trunc, VOIDmode, value);
8362 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8363 /* If the original number was negative and already
8364 integral, then the fractional part is -0.0. */
8365 if (value->sign && frac.cl == rvc_zero)
8366 frac.sign = value->sign;
8367 break;
8368 }
8369
8370 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8371 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8372 build_real (rettype, trunc));
8373 TREE_SIDE_EFFECTS (arg1) = 1;
8374 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8375 build_real (rettype, frac));
8376 }
8377
8378 return NULL_TREE;
8379 }
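/* Illustrative note (not in the original source): the analogous constant
   fold for modf.  For a hypothetical double IPTR,
     double f = modf (3.25, &iptr);
   folds to the equivalent of (iptr = 3.0, 0.25); for a negative integral
   input such as -2.0 the fractional part is -0.0, as the rvc_normal case
   above arranges.  */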
8380
8381 /* Given a location LOC, an interclass builtin function decl FNDECL
8382 and its single argument ARG, return a folded expression computing
8383 the same, or NULL_TREE if we either couldn't or didn't want to fold
8384 (the latter happens if there's an RTL instruction available). */
8385
8386 static tree
8387 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8388 {
8389 machine_mode mode;
8390
8391 if (!validate_arg (arg, REAL_TYPE))
8392 return NULL_TREE;
8393
8394 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8395 return NULL_TREE;
8396
8397 mode = TYPE_MODE (TREE_TYPE (arg));
8398
8399 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8400
8401 /* If there is no optab, try generic code. */
8402 switch (DECL_FUNCTION_CODE (fndecl))
8403 {
8404 tree result;
8405
8406 CASE_FLT_FN (BUILT_IN_ISINF):
8407 {
8408 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8409 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8410 tree type = TREE_TYPE (arg);
8411 REAL_VALUE_TYPE r;
8412 char buf[128];
8413
8414 if (is_ibm_extended)
8415 {
8416 /* NaN and Inf are encoded in the high-order double value
8417 only. The low-order value is not significant. */
8418 type = double_type_node;
8419 mode = DFmode;
8420 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8421 }
8422 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8423 real_from_string (&r, buf);
8424 result = build_call_expr (isgr_fn, 2,
8425 fold_build1_loc (loc, ABS_EXPR, type, arg),
8426 build_real (type, r));
8427 return result;
8428 }
8429 CASE_FLT_FN (BUILT_IN_FINITE):
8430 case BUILT_IN_ISFINITE:
8431 {
8432 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8433 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8434 tree type = TREE_TYPE (arg);
8435 REAL_VALUE_TYPE r;
8436 char buf[128];
8437
8438 if (is_ibm_extended)
8439 {
8440 /* NaN and Inf are encoded in the high-order double value
8441 only. The low-order value is not significant. */
8442 type = double_type_node;
8443 mode = DFmode;
8444 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8445 }
8446 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8447 real_from_string (&r, buf);
8448 result = build_call_expr (isle_fn, 2,
8449 fold_build1_loc (loc, ABS_EXPR, type, arg),
8450 build_real (type, r));
8451 /*result = fold_build2_loc (loc, UNGT_EXPR,
8452 TREE_TYPE (TREE_TYPE (fndecl)),
8453 fold_build1_loc (loc, ABS_EXPR, type, arg),
8454 build_real (type, r));
8455 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8456 TREE_TYPE (TREE_TYPE (fndecl)),
8457 result);*/
8458 return result;
8459 }
8460 case BUILT_IN_ISNORMAL:
8461 {
8462 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8463 islessequal(fabs(x),DBL_MAX). */
8464 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8465 tree type = TREE_TYPE (arg);
8466 tree orig_arg, max_exp, min_exp;
8467 machine_mode orig_mode = mode;
8468 REAL_VALUE_TYPE rmax, rmin;
8469 char buf[128];
8470
8471 orig_arg = arg = builtin_save_expr (arg);
8472 if (is_ibm_extended)
8473 {
8474 /* Use double to test the normal range of IBM extended
8475 precision. Emin for IBM extended precision is
8476 different to emin for IEEE double, being 53 higher
8477 since the low double exponent is at least 53 lower
8478 than the high double exponent. */
8479 type = double_type_node;
8480 mode = DFmode;
8481 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8482 }
8483 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8484
8485 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8486 real_from_string (&rmax, buf);
8487 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8488 real_from_string (&rmin, buf);
8489 max_exp = build_real (type, rmax);
8490 min_exp = build_real (type, rmin);
8491
8492 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8493 if (is_ibm_extended)
8494 {
8495 /* Testing the high end of the range is done just using
8496 the high double, using the same test as isfinite().
8497 For the subnormal end of the range we first test the
8498 high double, then if its magnitude is equal to the
8499 limit of 0x1p-969, we test whether the low double is
8500 non-zero and opposite sign to the high double. */
8501 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8502 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8503 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8504 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8505 arg, min_exp);
8506 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8507 complex_double_type_node, orig_arg);
8508 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8509 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8510 tree zero = build_real (type, dconst0);
8511 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8512 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8513 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8514 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8515 fold_build3 (COND_EXPR,
8516 integer_type_node,
8517 hilt, logt, lolt));
8518 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8519 eq_min, ok_lo);
8520 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8521 gt_min, eq_min);
8522 }
8523 else
8524 {
8525 tree const isge_fn
8526 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8527 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8528 }
8529 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8530 max_exp, min_exp);
8531 return result;
8532 }
8533 default:
8534 break;
8535 }
8536
8537 return NULL_TREE;
8538 }
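
/* Editorial sketch (not part of the GCC sources): in user terms, and
   assuming a double argument with the IEEE binary64 format, the folds
   performed above are roughly equivalent to

     #include <math.h>
     #include <float.h>

     int my_isinf (double x)    { return isgreater (fabs (x), DBL_MAX); }
     int my_isfinite (double x) { return islessequal (fabs (x), DBL_MAX); }
     int my_isnormal (double x) { return isgreaterequal (fabs (x), DBL_MIN)
                                         & islessequal (fabs (x), DBL_MAX); }

   where DBL_MAX and DBL_MIN stand in for the values built from
   get_max_float and the 0x1p<emin - 1> string, and the my_* names are
   purely illustrative.  */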
8539
8540 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign or
8541 __builtin_isfinite.  ARG is the argument for the call. */
8542
8543 static tree
8544 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8545 {
8546 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8547
8548 if (!validate_arg (arg, REAL_TYPE))
8549 return NULL_TREE;
8550
8551 switch (builtin_index)
8552 {
8553 case BUILT_IN_ISINF:
8554 if (!HONOR_INFINITIES (arg))
8555 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8556
8557 return NULL_TREE;
8558
8559 case BUILT_IN_ISINF_SIGN:
8560 {
8561 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8562 /* In a boolean context, GCC will fold the inner COND_EXPR to
8563 1. So e.g. "if (isinf_sign(x))" would be folded to just
8564 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8565 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8566 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8567 tree tmp = NULL_TREE;
8568
8569 arg = builtin_save_expr (arg);
8570
8571 if (signbit_fn && isinf_fn)
8572 {
8573 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8574 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8575
8576 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8577 signbit_call, integer_zero_node);
8578 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8579 isinf_call, integer_zero_node);
8580
8581 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8582 integer_minus_one_node, integer_one_node);
8583 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8584 isinf_call, tmp,
8585 integer_zero_node);
8586 }
8587
8588 return tmp;
8589 }
8590
8591 case BUILT_IN_ISFINITE:
8592 if (!HONOR_NANS (arg)
8593 && !HONOR_INFINITIES (arg))
8594 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8595
8596 return NULL_TREE;
8597
8598 case BUILT_IN_ISNAN:
8599 if (!HONOR_NANS (arg))
8600 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8601
8602 {
8603 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8604 if (is_ibm_extended)
8605 {
8606 /* NaN and Inf are encoded in the high-order double value
8607 only. The low-order value is not significant. */
8608 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8609 }
8610 }
8611 arg = builtin_save_expr (arg);
8612 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8613
8614 default:
8615 gcc_unreachable ();
8616 }
8617 }
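
/* Editorial sketch (not part of the GCC sources): when NaNs are honored,
   the BUILT_IN_ISNAN case above folds to an unordered self-comparison,
   which behaves like the illustrative

     static inline int my_isnan (double x)
     { return __builtin_isunordered (x, x); }

   and BUILT_IN_ISINF_SIGN builds the nested conditional
   isinf (x) ? (signbit (x) ? -1 : 1) : 0 described in its case body.  */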
8618
8619 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8620 This builtin will generate code to return the appropriate floating
8621 point classification depending on the value of the floating point
8622 number passed in. The possible return values must be supplied as
8623 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8624 FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
8625 one floating-point argument, which is "type generic". */
8626
8627 static tree
8628 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8629 {
8630 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8631 arg, type, res, tmp;
8632 machine_mode mode;
8633 REAL_VALUE_TYPE r;
8634 char buf[128];
8635
8636 /* Verify the required arguments in the original call. */
8637 if (nargs != 6
8638 || !validate_arg (args[0], INTEGER_TYPE)
8639 || !validate_arg (args[1], INTEGER_TYPE)
8640 || !validate_arg (args[2], INTEGER_TYPE)
8641 || !validate_arg (args[3], INTEGER_TYPE)
8642 || !validate_arg (args[4], INTEGER_TYPE)
8643 || !validate_arg (args[5], REAL_TYPE))
8644 return NULL_TREE;
8645
8646 fp_nan = args[0];
8647 fp_infinite = args[1];
8648 fp_normal = args[2];
8649 fp_subnormal = args[3];
8650 fp_zero = args[4];
8651 arg = args[5];
8652 type = TREE_TYPE (arg);
8653 mode = TYPE_MODE (type);
8654 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8655
8656 /* fpclassify(x) ->
8657 isnan(x) ? FP_NAN :
8658 (fabs(x) == Inf ? FP_INFINITE :
8659 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8660 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8661
8662 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8663 build_real (type, dconst0));
8664 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8665 tmp, fp_zero, fp_subnormal);
8666
8667 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8668 real_from_string (&r, buf);
8669 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8670 arg, build_real (type, r));
8671 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8672
8673 if (HONOR_INFINITIES (mode))
8674 {
8675 real_inf (&r);
8676 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8677 build_real (type, r));
8678 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8679 fp_infinite, res);
8680 }
8681
8682 if (HONOR_NANS (mode))
8683 {
8684 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8685 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8686 }
8687
8688 return res;
8689 }
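
/* Editorial sketch (not part of the GCC sources): for a double argument a
   call such as

     int c = __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                                   FP_SUBNORMAL, FP_ZERO, x);

   is folded into nested conditionals equivalent to

     c = !__builtin_isunordered (x, x)
           ? (fabs (x) == __builtin_inf () ? FP_INFINITE
              : fabs (x) >= DBL_MIN ? FP_NORMAL
              : fabs (x) == 0 ? FP_ZERO : FP_SUBNORMAL)
           : FP_NAN;

   with DBL_MIN standing in for the 0x1p<emin - 1> constant built above.  */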
8690
8691 /* Fold a call to an unordered comparison function such as
8692 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8693 being called and ARG0 and ARG1 are the arguments for the call.
8694 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8695 the opposite of the desired result. UNORDERED_CODE is used
8696 for modes that can hold NaNs and ORDERED_CODE is used for
8697 the rest. */
8698
8699 static tree
8700 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8701 enum tree_code unordered_code,
8702 enum tree_code ordered_code)
8703 {
8704 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8705 enum tree_code code;
8706 tree type0, type1;
8707 enum tree_code code0, code1;
8708 tree cmp_type = NULL_TREE;
8709
8710 type0 = TREE_TYPE (arg0);
8711 type1 = TREE_TYPE (arg1);
8712
8713 code0 = TREE_CODE (type0);
8714 code1 = TREE_CODE (type1);
8715
8716 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8717 /* Choose the wider of two real types. */
8718 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8719 ? type0 : type1;
8720 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8721 cmp_type = type0;
8722 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8723 cmp_type = type1;
8724
8725 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8726 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8727
8728 if (unordered_code == UNORDERED_EXPR)
8729 {
8730 if (!HONOR_NANS (arg0))
8731 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8732 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8733 }
8734
8735 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8736 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8737 fold_build2_loc (loc, code, type, arg0, arg1));
8738 }
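
/* Editorial sketch (not part of the GCC sources): the callers below map the
   ISO C comparison macros onto this helper, so for a mode that honors NaNs
   __builtin_isgreater (a, b) folds to !(a UNLE b), and to !(a <= b)
   otherwise.  The observable behaviour matches the illustrative

     int my_isgreater (double a, double b)
     { return !__builtin_isunordered (a, b) && a > b; }

   which returns 0 for NaN operands without raising FE_INVALID.  */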
8739
8740 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8741 arithmetic if it can never overflow, or into internal functions that
8742 return both the result of the arithmetic and an overflow flag in
8743 a complex integer result, or some other check for overflow.
8744 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8745 checking part of that. */
8746
8747 static tree
8748 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8749 tree arg0, tree arg1, tree arg2)
8750 {
8751 enum internal_fn ifn = IFN_LAST;
8752 /* The code of the expression corresponding to the type-generic
8753 built-in, or ERROR_MARK for the type-specific ones. */
8754 enum tree_code opcode = ERROR_MARK;
8755 bool ovf_only = false;
8756
8757 switch (fcode)
8758 {
8759 case BUILT_IN_ADD_OVERFLOW_P:
8760 ovf_only = true;
8761 /* FALLTHRU */
8762 case BUILT_IN_ADD_OVERFLOW:
8763 opcode = PLUS_EXPR;
8764 /* FALLTHRU */
8765 case BUILT_IN_SADD_OVERFLOW:
8766 case BUILT_IN_SADDL_OVERFLOW:
8767 case BUILT_IN_SADDLL_OVERFLOW:
8768 case BUILT_IN_UADD_OVERFLOW:
8769 case BUILT_IN_UADDL_OVERFLOW:
8770 case BUILT_IN_UADDLL_OVERFLOW:
8771 ifn = IFN_ADD_OVERFLOW;
8772 break;
8773 case BUILT_IN_SUB_OVERFLOW_P:
8774 ovf_only = true;
8775 /* FALLTHRU */
8776 case BUILT_IN_SUB_OVERFLOW:
8777 opcode = MINUS_EXPR;
8778 /* FALLTHRU */
8779 case BUILT_IN_SSUB_OVERFLOW:
8780 case BUILT_IN_SSUBL_OVERFLOW:
8781 case BUILT_IN_SSUBLL_OVERFLOW:
8782 case BUILT_IN_USUB_OVERFLOW:
8783 case BUILT_IN_USUBL_OVERFLOW:
8784 case BUILT_IN_USUBLL_OVERFLOW:
8785 ifn = IFN_SUB_OVERFLOW;
8786 break;
8787 case BUILT_IN_MUL_OVERFLOW_P:
8788 ovf_only = true;
8789 /* FALLTHRU */
8790 case BUILT_IN_MUL_OVERFLOW:
8791 opcode = MULT_EXPR;
8792 /* FALLTHRU */
8793 case BUILT_IN_SMUL_OVERFLOW:
8794 case BUILT_IN_SMULL_OVERFLOW:
8795 case BUILT_IN_SMULLL_OVERFLOW:
8796 case BUILT_IN_UMUL_OVERFLOW:
8797 case BUILT_IN_UMULL_OVERFLOW:
8798 case BUILT_IN_UMULLL_OVERFLOW:
8799 ifn = IFN_MUL_OVERFLOW;
8800 break;
8801 default:
8802 gcc_unreachable ();
8803 }
8804
8805 /* For the "generic" overloads, the first two arguments can have different
8806 types and the last argument determines the target type to use to check
8807 for overflow. The arguments of the other overloads all have the same
8808 type. */
8809 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8810
8811 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8812 arguments are constant, attempt to fold the built-in call into a constant
8813 expression indicating whether or not it detected an overflow. */
8814 if (ovf_only
8815 && TREE_CODE (arg0) == INTEGER_CST
8816 && TREE_CODE (arg1) == INTEGER_CST)
8817 /* Perform the computation in the target type and check for overflow. */
8818 return omit_one_operand_loc (loc, boolean_type_node,
8819 arith_overflowed_p (opcode, type, arg0, arg1)
8820 ? boolean_true_node : boolean_false_node,
8821 arg2);
8822
8823 tree ctype = build_complex_type (type);
8824 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8825 2, arg0, arg1);
8826 tree tgt = save_expr (call);
8827 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8828 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8829 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8830
8831 if (ovf_only)
8832 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8833
8834 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8835 tree store
8836 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8837 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8838 }
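
/* Editorial sketch (not part of the GCC sources): a user-level call such as

     int res;
     _Bool ovf = __builtin_add_overflow (a, b, &res);

   is folded by the function above into roughly

     tmp = .ADD_OVERFLOW (a, b);           (the IFN_ADD_OVERFLOW call)
     res = REALPART_EXPR <tmp>;
     ovf = (_Bool) IMAGPART_EXPR <tmp>;

   where tmp is an illustrative name for the save_expr of the internal
   call.  __builtin_add_overflow_p (a, b, (int) 0) with constant operands
   is instead folded directly to a boolean constant via arith_overflowed_p.  */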
8839
8840 /* Fold a call to __builtin_FILE to a constant string. */
8841
8842 static inline tree
8843 fold_builtin_FILE (location_t loc)
8844 {
8845 if (const char *fname = LOCATION_FILE (loc))
8846 return build_string_literal (strlen (fname) + 1, fname);
8847
8848 return build_string_literal (1, "");
8849 }
8850
8851 /* Fold a call to __builtin_FUNCTION to a constant string. */
8852
8853 static inline tree
8854 fold_builtin_FUNCTION ()
8855 {
8856 const char *name = "";
8857
8858 if (current_function_decl)
8859 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8860
8861 return build_string_literal (strlen (name) + 1, name);
8862 }
8863
8864 /* Fold a call to __builtin_LINE to an integer constant. */
8865
8866 static inline tree
8867 fold_builtin_LINE (location_t loc, tree type)
8868 {
8869 return build_int_cst (type, LOCATION_LINE (loc));
8870 }
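
/* Editorial sketch (not part of the GCC sources): the three folders above
   turn the source-location builtins into compile-time constants at the
   point of the call, e.g.

     void log_here (void)
     {
       __builtin_printf ("%s:%d in %s\n", __builtin_FILE (),
                         __builtin_LINE (), __builtin_FUNCTION ());
     }

   where __builtin_FILE and __builtin_FUNCTION become string literals and
   __builtin_LINE an integer constant.  */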
8871
8872 /* Fold a call to built-in function FNDECL with 0 arguments.
8873 This function returns NULL_TREE if no simplification was possible. */
8874
8875 static tree
8876 fold_builtin_0 (location_t loc, tree fndecl)
8877 {
8878 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8879 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8880 switch (fcode)
8881 {
8882 case BUILT_IN_FILE:
8883 return fold_builtin_FILE (loc);
8884
8885 case BUILT_IN_FUNCTION:
8886 return fold_builtin_FUNCTION ();
8887
8888 case BUILT_IN_LINE:
8889 return fold_builtin_LINE (loc, type);
8890
8891 CASE_FLT_FN (BUILT_IN_INF):
8892 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8893 case BUILT_IN_INFD32:
8894 case BUILT_IN_INFD64:
8895 case BUILT_IN_INFD128:
8896 return fold_builtin_inf (loc, type, true);
8897
8898 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8899 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8900 return fold_builtin_inf (loc, type, false);
8901
8902 case BUILT_IN_CLASSIFY_TYPE:
8903 return fold_builtin_classify_type (NULL_TREE);
8904
8905 default:
8906 break;
8907 }
8908 return NULL_TREE;
8909 }
8910
8911 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8912 This function returns NULL_TREE if no simplification was possible. */
8913
8914 static tree
8915 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8916 {
8917 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8918 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8919
8920 if (TREE_CODE (arg0) == ERROR_MARK)
8921 return NULL_TREE;
8922
8923 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8924 return ret;
8925
8926 switch (fcode)
8927 {
8928 case BUILT_IN_CONSTANT_P:
8929 {
8930 tree val = fold_builtin_constant_p (arg0);
8931
8932 /* Gimplification will pull the CALL_EXPR for the builtin out of
8933 an if condition. When not optimizing, we'll not CSE it back.
8934 To avoid regressions in the form of link errors, return false now. */
8935 if (!val && !optimize)
8936 val = integer_zero_node;
8937
8938 return val;
8939 }
8940
8941 case BUILT_IN_CLASSIFY_TYPE:
8942 return fold_builtin_classify_type (arg0);
8943
8944 case BUILT_IN_STRLEN:
8945 return fold_builtin_strlen (loc, type, arg0);
8946
8947 CASE_FLT_FN (BUILT_IN_FABS):
8948 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8949 case BUILT_IN_FABSD32:
8950 case BUILT_IN_FABSD64:
8951 case BUILT_IN_FABSD128:
8952 return fold_builtin_fabs (loc, arg0, type);
8953
8954 case BUILT_IN_ABS:
8955 case BUILT_IN_LABS:
8956 case BUILT_IN_LLABS:
8957 case BUILT_IN_IMAXABS:
8958 return fold_builtin_abs (loc, arg0, type);
8959
8960 CASE_FLT_FN (BUILT_IN_CONJ):
8961 if (validate_arg (arg0, COMPLEX_TYPE)
8962 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8963 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8964 break;
8965
8966 CASE_FLT_FN (BUILT_IN_CREAL):
8967 if (validate_arg (arg0, COMPLEX_TYPE)
8968 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8969 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8970 break;
8971
8972 CASE_FLT_FN (BUILT_IN_CIMAG):
8973 if (validate_arg (arg0, COMPLEX_TYPE)
8974 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8975 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8976 break;
8977
8978 CASE_FLT_FN (BUILT_IN_CARG):
8979 return fold_builtin_carg (loc, arg0, type);
8980
8981 case BUILT_IN_ISASCII:
8982 return fold_builtin_isascii (loc, arg0);
8983
8984 case BUILT_IN_TOASCII:
8985 return fold_builtin_toascii (loc, arg0);
8986
8987 case BUILT_IN_ISDIGIT:
8988 return fold_builtin_isdigit (loc, arg0);
8989
8990 CASE_FLT_FN (BUILT_IN_FINITE):
8991 case BUILT_IN_FINITED32:
8992 case BUILT_IN_FINITED64:
8993 case BUILT_IN_FINITED128:
8994 case BUILT_IN_ISFINITE:
8995 {
8996 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8997 if (ret)
8998 return ret;
8999 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9000 }
9001
9002 CASE_FLT_FN (BUILT_IN_ISINF):
9003 case BUILT_IN_ISINFD32:
9004 case BUILT_IN_ISINFD64:
9005 case BUILT_IN_ISINFD128:
9006 {
9007 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9008 if (ret)
9009 return ret;
9010 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9011 }
9012
9013 case BUILT_IN_ISNORMAL:
9014 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9015
9016 case BUILT_IN_ISINF_SIGN:
9017 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9018
9019 CASE_FLT_FN (BUILT_IN_ISNAN):
9020 case BUILT_IN_ISNAND32:
9021 case BUILT_IN_ISNAND64:
9022 case BUILT_IN_ISNAND128:
9023 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9024
9025 case BUILT_IN_FREE:
9026 if (integer_zerop (arg0))
9027 return build_empty_stmt (loc);
9028 break;
9029
9030 default:
9031 break;
9032 }
9033
9034 return NULL_TREE;
9035
9036 }
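
/* Editorial sketch (not part of the GCC sources): two of the single-argument
   folds above in user terms.  __builtin_constant_p (n) only folds to 1 when
   N is provably constant, and without optimization it is resolved to 0
   early (see the BUILT_IN_CONSTANT_P case), so code such as

     if (__builtin_constant_p (n) && n < 8)
       use_small_fixed_path ();        (illustrative helper name)

   must not rely on that branch being taken.  Likewise free (NULL) is folded
   to an empty statement by the BUILT_IN_FREE case.  */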
9037
9038 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9039 This function returns NULL_TREE if no simplification was possible. */
9040
9041 static tree
9042 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9043 {
9044 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9045 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9046
9047 if (TREE_CODE (arg0) == ERROR_MARK
9048 || TREE_CODE (arg1) == ERROR_MARK)
9049 return NULL_TREE;
9050
9051 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9052 return ret;
9053
9054 switch (fcode)
9055 {
9056 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9057 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9058 if (validate_arg (arg0, REAL_TYPE)
9059 && validate_arg (arg1, POINTER_TYPE))
9060 return do_mpfr_lgamma_r (arg0, arg1, type);
9061 break;
9062
9063 CASE_FLT_FN (BUILT_IN_FREXP):
9064 return fold_builtin_frexp (loc, arg0, arg1, type);
9065
9066 CASE_FLT_FN (BUILT_IN_MODF):
9067 return fold_builtin_modf (loc, arg0, arg1, type);
9068
9069 case BUILT_IN_STRSPN:
9070 return fold_builtin_strspn (loc, arg0, arg1);
9071
9072 case BUILT_IN_STRCSPN:
9073 return fold_builtin_strcspn (loc, arg0, arg1);
9074
9075 case BUILT_IN_STRPBRK:
9076 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9077
9078 case BUILT_IN_EXPECT:
9079 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9080
9081 case BUILT_IN_ISGREATER:
9082 return fold_builtin_unordered_cmp (loc, fndecl,
9083 arg0, arg1, UNLE_EXPR, LE_EXPR);
9084 case BUILT_IN_ISGREATEREQUAL:
9085 return fold_builtin_unordered_cmp (loc, fndecl,
9086 arg0, arg1, UNLT_EXPR, LT_EXPR);
9087 case BUILT_IN_ISLESS:
9088 return fold_builtin_unordered_cmp (loc, fndecl,
9089 arg0, arg1, UNGE_EXPR, GE_EXPR);
9090 case BUILT_IN_ISLESSEQUAL:
9091 return fold_builtin_unordered_cmp (loc, fndecl,
9092 arg0, arg1, UNGT_EXPR, GT_EXPR);
9093 case BUILT_IN_ISLESSGREATER:
9094 return fold_builtin_unordered_cmp (loc, fndecl,
9095 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9096 case BUILT_IN_ISUNORDERED:
9097 return fold_builtin_unordered_cmp (loc, fndecl,
9098 arg0, arg1, UNORDERED_EXPR,
9099 NOP_EXPR);
9100
9101 /* We do the folding for va_start in the expander. */
9102 case BUILT_IN_VA_START:
9103 break;
9104
9105 case BUILT_IN_OBJECT_SIZE:
9106 return fold_builtin_object_size (arg0, arg1);
9107
9108 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9109 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9110
9111 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9112 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9113
9114 default:
9115 break;
9116 }
9117 return NULL_TREE;
9118 }
9119
9120 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9121 and ARG2.
9122 This function returns NULL_TREE if no simplification was possible. */
9123
9124 static tree
9125 fold_builtin_3 (location_t loc, tree fndecl,
9126 tree arg0, tree arg1, tree arg2)
9127 {
9128 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9129 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9130
9131 if (TREE_CODE (arg0) == ERROR_MARK
9132 || TREE_CODE (arg1) == ERROR_MARK
9133 || TREE_CODE (arg2) == ERROR_MARK)
9134 return NULL_TREE;
9135
9136 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9137 arg0, arg1, arg2))
9138 return ret;
9139
9140 switch (fcode)
9141 {
9142
9143 CASE_FLT_FN (BUILT_IN_SINCOS):
9144 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9145
9146 CASE_FLT_FN (BUILT_IN_FMA):
9147 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9148 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9149
9150 CASE_FLT_FN (BUILT_IN_REMQUO):
9151 if (validate_arg (arg0, REAL_TYPE)
9152 && validate_arg (arg1, REAL_TYPE)
9153 && validate_arg (arg2, POINTER_TYPE))
9154 return do_mpfr_remquo (arg0, arg1, arg2);
9155 break;
9156
9157 case BUILT_IN_MEMCMP:
9158 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9159
9160 case BUILT_IN_EXPECT:
9161 return fold_builtin_expect (loc, arg0, arg1, arg2);
9162
9163 case BUILT_IN_ADD_OVERFLOW:
9164 case BUILT_IN_SUB_OVERFLOW:
9165 case BUILT_IN_MUL_OVERFLOW:
9166 case BUILT_IN_ADD_OVERFLOW_P:
9167 case BUILT_IN_SUB_OVERFLOW_P:
9168 case BUILT_IN_MUL_OVERFLOW_P:
9169 case BUILT_IN_SADD_OVERFLOW:
9170 case BUILT_IN_SADDL_OVERFLOW:
9171 case BUILT_IN_SADDLL_OVERFLOW:
9172 case BUILT_IN_SSUB_OVERFLOW:
9173 case BUILT_IN_SSUBL_OVERFLOW:
9174 case BUILT_IN_SSUBLL_OVERFLOW:
9175 case BUILT_IN_SMUL_OVERFLOW:
9176 case BUILT_IN_SMULL_OVERFLOW:
9177 case BUILT_IN_SMULLL_OVERFLOW:
9178 case BUILT_IN_UADD_OVERFLOW:
9179 case BUILT_IN_UADDL_OVERFLOW:
9180 case BUILT_IN_UADDLL_OVERFLOW:
9181 case BUILT_IN_USUB_OVERFLOW:
9182 case BUILT_IN_USUBL_OVERFLOW:
9183 case BUILT_IN_USUBLL_OVERFLOW:
9184 case BUILT_IN_UMUL_OVERFLOW:
9185 case BUILT_IN_UMULL_OVERFLOW:
9186 case BUILT_IN_UMULLL_OVERFLOW:
9187 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9188
9189 default:
9190 break;
9191 }
9192 return NULL_TREE;
9193 }
9194
9195 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9196 arguments. IGNORE is true if the result of the
9197 function call is ignored. This function returns NULL_TREE if no
9198 simplification was possible. */
9199
9200 tree
9201 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9202 {
9203 tree ret = NULL_TREE;
9204
9205 switch (nargs)
9206 {
9207 case 0:
9208 ret = fold_builtin_0 (loc, fndecl);
9209 break;
9210 case 1:
9211 ret = fold_builtin_1 (loc, fndecl, args[0]);
9212 break;
9213 case 2:
9214 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9215 break;
9216 case 3:
9217 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9218 break;
9219 default:
9220 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9221 break;
9222 }
9223 if (ret)
9224 {
9225 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9226 SET_EXPR_LOCATION (ret, loc);
9227 TREE_NO_WARNING (ret) = 1;
9228 return ret;
9229 }
9230 return NULL_TREE;
9231 }
9232
9233 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9234 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9235 of arguments in ARGS to be omitted. OLDNARGS is the number of
9236 elements in ARGS. */
9237
9238 static tree
9239 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9240 int skip, tree fndecl, int n, va_list newargs)
9241 {
9242 int nargs = oldnargs - skip + n;
9243 tree *buffer;
9244
9245 if (n > 0)
9246 {
9247 int i, j;
9248
9249 buffer = XALLOCAVEC (tree, nargs);
9250 for (i = 0; i < n; i++)
9251 buffer[i] = va_arg (newargs, tree);
9252 for (j = skip; j < oldnargs; j++, i++)
9253 buffer[i] = args[j];
9254 }
9255 else
9256 buffer = args + skip;
9257
9258 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9259 }
9260
9261 /* Return true if FNDECL shouldn't be folded right now.
9262 If a built-in function has an inline attribute always_inline
9263 wrapper, defer folding it until after always_inline functions have
9264 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9265 might not be performed. */
9266
9267 bool
9268 avoid_folding_inline_builtin (tree fndecl)
9269 {
9270 return (DECL_DECLARED_INLINE_P (fndecl)
9271 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9272 && cfun
9273 && !cfun->always_inline_functions_inlined
9274 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9275 }
9276
9277 /* A wrapper function for builtin folding that prevents warnings for
9278 "statement without effect" and the like, caused by removing the
9279 call node earlier than the warning is generated. */
9280
9281 tree
9282 fold_call_expr (location_t loc, tree exp, bool ignore)
9283 {
9284 tree ret = NULL_TREE;
9285 tree fndecl = get_callee_fndecl (exp);
9286 if (fndecl
9287 && TREE_CODE (fndecl) == FUNCTION_DECL
9288 && DECL_BUILT_IN (fndecl)
9289 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9290 yet. Defer folding until we see all the arguments
9291 (after inlining). */
9292 && !CALL_EXPR_VA_ARG_PACK (exp))
9293 {
9294 int nargs = call_expr_nargs (exp);
9295
9296 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9297 instead last argument is __builtin_va_arg_pack (). Defer folding
9298 even in that case, until arguments are finalized. */
9299 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9300 {
9301 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9302 if (fndecl2
9303 && TREE_CODE (fndecl2) == FUNCTION_DECL
9304 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9305 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9306 return NULL_TREE;
9307 }
9308
9309 if (avoid_folding_inline_builtin (fndecl))
9310 return NULL_TREE;
9311
9312 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9313 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9314 CALL_EXPR_ARGP (exp), ignore);
9315 else
9316 {
9317 tree *args = CALL_EXPR_ARGP (exp);
9318 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9319 if (ret)
9320 return ret;
9321 }
9322 }
9323 return NULL_TREE;
9324 }
9325
9326 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9327 N arguments are passed in the array ARGARRAY. Return a folded
9328 expression or NULL_TREE if no simplification was possible. */
9329
9330 tree
9331 fold_builtin_call_array (location_t loc, tree,
9332 tree fn,
9333 int n,
9334 tree *argarray)
9335 {
9336 if (TREE_CODE (fn) != ADDR_EXPR)
9337 return NULL_TREE;
9338
9339 tree fndecl = TREE_OPERAND (fn, 0);
9340 if (TREE_CODE (fndecl) == FUNCTION_DECL
9341 && DECL_BUILT_IN (fndecl))
9342 {
9343 /* If last argument is __builtin_va_arg_pack (), arguments to this
9344 function are not finalized yet. Defer folding until they are. */
9345 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9346 {
9347 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9348 if (fndecl2
9349 && TREE_CODE (fndecl2) == FUNCTION_DECL
9350 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9351 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9352 return NULL_TREE;
9353 }
9354 if (avoid_folding_inline_builtin (fndecl))
9355 return NULL_TREE;
9356 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9357 return targetm.fold_builtin (fndecl, n, argarray, false);
9358 else
9359 return fold_builtin_n (loc, fndecl, argarray, n, false);
9360 }
9361
9362 return NULL_TREE;
9363 }
9364
9365 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9366 along with N new arguments specified as the "..." parameters. SKIP
9367 is the number of arguments in EXP to be omitted. This function is used
9368 to do varargs-to-varargs transformations. */
9369
9370 static tree
9371 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9372 {
9373 va_list ap;
9374 tree t;
9375
9376 va_start (ap, n);
9377 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9378 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9379 va_end (ap);
9380
9381 return t;
9382 }
9383
9384 /* Validate a single argument ARG against a tree code CODE representing
9385 a type. Return true when argument is valid. */
9386
9387 static bool
9388 validate_arg (const_tree arg, enum tree_code code)
9389 {
9390 if (!arg)
9391 return false;
9392 else if (code == POINTER_TYPE)
9393 return POINTER_TYPE_P (TREE_TYPE (arg));
9394 else if (code == INTEGER_TYPE)
9395 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9396 return code == TREE_CODE (TREE_TYPE (arg));
9397 }
9398
9399 /* This function validates the types of a function call argument list
9400 against a specified list of tree_codes. If the last specifier is a 0,
9401 that represents an ellipsis; otherwise the last specifier must be a
9402 VOID_TYPE.
9403
9404 This is the GIMPLE version of validate_arglist. Eventually we want to
9405 completely convert builtins.c to work from GIMPLEs and the tree based
9406 validate_arglist will then be removed. */
9407
9408 bool
9409 validate_gimple_arglist (const gcall *call, ...)
9410 {
9411 enum tree_code code;
9412 bool res = false;
9413 va_list ap;
9414 const_tree arg;
9415 size_t i;
9416
9417 va_start (ap, call);
9418 i = 0;
9419
9420 do
9421 {
9422 code = (enum tree_code) va_arg (ap, int);
9423 switch (code)
9424 {
9425 case 0:
9426 /* This signifies an ellipsis; any further arguments are all OK. */
9427 res = true;
9428 goto end;
9429 case VOID_TYPE:
9430 /* This signifies an endlink, if no arguments remain, return
9431 true, otherwise return false. */
9432 res = (i == gimple_call_num_args (call));
9433 goto end;
9434 default:
9435 /* If no parameters remain or the parameter's code does not
9436 match the specified code, return false. Otherwise continue
9437 checking any remaining arguments. */
9438 arg = gimple_call_arg (call, i++);
9439 if (!validate_arg (arg, code))
9440 goto end;
9441 break;
9442 }
9443 }
9444 while (1);
9445
9446 /* We need gotos here since we can only have one VA_CLOSE in a
9447 function. */
9448 end: ;
9449 va_end (ap);
9450
9451 return res;
9452 }
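
/* Editorial sketch (not part of the GCC sources): callers list the expected
   argument tree codes terminated by VOID_TYPE (or by 0 for a trailing
   ellipsis), for example

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE))
       return false;

   which accepts exactly one floating-point argument followed by one
   pointer argument.  */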
9453
9454 /* Default target-specific builtin expander that does nothing. */
9455
9456 rtx
9457 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9458 rtx target ATTRIBUTE_UNUSED,
9459 rtx subtarget ATTRIBUTE_UNUSED,
9460 machine_mode mode ATTRIBUTE_UNUSED,
9461 int ignore ATTRIBUTE_UNUSED)
9462 {
9463 return NULL_RTX;
9464 }
9465
9466 /* Returns true if EXP represents data that would potentially reside
9467 in a readonly section. */
9468
9469 bool
9470 readonly_data_expr (tree exp)
9471 {
9472 STRIP_NOPS (exp);
9473
9474 if (TREE_CODE (exp) != ADDR_EXPR)
9475 return false;
9476
9477 exp = get_base_address (TREE_OPERAND (exp, 0));
9478 if (!exp)
9479 return false;
9480
9481 /* Make sure we call decl_readonly_section only for trees it
9482 can handle (since it returns true for everything it doesn't
9483 understand). */
9484 if (TREE_CODE (exp) == STRING_CST
9485 || TREE_CODE (exp) == CONSTRUCTOR
9486 || (VAR_P (exp) && TREE_STATIC (exp)))
9487 return decl_readonly_section (exp, 0);
9488 else
9489 return false;
9490 }
9491
9492 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9493 to the call, and TYPE is its return type.
9494
9495 Return NULL_TREE if no simplification was possible, otherwise return the
9496 simplified form of the call as a tree.
9497
9498 The simplified form may be a constant or other expression which
9499 computes the same value, but in a more efficient manner (including
9500 calls to other builtin functions).
9501
9502 The call may contain arguments which need to be evaluated, but
9503 which are not useful to determine the result of the call. In
9504 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9505 COMPOUND_EXPR will be an argument which must be evaluated.
9506 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9507 COMPOUND_EXPR in the chain will contain the tree for the simplified
9508 form of the builtin function call. */
9509
9510 static tree
9511 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9512 {
9513 if (!validate_arg (s1, POINTER_TYPE)
9514 || !validate_arg (s2, POINTER_TYPE))
9515 return NULL_TREE;
9516 else
9517 {
9518 tree fn;
9519 const char *p1, *p2;
9520
9521 p2 = c_getstr (s2);
9522 if (p2 == NULL)
9523 return NULL_TREE;
9524
9525 p1 = c_getstr (s1);
9526 if (p1 != NULL)
9527 {
9528 const char *r = strpbrk (p1, p2);
9529 tree tem;
9530
9531 if (r == NULL)
9532 return build_int_cst (TREE_TYPE (s1), 0);
9533
9534 /* Return an offset into the constant string argument. */
9535 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9536 return fold_convert_loc (loc, type, tem);
9537 }
9538
9539 if (p2[0] == '\0')
9540 /* strpbrk(x, "") == NULL.
9541 Evaluate and ignore s1 in case it has side-effects. */
9542 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9543
9544 if (p2[1] != '\0')
9545 return NULL_TREE; /* Really call strpbrk. */
9546
9547 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9548 if (!fn)
9549 return NULL_TREE;
9550
9551 /* New argument list transforming strpbrk(s1, s2) to
9552 strchr(s1, s2[0]). */
9553 return build_call_expr_loc (loc, fn, 2, s1,
9554 build_int_cst (integer_type_node, p2[0]));
9555 }
9556 }
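
/* Editorial sketch (not part of the GCC sources): the strpbrk folds above in
   user terms, assuming S2 is a string literal:

     strpbrk (s, "")      ->  (char *) 0        (s is still evaluated)
     strpbrk (s, "a")     ->  strchr (s, 'a')
     strpbrk ("ab", "xb") ->  "ab" + 1          (constant offset into S1)

   A multi-character S2 with a non-constant S1 is left as a real strpbrk
   call.  */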
9557
9558 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9559 to the call.
9560
9561 Return NULL_TREE if no simplification was possible, otherwise return the
9562 simplified form of the call as a tree.
9563
9564 The simplified form may be a constant or other expression which
9565 computes the same value, but in a more efficient manner (including
9566 calls to other builtin functions).
9567
9568 The call may contain arguments which need to be evaluated, but
9569 which are not useful to determine the result of the call. In
9570 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9571 COMPOUND_EXPR will be an argument which must be evaluated.
9572 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9573 COMPOUND_EXPR in the chain will contain the tree for the simplified
9574 form of the builtin function call. */
9575
9576 static tree
9577 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9578 {
9579 if (!validate_arg (s1, POINTER_TYPE)
9580 || !validate_arg (s2, POINTER_TYPE))
9581 return NULL_TREE;
9582 else
9583 {
9584 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9585
9586 /* If either argument is "", return NULL_TREE. */
9587 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9588 /* Evaluate and ignore both arguments in case either one has
9589 side-effects. */
9590 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9591 s1, s2);
9592 return NULL_TREE;
9593 }
9594 }
9595
9596 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9597 to the call.
9598
9599 Return NULL_TREE if no simplification was possible, otherwise return the
9600 simplified form of the call as a tree.
9601
9602 The simplified form may be a constant or other expression which
9603 computes the same value, but in a more efficient manner (including
9604 calls to other builtin functions).
9605
9606 The call may contain arguments which need to be evaluated, but
9607 which are not useful to determine the result of the call. In
9608 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9609 COMPOUND_EXPR will be an argument which must be evaluated.
9610 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9611 COMPOUND_EXPR in the chain will contain the tree for the simplified
9612 form of the builtin function call. */
9613
9614 static tree
9615 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9616 {
9617 if (!validate_arg (s1, POINTER_TYPE)
9618 || !validate_arg (s2, POINTER_TYPE))
9619 return NULL_TREE;
9620 else
9621 {
9622 /* If the first argument is "", return NULL_TREE. */
9623 const char *p1 = c_getstr (s1);
9624 if (p1 && *p1 == '\0')
9625 {
9626 /* Evaluate and ignore argument s2 in case it has
9627 side-effects. */
9628 return omit_one_operand_loc (loc, size_type_node,
9629 size_zero_node, s2);
9630 }
9631
9632 /* If the second argument is "", return __builtin_strlen(s1). */
9633 const char *p2 = c_getstr (s2);
9634 if (p2 && *p2 == '\0')
9635 {
9636 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9637
9638 /* If the replacement _DECL isn't initialized, don't do the
9639 transformation. */
9640 if (!fn)
9641 return NULL_TREE;
9642
9643 return build_call_expr_loc (loc, fn, 1, s1);
9644 }
9645 return NULL_TREE;
9646 }
9647 }
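
/* Editorial sketch (not part of the GCC sources): the strspn and strcspn
   folds above in user terms:

     strspn (s, "")    ->  0            (both arguments still evaluated)
     strspn ("", s)    ->  0
     strcspn ("", s)   ->  0            (s still evaluated)
     strcspn (s, "")   ->  strlen (s)

   Every other form is left as a real library call.  */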
9648
9649 /* Fold the next_arg or va_start call EXP.  Returns true if an error was
9650 produced, false otherwise.  This is done so that we don't output the
9651 error or warning two or three times. */
9652
9653 bool
9654 fold_builtin_next_arg (tree exp, bool va_start_p)
9655 {
9656 tree fntype = TREE_TYPE (current_function_decl);
9657 int nargs = call_expr_nargs (exp);
9658 tree arg;
9659 /* There is a good chance the current input_location points inside the
9660 definition of the va_start macro (perhaps on the token for
9661 builtin) in a system header, so warnings will not be emitted.
9662 Use the location in real source code. */
9663 source_location current_location =
9664 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9665 NULL);
9666
9667 if (!stdarg_p (fntype))
9668 {
9669 error ("%<va_start%> used in function with fixed args");
9670 return true;
9671 }
9672
9673 if (va_start_p)
9674 {
9675 if (va_start_p && (nargs != 2))
9676 {
9677 error ("wrong number of arguments to function %<va_start%>");
9678 return true;
9679 }
9680 arg = CALL_EXPR_ARG (exp, 1);
9681 }
9682 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9683 when we checked the arguments and if needed issued a warning. */
9684 else
9685 {
9686 if (nargs == 0)
9687 {
9688 /* Evidently an out of date version of <stdarg.h>; can't validate
9689 va_start's second argument, but can still work as intended. */
9690 warning_at (current_location,
9691 OPT_Wvarargs,
9692 "%<__builtin_next_arg%> called without an argument");
9693 return true;
9694 }
9695 else if (nargs > 1)
9696 {
9697 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9698 return true;
9699 }
9700 arg = CALL_EXPR_ARG (exp, 0);
9701 }
9702
9703 if (TREE_CODE (arg) == SSA_NAME)
9704 arg = SSA_NAME_VAR (arg);
9705
9706 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9707 or __builtin_next_arg (0) the first time we see it, after checking
9708 the arguments and if needed issuing a warning. */
9709 if (!integer_zerop (arg))
9710 {
9711 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9712
9713 /* Strip off all nops for the sake of the comparison. This
9714 is not quite the same as STRIP_NOPS. It does more.
9715 We must also strip off INDIRECT_EXPR for C++ reference
9716 parameters. */
9717 while (CONVERT_EXPR_P (arg)
9718 || TREE_CODE (arg) == INDIRECT_REF)
9719 arg = TREE_OPERAND (arg, 0);
9720 if (arg != last_parm)
9721 {
9722 /* FIXME: Sometimes with the tree optimizers we can end up with
9723 something other than the last argument even though the user
9724 used the last argument.  We just warn and set the arg to be
9725 the last argument, so we may generate wrong code because of
9726 it. */
9727 warning_at (current_location,
9728 OPT_Wvarargs,
9729 "second parameter of %<va_start%> not last named argument");
9730 }
9731
9732 /* Undefined by C99 7.15.1.4p4 (va_start):
9733 "If the parameter parmN is declared with the register storage
9734 class, with a function or array type, or with a type that is
9735 not compatible with the type that results after application of
9736 the default argument promotions, the behavior is undefined."
9737 */
9738 else if (DECL_REGISTER (arg))
9739 {
9740 warning_at (current_location,
9741 OPT_Wvarargs,
9742 "undefined behavior when second parameter of "
9743 "%<va_start%> is declared with %<register%> storage");
9744 }
9745
9746 /* We want to verify the second parameter just once before the tree
9747 optimizers are run and then avoid keeping it in the tree,
9748 as otherwise we could warn even for correct code like:
9749 void foo (int i, ...)
9750 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9751 if (va_start_p)
9752 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9753 else
9754 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9755 }
9756 return false;
9757 }
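
/* Editorial sketch (not part of the GCC sources): the checks above reject or
   warn about misuses such as

     #include <stdarg.h>
     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
       va_end (ap);
     }

   which draws "second parameter of va_start not last named argument"
   under -Wvarargs; afterwards the call is rewritten to
   __builtin_va_start (ap, 0) so the argument is not re-checked by later
   passes.  */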
9758
9759
9760 /* Expand a call EXP to __builtin_object_size. */
9761
9762 static rtx
9763 expand_builtin_object_size (tree exp)
9764 {
9765 tree ost;
9766 int object_size_type;
9767 tree fndecl = get_callee_fndecl (exp);
9768
9769 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9770 {
9771 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9772 exp, fndecl);
9773 expand_builtin_trap ();
9774 return const0_rtx;
9775 }
9776
9777 ost = CALL_EXPR_ARG (exp, 1);
9778 STRIP_NOPS (ost);
9779
9780 if (TREE_CODE (ost) != INTEGER_CST
9781 || tree_int_cst_sgn (ost) < 0
9782 || compare_tree_int (ost, 3) > 0)
9783 {
9784 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9785 exp, fndecl);
9786 expand_builtin_trap ();
9787 return const0_rtx;
9788 }
9789
9790 object_size_type = tree_to_shwi (ost);
9791
9792 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9793 }
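
/* Editorial sketch (not part of the GCC sources): if the earlier object-size
   pass could not fold the call, the expander above falls back to the
   documented "unknown" answers:

     size_t a = __builtin_object_size (p, 0);      (size_t) -1 if unknown
     size_t b = __builtin_object_size (p, 2);      (size_t)  0 if unknown

   matching the constm1_rtx / const0_rtx choice for types 0-1 versus 2-3.  */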
9794
9795 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9796 FCODE is the BUILT_IN_* to use.
9797 Return NULL_RTX if we failed; the caller should emit a normal call,
9798 otherwise try to get the result in TARGET, if convenient (and in
9799 mode MODE if that's convenient). */
9800
9801 static rtx
9802 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9803 enum built_in_function fcode)
9804 {
9805 if (!validate_arglist (exp,
9806 POINTER_TYPE,
9807 fcode == BUILT_IN_MEMSET_CHK
9808 ? INTEGER_TYPE : POINTER_TYPE,
9809 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9810 return NULL_RTX;
9811
9812 tree dest = CALL_EXPR_ARG (exp, 0);
9813 tree src = CALL_EXPR_ARG (exp, 1);
9814 tree len = CALL_EXPR_ARG (exp, 2);
9815 tree size = CALL_EXPR_ARG (exp, 3);
9816
9817 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
9818 /*str=*/NULL_TREE, size);
9819
9820 if (!tree_fits_uhwi_p (size))
9821 return NULL_RTX;
9822
9823 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9824 {
9825 /* Avoid transforming the checking call to an ordinary one when
9826 an overflow has been detected or when the call couldn't be
9827 validated because the size is not constant. */
9828 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9829 return NULL_RTX;
9830
9831 tree fn = NULL_TREE;
9832 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9833 mem{cpy,pcpy,move,set} is available. */
9834 switch (fcode)
9835 {
9836 case BUILT_IN_MEMCPY_CHK:
9837 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9838 break;
9839 case BUILT_IN_MEMPCPY_CHK:
9840 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9841 break;
9842 case BUILT_IN_MEMMOVE_CHK:
9843 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9844 break;
9845 case BUILT_IN_MEMSET_CHK:
9846 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9847 break;
9848 default:
9849 break;
9850 }
9851
9852 if (! fn)
9853 return NULL_RTX;
9854
9855 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9856 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9857 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9858 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9859 }
9860 else if (fcode == BUILT_IN_MEMSET_CHK)
9861 return NULL_RTX;
9862 else
9863 {
9864 unsigned int dest_align = get_pointer_alignment (dest);
9865
9866 /* If DEST is not a pointer type, call the normal function. */
9867 if (dest_align == 0)
9868 return NULL_RTX;
9869
9870 /* If SRC and DEST are the same (and not volatile), do nothing. */
9871 if (operand_equal_p (src, dest, 0))
9872 {
9873 tree expr;
9874
9875 if (fcode != BUILT_IN_MEMPCPY_CHK)
9876 {
9877 /* Evaluate and ignore LEN in case it has side-effects. */
9878 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9879 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9880 }
9881
9882 expr = fold_build_pointer_plus (dest, len);
9883 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9884 }
9885
9886 /* __memmove_chk special case. */
9887 if (fcode == BUILT_IN_MEMMOVE_CHK)
9888 {
9889 unsigned int src_align = get_pointer_alignment (src);
9890
9891 if (src_align == 0)
9892 return NULL_RTX;
9893
9894 /* If src is categorized for a readonly section we can use
9895 normal __memcpy_chk. */
9896 if (readonly_data_expr (src))
9897 {
9898 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9899 if (!fn)
9900 return NULL_RTX;
9901 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9902 dest, src, len, size);
9903 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9904 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9905 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9906 }
9907 }
9908 return NULL_RTX;
9909 }
9910 }
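
/* Editorial sketch (not part of the GCC sources): _FORTIFY_SOURCE-style
   calls such as

     __builtin___memcpy_chk (dst, src, n, __builtin_object_size (dst, 0));

   reach this expander.  When N is a known constant that fits within the
   known object size, or the size is (size_t) -1, the call is rewritten to
   a plain memcpy; when a definite overflow is detected the checking call
   is left alone so the runtime check and the check_access warning still
   fire.  */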
9911
9912 /* Emit warning if a buffer overflow is detected at compile time. */
9913
9914 static void
9915 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9916 {
9917 /* The source string. */
9918 tree srcstr = NULL_TREE;
9919 /* The size of the destination object. */
9920 tree objsize = NULL_TREE;
9921 /* The string that is being concatenated with (as in __strcat_chk)
9922 or null if it isn't. */
9923 tree catstr = NULL_TREE;
9924 /* The maximum length of the source sequence in a bounded operation
9925 (such as __strncat_chk) or null if the operation isn't bounded
9926 (such as __strcat_chk). */
9927 tree maxread = NULL_TREE;
9928 /* The exact size of the access (such as in __strncpy_chk). */
9929 tree size = NULL_TREE;
9930
9931 switch (fcode)
9932 {
9933 case BUILT_IN_STRCPY_CHK:
9934 case BUILT_IN_STPCPY_CHK:
9935 srcstr = CALL_EXPR_ARG (exp, 1);
9936 objsize = CALL_EXPR_ARG (exp, 2);
9937 break;
9938
9939 case BUILT_IN_STRCAT_CHK:
9940 /* For __strcat_chk the warning will be emitted only if overflowing
9941 by at least strlen (dest) + 1 bytes. */
9942 catstr = CALL_EXPR_ARG (exp, 0);
9943 srcstr = CALL_EXPR_ARG (exp, 1);
9944 objsize = CALL_EXPR_ARG (exp, 2);
9945 break;
9946
9947 case BUILT_IN_STRNCAT_CHK:
9948 catstr = CALL_EXPR_ARG (exp, 0);
9949 srcstr = CALL_EXPR_ARG (exp, 1);
9950 maxread = CALL_EXPR_ARG (exp, 2);
9951 objsize = CALL_EXPR_ARG (exp, 3);
9952 break;
9953
9954 case BUILT_IN_STRNCPY_CHK:
9955 case BUILT_IN_STPNCPY_CHK:
9956 srcstr = CALL_EXPR_ARG (exp, 1);
9957 size = CALL_EXPR_ARG (exp, 2);
9958 objsize = CALL_EXPR_ARG (exp, 3);
9959 break;
9960
9961 case BUILT_IN_SNPRINTF_CHK:
9962 case BUILT_IN_VSNPRINTF_CHK:
9963 maxread = CALL_EXPR_ARG (exp, 1);
9964 objsize = CALL_EXPR_ARG (exp, 3);
9965 break;
9966 default:
9967 gcc_unreachable ();
9968 }
9969
9970 if (catstr && maxread)
9971 {
9972 /* Check __strncat_chk. There is no way to determine the length
9973 of the string to which the source string is being appended so
9974 just warn when the length of the source string is not known. */
9975 check_strncat_sizes (exp, objsize);
9976 return;
9977 }
9978
9979 /* The destination argument is the first one for all built-ins above. */
9980 tree dst = CALL_EXPR_ARG (exp, 0);
9981
9982 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
9983 }
9984
9985 /* Emit warning if a buffer overflow is detected at compile time
9986 in __sprintf_chk/__vsprintf_chk calls. */
9987
9988 static void
9989 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9990 {
9991 tree size, len, fmt;
9992 const char *fmt_str;
9993 int nargs = call_expr_nargs (exp);
9994
9995 /* Verify the required arguments in the original call. */
9996
9997 if (nargs < 4)
9998 return;
9999 size = CALL_EXPR_ARG (exp, 2);
10000 fmt = CALL_EXPR_ARG (exp, 3);
10001
10002 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10003 return;
10004
10005 /* Check whether the format is a literal string constant. */
10006 fmt_str = c_getstr (fmt);
10007 if (fmt_str == NULL)
10008 return;
10009
10010 if (!init_target_chars ())
10011 return;
10012
10013 /* If the format doesn't contain % args or %%, we know its size. */
10014 if (strchr (fmt_str, target_percent) == 0)
10015 len = build_int_cstu (size_type_node, strlen (fmt_str));
10016 /* If the format is "%s" and first ... argument is a string literal,
10017 we know it too. */
10018 else if (fcode == BUILT_IN_SPRINTF_CHK
10019 && strcmp (fmt_str, target_percent_s) == 0)
10020 {
10021 tree arg;
10022
10023 if (nargs < 5)
10024 return;
10025 arg = CALL_EXPR_ARG (exp, 4);
10026 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10027 return;
10028
10029 len = c_strlen (arg, 1);
10030 if (!len || ! tree_fits_uhwi_p (len))
10031 return;
10032 }
10033 else
10034 return;
10035
10036 /* Add one for the terminating nul. */
10037 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10038
10039 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10040 /*maxread=*/NULL_TREE, len, size);
10041 }
10042
10043 /* Emit warning if a free is called with address of a variable. */
10044
10045 static void
10046 maybe_emit_free_warning (tree exp)
10047 {
10048 tree arg = CALL_EXPR_ARG (exp, 0);
10049
10050 STRIP_NOPS (arg);
10051 if (TREE_CODE (arg) != ADDR_EXPR)
10052 return;
10053
10054 arg = get_base_address (TREE_OPERAND (arg, 0));
10055 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10056 return;
10057
10058 if (SSA_VAR_P (arg))
10059 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10060 "%Kattempt to free a non-heap object %qD", exp, arg);
10061 else
10062 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10063 "%Kattempt to free a non-heap object", exp);
10064 }
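
/* Editorial sketch (not part of the GCC sources): the warning above fires
   for code like

     int i;
     free (&i);

   which is diagnosed as "attempt to free a non-heap object 'i'" under
   -Wfree-nonheap-object whenever the argument is provably the address of a
   declared object.  */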
10065
10066 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10067 if possible. */
10068
10069 static tree
10070 fold_builtin_object_size (tree ptr, tree ost)
10071 {
10072 unsigned HOST_WIDE_INT bytes;
10073 int object_size_type;
10074
10075 if (!validate_arg (ptr, POINTER_TYPE)
10076 || !validate_arg (ost, INTEGER_TYPE))
10077 return NULL_TREE;
10078
10079 STRIP_NOPS (ost);
10080
10081 if (TREE_CODE (ost) != INTEGER_CST
10082 || tree_int_cst_sgn (ost) < 0
10083 || compare_tree_int (ost, 3) > 0)
10084 return NULL_TREE;
10085
10086 object_size_type = tree_to_shwi (ost);
10087
10088 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10089 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10090 and (size_t) 0 for types 2 and 3. */
10091 if (TREE_SIDE_EFFECTS (ptr))
10092 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10093
10094 if (TREE_CODE (ptr) == ADDR_EXPR)
10095 {
10096 compute_builtin_object_size (ptr, object_size_type, &bytes);
10097 if (wi::fits_to_tree_p (bytes, size_type_node))
10098 return build_int_cstu (size_type_node, bytes);
10099 }
10100 else if (TREE_CODE (ptr) == SSA_NAME)
10101 {
10102 /* If object size is not known yet, delay folding until
10103 later. Maybe subsequent passes will help determining
10104 it. */
10105 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10106 && wi::fits_to_tree_p (bytes, size_type_node))
10107 return build_int_cstu (size_type_node, bytes);
10108 }
10109
10110 return NULL_TREE;
10111 }
10112
10113 /* Builtins with folding operations that operate on "..." arguments
10114 need special handling; we need to store the arguments in a convenient
10115 data structure before attempting any folding. Fortunately there are
10116 only a few builtins that fall into this category. FNDECL is the
10117 function, EXP is the CALL_EXPR for the call. */
10118
10119 static tree
10120 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10121 {
10122 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10123 tree ret = NULL_TREE;
10124
10125 switch (fcode)
10126 {
10127 case BUILT_IN_FPCLASSIFY:
10128 ret = fold_builtin_fpclassify (loc, args, nargs);
10129 break;
10130
10131 default:
10132 break;
10133 }
10134 if (ret)
10135 {
10136 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10137 SET_EXPR_LOCATION (ret, loc);
10138 TREE_NO_WARNING (ret) = 1;
10139 return ret;
10140 }
10141 return NULL_TREE;
10142 }
10143
10144 /* Initialize format string characters in the target charset. */
10145
10146 bool
10147 init_target_chars (void)
10148 {
10149 static bool init;
10150 if (!init)
10151 {
10152 target_newline = lang_hooks.to_target_charset ('\n');
10153 target_percent = lang_hooks.to_target_charset ('%');
10154 target_c = lang_hooks.to_target_charset ('c');
10155 target_s = lang_hooks.to_target_charset ('s');
10156 if (target_newline == 0 || target_percent == 0 || target_c == 0
10157 || target_s == 0)
10158 return false;
10159
10160 target_percent_c[0] = target_percent;
10161 target_percent_c[1] = target_c;
10162 target_percent_c[2] = '\0';
10163
10164 target_percent_s[0] = target_percent;
10165 target_percent_s[1] = target_s;
10166 target_percent_s[2] = '\0';
10167
10168 target_percent_s_newline[0] = target_percent;
10169 target_percent_s_newline[1] = target_s;
10170 target_percent_s_newline[2] = target_newline;
10171 target_percent_s_newline[3] = '\0';
10172
10173 init = true;
10174 }
10175 return true;
10176 }
10177
10178 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10179 and no overflow/underflow occurred. INEXACT is true if M was not
10180 exactly calculated. TYPE is the tree type for the result. This
10181 function assumes that you cleared the MPFR flags and then
10182 calculated M to see if anything subsequently set a flag prior to
10183 entering this function. Return NULL_TREE if any checks fail. */
10184
10185 static tree
10186 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10187 {
10188 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10189 overflow/underflow occurred. If -frounding-math, proceed iff the
10190 result of calling FUNC was exact. */
10191 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10192 && (!flag_rounding_math || !inexact))
10193 {
10194 REAL_VALUE_TYPE rr;
10195
10196 real_from_mpfr (&rr, m, type, GMP_RNDN);
10197 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10198 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10199 but the mpfr_t is not, then we underflowed in the
10200 conversion. */
10201 if (real_isfinite (&rr)
10202 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10203 {
10204 REAL_VALUE_TYPE rmode;
10205
10206 real_convert (&rmode, TYPE_MODE (type), &rr);
10207 /* Proceed iff the specified mode can hold the value. */
10208 if (real_identical (&rmode, &rr))
10209 return build_real (type, rmode);
10210 }
10211 }
10212 return NULL_TREE;
10213 }
10214
10215 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10216 number and no overflow/underflow occurred. INEXACT is true if M
10217 was not exactly calculated. TYPE is the tree type for the result.
10218 This function assumes that you cleared the MPFR flags and then
10219 calculated M to see if anything subsequently set a flag prior to
10220 entering this function. Return NULL_TREE if any checks fail, if
10221 FORCE_CONVERT is true, then bypass the checks. */
10222
10223 static tree
10224 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10225 {
10226 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10227 overflow/underflow occurred. If -frounding-math, proceed iff the
10228 result of calling FUNC was exact. */
10229 if (force_convert
10230 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10231 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10232 && (!flag_rounding_math || !inexact)))
10233 {
10234 REAL_VALUE_TYPE re, im;
10235
10236 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10237 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10238       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
10239 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
10240 	 but the mpfr_t is not, then we underflowed in the
10241 	 conversion.  */
10242 if (force_convert
10243 || (real_isfinite (&re) && real_isfinite (&im)
10244 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10245 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10246 {
10247 REAL_VALUE_TYPE re_mode, im_mode;
10248
10249 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10250 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10251 /* Proceed iff the specified mode can hold the value. */
10252 if (force_convert
10253 || (real_identical (&re_mode, &re)
10254 && real_identical (&im_mode, &im)))
10255 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10256 build_real (TREE_TYPE (type), im_mode));
10257 }
10258 }
10259 return NULL_TREE;
10260 }
10261
10262 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to
10263    store the quotient through ARG_QUO and return the remainder.  The
10264    type is taken from the type of ARG0 and is used for setting the
10265    precision of the calculation and results.  */
10266
10267 static tree
10268 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10269 {
10270 tree const type = TREE_TYPE (arg0);
10271 tree result = NULL_TREE;
10272
10273 STRIP_NOPS (arg0);
10274 STRIP_NOPS (arg1);
10275
10276 /* To proceed, MPFR must exactly represent the target floating point
10277 format, which only happens when the target base equals two. */
10278 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10279 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10280 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10281 {
10282 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10283 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10284
10285 if (real_isfinite (ra0) && real_isfinite (ra1))
10286 {
10287 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10288 const int prec = fmt->p;
10289 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10290 tree result_rem;
10291 long integer_quo;
10292 mpfr_t m0, m1;
10293
10294 mpfr_inits2 (prec, m0, m1, NULL);
10295 mpfr_from_real (m0, ra0, GMP_RNDN);
10296 mpfr_from_real (m1, ra1, GMP_RNDN);
10297 mpfr_clear_flags ();
10298 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10299 /* Remquo is independent of the rounding mode, so pass
10300 inexact=0 to do_mpfr_ckconv(). */
10301 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10302 mpfr_clears (m0, m1, NULL);
10303 if (result_rem)
10304 {
10305 /* MPFR calculates quo in the host's long so it may
10306 return more bits in quo than the target int can hold
10307 if sizeof(host long) > sizeof(target int). This can
10308 		 happen even for native compilers in LP64 mode.  In
10309 		 these cases, reduce the quo value modulo the largest
10310 		 number that the target int can hold, leaving one
10311 		 bit for the sign.  */
10312 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10313 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10314
10315 /* Dereference the quo pointer argument. */
10316 arg_quo = build_fold_indirect_ref (arg_quo);
10317 /* Proceed iff a valid pointer type was passed in. */
10318 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10319 {
10320 /* Set the value. */
10321 tree result_quo
10322 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10323 build_int_cst (TREE_TYPE (arg_quo),
10324 integer_quo));
10325 TREE_SIDE_EFFECTS (result_quo) = 1;
10326 /* Combine the quo assignment with the rem. */
10327 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10328 result_quo, result_rem));
10329 }
10330 }
10331 }
10332 }
10333 return result;
10334 }
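
/* A worked example of the fold above, using the usual remquo semantics:

       int q;
       double r = __builtin_remquo (5.0, 3.0, &q);

   With both arguments constant, mpfr_remquo yields the remainder
   5.0 - 2*3.0 = -1.0 and a quotient whose low bits are 2 (5.0/3.0
   rounds to the nearest integer, 2), so the call folds to a
   COMPOUND_EXPR that assigns 2 through the quo pointer and yields
   the REAL_CST -1.0.  */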
10335
10336 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10337 resulting value as a tree with type TYPE. The mpfr precision is
10338 set to the precision of TYPE. We assume that this mpfr function
10339 returns zero if the result could be calculated exactly within the
10340 requested precision. In addition, the integer pointer represented
10341 by ARG_SG will be dereferenced and set to the appropriate signgam
10342 (-1,1) value. */
10343
10344 static tree
10345 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10346 {
10347 tree result = NULL_TREE;
10348
10349 STRIP_NOPS (arg);
10350
10351 /* To proceed, MPFR must exactly represent the target floating point
10352 format, which only happens when the target base equals two. Also
10353 verify ARG is a constant and that ARG_SG is an int pointer. */
10354 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10355 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10356 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10357 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10358 {
10359 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10360
10361 /* In addition to NaN and Inf, the argument cannot be zero or a
10362 negative integer. */
10363 if (real_isfinite (ra)
10364 && ra->cl != rvc_zero
10365 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10366 {
10367 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10368 const int prec = fmt->p;
10369 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10370 int inexact, sg;
10371 mpfr_t m;
10372 tree result_lg;
10373
10374 mpfr_init2 (m, prec);
10375 mpfr_from_real (m, ra, GMP_RNDN);
10376 mpfr_clear_flags ();
10377 inexact = mpfr_lgamma (m, &sg, m, rnd);
10378 result_lg = do_mpfr_ckconv (m, type, inexact);
10379 mpfr_clear (m);
10380 if (result_lg)
10381 {
10382 tree result_sg;
10383
10384 /* Dereference the arg_sg pointer argument. */
10385 arg_sg = build_fold_indirect_ref (arg_sg);
10386 /* Assign the signgam value into *arg_sg. */
10387 result_sg = fold_build2 (MODIFY_EXPR,
10388 TREE_TYPE (arg_sg), arg_sg,
10389 build_int_cst (TREE_TYPE (arg_sg), sg));
10390 TREE_SIDE_EFFECTS (result_sg) = 1;
10391 /* Combine the signgam assignment with the lgamma result. */
10392 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10393 result_sg, result_lg));
10394 }
10395 }
10396 }
10397
10398 return result;
10399 }
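
/* A worked example for the fold above: for lgamma_r (3.0, &sg) with a
   constant argument, gamma (3.0) == 2.0, so mpfr_lgamma produces
   log (2.0) (about 0.693147) with a positive sign, and the call folds
   to a COMPOUND_EXPR that stores 1 in *sg and yields that REAL_CST.  */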
10400
10401 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
10402    mpc function FUNC on them and return the resulting value as a tree
10403 with type TYPE. The mpfr precision is set to the precision of
10404 TYPE. We assume that function FUNC returns zero if the result
10405 could be calculated exactly within the requested precision. If
10406 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10407 in the arguments and/or results. */
10408
10409 tree
10410 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10411 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10412 {
10413 tree result = NULL_TREE;
10414
10415 STRIP_NOPS (arg0);
10416 STRIP_NOPS (arg1);
10417
10418 /* To proceed, MPFR must exactly represent the target floating point
10419 format, which only happens when the target base equals two. */
10420 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10421 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10422 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10423 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10424 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10425 {
10426 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10427 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10428 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10429 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10430
10431 if (do_nonfinite
10432 || (real_isfinite (re0) && real_isfinite (im0)
10433 && real_isfinite (re1) && real_isfinite (im1)))
10434 {
10435 const struct real_format *const fmt =
10436 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10437 const int prec = fmt->p;
10438 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10439 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10440 int inexact;
10441 mpc_t m0, m1;
10442
10443 mpc_init2 (m0, prec);
10444 mpc_init2 (m1, prec);
10445 mpfr_from_real (mpc_realref (m0), re0, rnd);
10446 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10447 mpfr_from_real (mpc_realref (m1), re1, rnd);
10448 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10449 mpfr_clear_flags ();
10450 inexact = func (m0, m0, m1, crnd);
10451 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10452 mpc_clear (m0);
10453 mpc_clear (m1);
10454 }
10455 }
10456
10457 return result;
10458 }
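
/* A minimal sketch of a call to do_mpc_arg2, assuming ARG0 and ARG1 are
   the COMPLEX_CST operands of a cpow-style call with a complex TYPE;
   mpc_pow is one plausible choice of FUNC, and DO_NONFINITE is 0 so
   that Inf/NaN operands are rejected:

       tree folded = do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);

   When both operands are finite constants this returns a COMPLEX_CST
   holding the correctly rounded result, or NULL_TREE if the value does
   not fit TYPE.  */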
10459
10460 /* A wrapper function for builtin folding that prevents warnings for
10461    "statement without effect" and the like, caused by removing the
10462    call node before the warning is generated.  */
10463
10464 tree
10465 fold_call_stmt (gcall *stmt, bool ignore)
10466 {
10467 tree ret = NULL_TREE;
10468 tree fndecl = gimple_call_fndecl (stmt);
10469 location_t loc = gimple_location (stmt);
10470 if (fndecl
10471 && TREE_CODE (fndecl) == FUNCTION_DECL
10472 && DECL_BUILT_IN (fndecl)
10473 && !gimple_call_va_arg_pack_p (stmt))
10474 {
10475 int nargs = gimple_call_num_args (stmt);
10476 tree *args = (nargs > 0
10477 ? gimple_call_arg_ptr (stmt, 0)
10478 : &error_mark_node);
10479
10480 if (avoid_folding_inline_builtin (fndecl))
10481 return NULL_TREE;
10482 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10483 {
10484 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10485 }
10486 else
10487 {
10488 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10489 if (ret)
10490 {
10491 	  /* Propagate location information from the original call to the
10492 	     expansion of the builtin.  Otherwise things like
10493 	     maybe_emit_chk_warning, which operate on the expansion
10494 	     of a builtin, will use the wrong location information.  */
10495 if (gimple_has_location (stmt))
10496 {
10497 tree realret = ret;
10498 if (TREE_CODE (ret) == NOP_EXPR)
10499 realret = TREE_OPERAND (ret, 0);
10500 if (CAN_HAVE_LOCATION_P (realret)
10501 && !EXPR_HAS_LOCATION (realret))
10502 SET_EXPR_LOCATION (realret, loc);
10503 return realret;
10504 }
10505 return ret;
10506 }
10507 }
10508 }
10509 return NULL_TREE;
10510 }
10511
10512 /* Look up the explicit builtin declaration that corresponds to DECL
10513    and set ASMSPEC as its user assembler name.  DECL must be a
10514    function decl that declares a builtin.  */
10515
10516 void
10517 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10518 {
10519 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10520 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10521 && asmspec != 0);
10522
10523 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10524 set_user_assembler_name (builtin, asmspec);
10525
10526 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10527 && INT_TYPE_SIZE < BITS_PER_WORD)
10528 {
10529 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10530 set_user_assembler_libfunc ("ffs", asmspec);
10531 set_optab_libfunc (ffs_optab, mode, "ffs");
10532 }
10533 }
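
/* For example, a declaration that redirects memcpy to another assembler
   symbol, such as

       extern void *memcpy (void *, const void *, __SIZE_TYPE__)
	 __asm__ ("__my_memcpy");

   reaches this function, so that calls expanded through the builtin
   machinery also reference __my_memcpy.  The __my_memcpy name is purely
   illustrative.  */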
10534
10535 /* Return true if DECL is a builtin that expands to a constant or similarly
10536 simple code. */
10537 bool
10538 is_simple_builtin (tree decl)
10539 {
10540 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10541 switch (DECL_FUNCTION_CODE (decl))
10542 {
10543 /* Builtins that expand to constants. */
10544 case BUILT_IN_CONSTANT_P:
10545 case BUILT_IN_EXPECT:
10546 case BUILT_IN_OBJECT_SIZE:
10547 case BUILT_IN_UNREACHABLE:
10548 /* Simple register moves or loads from stack. */
10549 case BUILT_IN_ASSUME_ALIGNED:
10550 case BUILT_IN_RETURN_ADDRESS:
10551 case BUILT_IN_EXTRACT_RETURN_ADDR:
10552 case BUILT_IN_FROB_RETURN_ADDR:
10553 case BUILT_IN_RETURN:
10554 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10555 case BUILT_IN_FRAME_ADDRESS:
10556 case BUILT_IN_VA_END:
10557 case BUILT_IN_STACK_SAVE:
10558 case BUILT_IN_STACK_RESTORE:
10559 /* Exception state returns or moves registers around. */
10560 case BUILT_IN_EH_FILTER:
10561 case BUILT_IN_EH_POINTER:
10562 case BUILT_IN_EH_COPY_VALUES:
10563 return true;
10564
10565 default:
10566 return false;
10567 }
10568
10569 return false;
10570 }
10571
10572 /* Return true if DECL is a builtin that is not expensive, i.e., one
10573    that will most probably be expanded inline into reasonably simple
10574    code.  The builtins accepted here are a superset of is_simple_builtin.  */
10575 bool
10576 is_inexpensive_builtin (tree decl)
10577 {
10578 if (!decl)
10579 return false;
10580 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10581 return true;
10582 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10583 switch (DECL_FUNCTION_CODE (decl))
10584 {
10585 case BUILT_IN_ABS:
10586 CASE_BUILT_IN_ALLOCA:
10587 case BUILT_IN_BSWAP16:
10588 case BUILT_IN_BSWAP32:
10589 case BUILT_IN_BSWAP64:
10590 case BUILT_IN_CLZ:
10591 case BUILT_IN_CLZIMAX:
10592 case BUILT_IN_CLZL:
10593 case BUILT_IN_CLZLL:
10594 case BUILT_IN_CTZ:
10595 case BUILT_IN_CTZIMAX:
10596 case BUILT_IN_CTZL:
10597 case BUILT_IN_CTZLL:
10598 case BUILT_IN_FFS:
10599 case BUILT_IN_FFSIMAX:
10600 case BUILT_IN_FFSL:
10601 case BUILT_IN_FFSLL:
10602 case BUILT_IN_IMAXABS:
10603 case BUILT_IN_FINITE:
10604 case BUILT_IN_FINITEF:
10605 case BUILT_IN_FINITEL:
10606 case BUILT_IN_FINITED32:
10607 case BUILT_IN_FINITED64:
10608 case BUILT_IN_FINITED128:
10609 case BUILT_IN_FPCLASSIFY:
10610 case BUILT_IN_ISFINITE:
10611 case BUILT_IN_ISINF_SIGN:
10612 case BUILT_IN_ISINF:
10613 case BUILT_IN_ISINFF:
10614 case BUILT_IN_ISINFL:
10615 case BUILT_IN_ISINFD32:
10616 case BUILT_IN_ISINFD64:
10617 case BUILT_IN_ISINFD128:
10618 case BUILT_IN_ISNAN:
10619 case BUILT_IN_ISNANF:
10620 case BUILT_IN_ISNANL:
10621 case BUILT_IN_ISNAND32:
10622 case BUILT_IN_ISNAND64:
10623 case BUILT_IN_ISNAND128:
10624 case BUILT_IN_ISNORMAL:
10625 case BUILT_IN_ISGREATER:
10626 case BUILT_IN_ISGREATEREQUAL:
10627 case BUILT_IN_ISLESS:
10628 case BUILT_IN_ISLESSEQUAL:
10629 case BUILT_IN_ISLESSGREATER:
10630 case BUILT_IN_ISUNORDERED:
10631 case BUILT_IN_VA_ARG_PACK:
10632 case BUILT_IN_VA_ARG_PACK_LEN:
10633 case BUILT_IN_VA_COPY:
10634 case BUILT_IN_TRAP:
10635 case BUILT_IN_SAVEREGS:
10636 case BUILT_IN_POPCOUNTL:
10637 case BUILT_IN_POPCOUNTLL:
10638 case BUILT_IN_POPCOUNTIMAX:
10639 case BUILT_IN_POPCOUNT:
10640 case BUILT_IN_PARITYL:
10641 case BUILT_IN_PARITYLL:
10642 case BUILT_IN_PARITYIMAX:
10643 case BUILT_IN_PARITY:
10644 case BUILT_IN_LABS:
10645 case BUILT_IN_LLABS:
10646 case BUILT_IN_PREFETCH:
10647 case BUILT_IN_ACC_ON_DEVICE:
10648 return true;
10649
10650 default:
10651 return is_simple_builtin (decl);
10652 }
10653
10654 return false;
10655 }
10656
10657 /* Return true if T is a constant and the value cast to a target char
10658 can be represented by a host char.
10659    Store the cast char constant in *P if so.  */
10660
10661 bool
10662 target_char_cst_p (tree t, char *p)
10663 {
10664 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10665 return false;
10666
10667 *p = (char)tree_to_uhwi (t);
10668 return true;
10669 }
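
/* A minimal usage sketch for target_char_cst_p, e.g. when folding a
   call like strchr (s, 'a') whose second argument ARG is the character
   constant:

       char c;
       if (target_char_cst_p (arg, &c))
	 fold_with_host_char (c);

   where fold_with_host_char stands for whatever the caller does with
   the host char value; the name is hypothetical.  */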
10670
10671 /* Return the maximum object size. */
10672
10673 tree
10674 max_object_size (void)
10675 {
10676 /* To do: Make this a configurable parameter. */
10677 return TYPE_MAX_VALUE (ptrdiff_type_node);
10678 }
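
/* On a typical LP64 target ptrdiff_type_node is a 64-bit signed type,
   so this evaluates to 2^63 - 1; on a typical 32-bit target it is
   2^31 - 1.  */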