]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/builtins.c
.
[thirdparty/gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "stringpool.h"
64 #include "attribs.h"
65 #include "asan.h"
66 #include "cilk.h"
67 #include "tree-chkp.h"
68 #include "rtl-chkp.h"
69 #include "internal-fn.h"
70 #include "case-cfn-macros.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73
74 struct target_builtins default_target_builtins;
75 #if SWITCHABLE_TARGET
76 struct target_builtins *this_target_builtins = &default_target_builtins;
77 #endif
78
79 /* Define the names of the builtin function types and codes. */
80 const char *const built_in_class_names[BUILT_IN_LAST]
81 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
82
83 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
84 const char * built_in_names[(int) END_BUILTINS] =
85 {
86 #include "builtins.def"
87 };
88
89 /* Setup an array of builtin_info_type, make sure each element decl is
90 initialized to NULL_TREE. */
91 builtin_info_type builtin_info[(int)END_BUILTINS];
92
93 /* Non-zero if __builtin_constant_p should be folded right away. */
94 bool force_folding_builtin_constant_p;
95
96 static rtx c_readstr (const char *, machine_mode);
97 static int target_char_cast (tree, char *);
98 static rtx get_memory_rtx (tree, tree);
99 static int apply_args_size (void);
100 static int apply_result_size (void);
101 static rtx result_vector (int, rtx);
102 static void expand_builtin_prefetch (tree);
103 static rtx expand_builtin_apply_args (void);
104 static rtx expand_builtin_apply_args_1 (void);
105 static rtx expand_builtin_apply (rtx, rtx, rtx);
106 static void expand_builtin_return (rtx);
107 static enum type_class type_to_class (tree);
108 static rtx expand_builtin_classify_type (tree);
109 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
111 static rtx expand_builtin_interclass_mathfn (tree, rtx);
112 static rtx expand_builtin_sincos (tree);
113 static rtx expand_builtin_cexpi (tree, rtx);
114 static rtx expand_builtin_int_roundingfn (tree, rtx);
115 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
116 static rtx expand_builtin_next_arg (void);
117 static rtx expand_builtin_va_start (tree);
118 static rtx expand_builtin_va_end (tree);
119 static rtx expand_builtin_va_copy (tree);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
123 static rtx expand_builtin_memchr (tree, rtx);
124 static rtx expand_builtin_memcpy (tree, rtx);
125 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
126 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
127 rtx target, tree exp, int endp);
128 static rtx expand_builtin_memmove (tree, rtx);
129 static rtx expand_builtin_mempcpy (tree, rtx);
130 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
131 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
132 static rtx expand_builtin_strcat (tree, rtx);
133 static rtx expand_builtin_strcpy (tree, rtx);
134 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
135 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
136 static rtx expand_builtin_stpncpy (tree, rtx);
137 static rtx expand_builtin_strncat (tree, rtx);
138 static rtx expand_builtin_strncpy (tree, rtx);
139 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
140 static rtx expand_builtin_memset (tree, rtx, machine_mode);
141 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_alloca (tree);
146 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static tree stabilize_va_list_loc (location_t, tree, int);
149 static rtx expand_builtin_expect (tree, rtx);
150 static tree fold_builtin_constant_p (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (location_t, tree, tree);
153 static tree fold_builtin_inf (location_t, tree, int);
154 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
155 static bool validate_arg (const_tree, enum tree_code code);
156 static rtx expand_builtin_fabs (tree, rtx, rtx);
157 static rtx expand_builtin_signbit (tree, rtx);
158 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
159 static tree fold_builtin_isascii (location_t, tree);
160 static tree fold_builtin_toascii (location_t, tree);
161 static tree fold_builtin_isdigit (location_t, tree);
162 static tree fold_builtin_fabs (location_t, tree, tree);
163 static tree fold_builtin_abs (location_t, tree, tree);
164 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
165 enum tree_code);
166 static tree fold_builtin_0 (location_t, tree);
167 static tree fold_builtin_1 (location_t, tree, tree);
168 static tree fold_builtin_2 (location_t, tree, tree, tree);
169 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_varargs (location_t, tree, tree*, int);
171
172 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
173 static tree fold_builtin_strspn (location_t, tree, tree);
174 static tree fold_builtin_strcspn (location_t, tree, tree);
175
176 static rtx expand_builtin_object_size (tree);
177 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 enum built_in_function);
179 static void maybe_emit_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_free_warning (tree);
182 static tree fold_builtin_object_size (tree, tree);
183
184 unsigned HOST_WIDE_INT target_newline;
185 unsigned HOST_WIDE_INT target_percent;
186 static unsigned HOST_WIDE_INT target_c;
187 static unsigned HOST_WIDE_INT target_s;
188 char target_percent_c[3];
189 char target_percent_s[3];
190 char target_percent_s_newline[4];
191 static tree do_mpfr_remquo (tree, tree, tree);
192 static tree do_mpfr_lgamma_r (tree, tree, tree);
193 static void expand_builtin_sync_synchronize (void);
194
195 /* Return true if NAME starts with __builtin_ or __sync_. */
196
197 static bool
198 is_builtin_name (const char *name)
199 {
200 if (strncmp (name, "__builtin_", 10) == 0)
201 return true;
202 if (strncmp (name, "__sync_", 7) == 0)
203 return true;
204 if (strncmp (name, "__atomic_", 9) == 0)
205 return true;
206 if (flag_cilkplus
207 && (!strcmp (name, "__cilkrts_detach")
208 || !strcmp (name, "__cilkrts_pop_frame")))
209 return true;
210 return false;
211 }
212
213
214 /* Return true if DECL is a function symbol representing a built-in. */
215
216 bool
217 is_builtin_fn (tree decl)
218 {
219 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
220 }
221
222 /* Return true if NODE should be considered for inline expansion regardless
223 of the optimization level. This means whenever a function is invoked with
224 its "internal" name, which normally contains the prefix "__builtin". */
225
226 bool
227 called_as_built_in (tree node)
228 {
229 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
230 we want the name used to call the function, not the name it
231 will have. */
232 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
233 return is_builtin_name (name);
234 }
235
236 /* Compute values M and N such that M divides (address of EXP - N) and such
237 that N < M. If these numbers can be determined, store M in alignp and N in
238 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
239 *alignp and any bit-offset to *bitposp.
240
241 Note that the address (and thus the alignment) computed here is based
242 on the address to which a symbol resolves, whereas DECL_ALIGN is based
243 on the address at which an object is actually located. These two
244 addresses are not always the same. For example, on ARM targets,
245 the address &foo of a Thumb function foo() has the lowest bit set,
246 whereas foo() itself starts on an even address.
247
248 If ADDR_P is true we are taking the address of the memory reference EXP
249 and thus cannot rely on the access taking place. */
250
251 static bool
252 get_object_alignment_2 (tree exp, unsigned int *alignp,
253 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
254 {
255 HOST_WIDE_INT bitsize, bitpos;
256 tree offset;
257 machine_mode mode;
258 int unsignedp, reversep, volatilep;
259 unsigned int align = BITS_PER_UNIT;
260 bool known_alignment = false;
261
262 /* Get the innermost object and the constant (bitpos) and possibly
263 variable (offset) offset of the access. */
264 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
265 &unsignedp, &reversep, &volatilep);
266
267 /* Extract alignment information from the innermost object and
268 possibly adjust bitpos and offset. */
269 if (TREE_CODE (exp) == FUNCTION_DECL)
270 {
271 /* Function addresses can encode extra information besides their
272 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
273 allows the low bit to be used as a virtual bit, we know
274 that the address itself must be at least 2-byte aligned. */
275 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
276 align = 2 * BITS_PER_UNIT;
277 }
278 else if (TREE_CODE (exp) == LABEL_DECL)
279 ;
280 else if (TREE_CODE (exp) == CONST_DECL)
281 {
282 /* The alignment of a CONST_DECL is determined by its initializer. */
283 exp = DECL_INITIAL (exp);
284 align = TYPE_ALIGN (TREE_TYPE (exp));
285 if (CONSTANT_CLASS_P (exp))
286 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
287
288 known_alignment = true;
289 }
290 else if (DECL_P (exp))
291 {
292 align = DECL_ALIGN (exp);
293 known_alignment = true;
294 }
295 else if (TREE_CODE (exp) == INDIRECT_REF
296 || TREE_CODE (exp) == MEM_REF
297 || TREE_CODE (exp) == TARGET_MEM_REF)
298 {
299 tree addr = TREE_OPERAND (exp, 0);
300 unsigned ptr_align;
301 unsigned HOST_WIDE_INT ptr_bitpos;
302 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
303
304 /* If the address is explicitely aligned, handle that. */
305 if (TREE_CODE (addr) == BIT_AND_EXPR
306 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
307 {
308 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
309 ptr_bitmask *= BITS_PER_UNIT;
310 align = least_bit_hwi (ptr_bitmask);
311 addr = TREE_OPERAND (addr, 0);
312 }
313
314 known_alignment
315 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
316 align = MAX (ptr_align, align);
317
318 /* Re-apply explicit alignment to the bitpos. */
319 ptr_bitpos &= ptr_bitmask;
320
321 /* The alignment of the pointer operand in a TARGET_MEM_REF
322 has to take the variable offset parts into account. */
323 if (TREE_CODE (exp) == TARGET_MEM_REF)
324 {
325 if (TMR_INDEX (exp))
326 {
327 unsigned HOST_WIDE_INT step = 1;
328 if (TMR_STEP (exp))
329 step = TREE_INT_CST_LOW (TMR_STEP (exp));
330 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
331 }
332 if (TMR_INDEX2 (exp))
333 align = BITS_PER_UNIT;
334 known_alignment = false;
335 }
336
337 /* When EXP is an actual memory reference then we can use
338 TYPE_ALIGN of a pointer indirection to derive alignment.
339 Do so only if get_pointer_alignment_1 did not reveal absolute
340 alignment knowledge and if using that alignment would
341 improve the situation. */
342 unsigned int talign;
343 if (!addr_p && !known_alignment
344 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
345 && talign > align)
346 align = talign;
347 else
348 {
349 /* Else adjust bitpos accordingly. */
350 bitpos += ptr_bitpos;
351 if (TREE_CODE (exp) == MEM_REF
352 || TREE_CODE (exp) == TARGET_MEM_REF)
353 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
354 }
355 }
356 else if (TREE_CODE (exp) == STRING_CST)
357 {
358 /* STRING_CST are the only constant objects we allow to be not
359 wrapped inside a CONST_DECL. */
360 align = TYPE_ALIGN (TREE_TYPE (exp));
361 if (CONSTANT_CLASS_P (exp))
362 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
363
364 known_alignment = true;
365 }
366
367 /* If there is a non-constant offset part extract the maximum
368 alignment that can prevail. */
369 if (offset)
370 {
371 unsigned int trailing_zeros = tree_ctz (offset);
372 if (trailing_zeros < HOST_BITS_PER_INT)
373 {
374 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
375 if (inner)
376 align = MIN (align, inner);
377 }
378 }
379
380 *alignp = align;
381 *bitposp = bitpos & (*alignp - 1);
382 return known_alignment;
383 }
384
385 /* For a memory reference expression EXP compute values M and N such that M
386 divides (&EXP - N) and such that N < M. If these numbers can be determined,
387 store M in alignp and N in *BITPOSP and return true. Otherwise return false
388 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
389
390 bool
391 get_object_alignment_1 (tree exp, unsigned int *alignp,
392 unsigned HOST_WIDE_INT *bitposp)
393 {
394 return get_object_alignment_2 (exp, alignp, bitposp, false);
395 }
396
397 /* Return the alignment in bits of EXP, an object. */
398
399 unsigned int
400 get_object_alignment (tree exp)
401 {
402 unsigned HOST_WIDE_INT bitpos = 0;
403 unsigned int align;
404
405 get_object_alignment_1 (exp, &align, &bitpos);
406
407 /* align and bitpos now specify known low bits of the pointer.
408 ptr & (align - 1) == bitpos. */
409
410 if (bitpos != 0)
411 align = least_bit_hwi (bitpos);
412 return align;
413 }
414
415 /* For a pointer valued expression EXP compute values M and N such that M
416 divides (EXP - N) and such that N < M. If these numbers can be determined,
417 store M in alignp and N in *BITPOSP and return true. Return false if
418 the results are just a conservative approximation.
419
420 If EXP is not a pointer, false is returned too. */
421
422 bool
423 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
424 unsigned HOST_WIDE_INT *bitposp)
425 {
426 STRIP_NOPS (exp);
427
428 if (TREE_CODE (exp) == ADDR_EXPR)
429 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
430 alignp, bitposp, true);
431 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
432 {
433 unsigned int align;
434 unsigned HOST_WIDE_INT bitpos;
435 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
436 &align, &bitpos);
437 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
438 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
439 else
440 {
441 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
442 if (trailing_zeros < HOST_BITS_PER_INT)
443 {
444 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
445 if (inner)
446 align = MIN (align, inner);
447 }
448 }
449 *alignp = align;
450 *bitposp = bitpos & (align - 1);
451 return res;
452 }
453 else if (TREE_CODE (exp) == SSA_NAME
454 && POINTER_TYPE_P (TREE_TYPE (exp)))
455 {
456 unsigned int ptr_align, ptr_misalign;
457 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
458
459 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
460 {
461 *bitposp = ptr_misalign * BITS_PER_UNIT;
462 *alignp = ptr_align * BITS_PER_UNIT;
463 /* Make sure to return a sensible alignment when the multiplication
464 by BITS_PER_UNIT overflowed. */
465 if (*alignp == 0)
466 *alignp = 1u << (HOST_BITS_PER_INT - 1);
467 /* We cannot really tell whether this result is an approximation. */
468 return false;
469 }
470 else
471 {
472 *bitposp = 0;
473 *alignp = BITS_PER_UNIT;
474 return false;
475 }
476 }
477 else if (TREE_CODE (exp) == INTEGER_CST)
478 {
479 *alignp = BIGGEST_ALIGNMENT;
480 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
481 & (BIGGEST_ALIGNMENT - 1));
482 return true;
483 }
484
485 *bitposp = 0;
486 *alignp = BITS_PER_UNIT;
487 return false;
488 }
489
490 /* Return the alignment in bits of EXP, a pointer valued expression.
491 The alignment returned is, by default, the alignment of the thing that
492 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
493
494 Otherwise, look at the expression to see if we can do better, i.e., if the
495 expression is actually pointing at an object whose alignment is tighter. */
496
497 unsigned int
498 get_pointer_alignment (tree exp)
499 {
500 unsigned HOST_WIDE_INT bitpos = 0;
501 unsigned int align;
502
503 get_pointer_alignment_1 (exp, &align, &bitpos);
504
505 /* align and bitpos now specify known low bits of the pointer.
506 ptr & (align - 1) == bitpos. */
507
508 if (bitpos != 0)
509 align = least_bit_hwi (bitpos);
510
511 return align;
512 }
513
514 /* Return the number of non-zero elements in the sequence
515 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
516 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
517
518 static unsigned
519 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
520 {
521 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
522
523 unsigned n;
524
525 if (eltsize == 1)
526 {
527 /* Optimize the common case of plain char. */
528 for (n = 0; n < maxelts; n++)
529 {
530 const char *elt = (const char*) ptr + n;
531 if (!*elt)
532 break;
533 }
534 }
535 else
536 {
537 for (n = 0; n < maxelts; n++)
538 {
539 const char *elt = (const char*) ptr + n * eltsize;
540 if (!memcmp (elt, "\0\0\0\0", eltsize))
541 break;
542 }
543 }
544 return n;
545 }
546
547 /* Compute the length of a null-terminated character string or wide
548 character string handling character sizes of 1, 2, and 4 bytes.
549 TREE_STRING_LENGTH is not the right way because it evaluates to
550 the size of the character array in bytes (as opposed to characters)
551 and because it can contain a zero byte in the middle.
552
553 ONLY_VALUE should be nonzero if the result is not going to be emitted
554 into the instruction stream and zero if it is going to be expanded.
555 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
556 is returned, otherwise NULL, since
557 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
558 evaluate the side-effects.
559
560 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
561 accesses. Note that this implies the result is not going to be emitted
562 into the instruction stream.
563
564 The value returned is of type `ssizetype'.
565
566 Unfortunately, string_constant can't access the values of const char
567 arrays with initializers, so neither can we do so here. */
568
569 tree
570 c_strlen (tree src, int only_value)
571 {
572 STRIP_NOPS (src);
573 if (TREE_CODE (src) == COND_EXPR
574 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
575 {
576 tree len1, len2;
577
578 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
579 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
580 if (tree_int_cst_equal (len1, len2))
581 return len1;
582 }
583
584 if (TREE_CODE (src) == COMPOUND_EXPR
585 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
586 return c_strlen (TREE_OPERAND (src, 1), only_value);
587
588 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
589
590 /* Offset from the beginning of the string in bytes. */
591 tree byteoff;
592 src = string_constant (src, &byteoff);
593 if (src == 0)
594 return NULL_TREE;
595
596 /* Determine the size of the string element. */
597 unsigned eltsize
598 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
599
600 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
601 length of SRC. */
602 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
603
604 /* PTR can point to the byte representation of any string type, including
605 char* and wchar_t*. */
606 const char *ptr = TREE_STRING_POINTER (src);
607
608 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
609 {
610 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
611 compute the offset to the following null if we don't know where to
612 start searching for it. */
613 if (string_length (ptr, eltsize, maxelts) < maxelts)
614 {
615 /* Return when an embedded null character is found. */
616 return NULL_TREE;
617 }
618
619 /* We don't know the starting offset, but we do know that the string
620 has no internal zero bytes. We can assume that the offset falls
621 within the bounds of the string; otherwise, the programmer deserves
622 what he gets. Subtract the offset from the length of the string,
623 and return that. This would perhaps not be valid if we were dealing
624 with named arrays in addition to literal string constants. */
625
626 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
627 }
628
629 /* Offset from the beginning of the string in elements. */
630 HOST_WIDE_INT eltoff;
631
632 /* We have a known offset into the string. Start searching there for
633 a null character if we can represent it as a single HOST_WIDE_INT. */
634 if (byteoff == 0)
635 eltoff = 0;
636 else if (! tree_fits_shwi_p (byteoff))
637 eltoff = -1;
638 else
639 eltoff = tree_to_shwi (byteoff) / eltsize;
640
641 /* If the offset is known to be out of bounds, warn, and call strlen at
642 runtime. */
643 if (eltoff < 0 || eltoff > maxelts)
644 {
645 /* Suppress multiple warnings for propagated constant strings. */
646 if (only_value != 2
647 && !TREE_NO_WARNING (src))
648 {
649 warning_at (loc, 0, "offset %qwi outside bounds of constant string",
650 eltoff);
651 TREE_NO_WARNING (src) = 1;
652 }
653 return NULL_TREE;
654 }
655
656 /* Use strlen to search for the first zero byte. Since any strings
657 constructed with build_string will have nulls appended, we win even
658 if we get handed something like (char[4])"abcd".
659
660 Since ELTOFF is our starting index into the string, no further
661 calculation is needed. */
662 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
663 maxelts - eltoff);
664
665 return ssize_int (len);
666 }
667
668 /* Return a constant integer corresponding to target reading
669 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
670
671 static rtx
672 c_readstr (const char *str, machine_mode mode)
673 {
674 HOST_WIDE_INT ch;
675 unsigned int i, j;
676 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
677
678 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
679 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
680 / HOST_BITS_PER_WIDE_INT;
681
682 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
683 for (i = 0; i < len; i++)
684 tmp[i] = 0;
685
686 ch = 1;
687 for (i = 0; i < GET_MODE_SIZE (mode); i++)
688 {
689 j = i;
690 if (WORDS_BIG_ENDIAN)
691 j = GET_MODE_SIZE (mode) - i - 1;
692 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
693 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
694 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
695 j *= BITS_PER_UNIT;
696
697 if (ch)
698 ch = (unsigned char) str[i];
699 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
700 }
701
702 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
703 return immed_wide_int_const (c, mode);
704 }
705
706 /* Cast a target constant CST to target CHAR and if that value fits into
707 host char type, return zero and put that value into variable pointed to by
708 P. */
709
710 static int
711 target_char_cast (tree cst, char *p)
712 {
713 unsigned HOST_WIDE_INT val, hostval;
714
715 if (TREE_CODE (cst) != INTEGER_CST
716 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
717 return 1;
718
719 /* Do not care if it fits or not right here. */
720 val = TREE_INT_CST_LOW (cst);
721
722 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
723 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
724
725 hostval = val;
726 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
727 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
728
729 if (val != hostval)
730 return 1;
731
732 *p = hostval;
733 return 0;
734 }
735
736 /* Similar to save_expr, but assumes that arbitrary code is not executed
737 in between the multiple evaluations. In particular, we assume that a
738 non-addressable local variable will not be modified. */
739
740 static tree
741 builtin_save_expr (tree exp)
742 {
743 if (TREE_CODE (exp) == SSA_NAME
744 || (TREE_ADDRESSABLE (exp) == 0
745 && (TREE_CODE (exp) == PARM_DECL
746 || (VAR_P (exp) && !TREE_STATIC (exp)))))
747 return exp;
748
749 return save_expr (exp);
750 }
751
752 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
753 times to get the address of either a higher stack frame, or a return
754 address located within it (depending on FNDECL_CODE). */
755
756 static rtx
757 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
758 {
759 int i;
760 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
761 if (tem == NULL_RTX)
762 {
763 /* For a zero count with __builtin_return_address, we don't care what
764 frame address we return, because target-specific definitions will
765 override us. Therefore frame pointer elimination is OK, and using
766 the soft frame pointer is OK.
767
768 For a nonzero count, or a zero count with __builtin_frame_address,
769 we require a stable offset from the current frame pointer to the
770 previous one, so we must use the hard frame pointer, and
771 we must disable frame pointer elimination. */
772 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
773 tem = frame_pointer_rtx;
774 else
775 {
776 tem = hard_frame_pointer_rtx;
777
778 /* Tell reload not to eliminate the frame pointer. */
779 crtl->accesses_prior_frames = 1;
780 }
781 }
782
783 if (count > 0)
784 SETUP_FRAME_ADDRESSES ();
785
786 /* On the SPARC, the return address is not in the frame, it is in a
787 register. There is no way to access it off of the current frame
788 pointer, but it can be accessed off the previous frame pointer by
789 reading the value from the register window save area. */
790 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
791 count--;
792
793 /* Scan back COUNT frames to the specified frame. */
794 for (i = 0; i < count; i++)
795 {
796 /* Assume the dynamic chain pointer is in the word that the
797 frame address points to, unless otherwise specified. */
798 tem = DYNAMIC_CHAIN_ADDRESS (tem);
799 tem = memory_address (Pmode, tem);
800 tem = gen_frame_mem (Pmode, tem);
801 tem = copy_to_reg (tem);
802 }
803
804 /* For __builtin_frame_address, return what we've got. But, on
805 the SPARC for example, we may have to add a bias. */
806 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
807 return FRAME_ADDR_RTX (tem);
808
809 /* For __builtin_return_address, get the return address from that frame. */
810 #ifdef RETURN_ADDR_RTX
811 tem = RETURN_ADDR_RTX (count, tem);
812 #else
813 tem = memory_address (Pmode,
814 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
815 tem = gen_frame_mem (Pmode, tem);
816 #endif
817 return tem;
818 }
819
820 /* Alias set used for setjmp buffer. */
821 static alias_set_type setjmp_alias_set = -1;
822
823 /* Construct the leading half of a __builtin_setjmp call. Control will
824 return to RECEIVER_LABEL. This is also called directly by the SJLJ
825 exception handling code. */
826
827 void
828 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
829 {
830 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
831 rtx stack_save;
832 rtx mem;
833
834 if (setjmp_alias_set == -1)
835 setjmp_alias_set = new_alias_set ();
836
837 buf_addr = convert_memory_address (Pmode, buf_addr);
838
839 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
840
841 /* We store the frame pointer and the address of receiver_label in
842 the buffer and use the rest of it for the stack save area, which
843 is machine-dependent. */
844
845 mem = gen_rtx_MEM (Pmode, buf_addr);
846 set_mem_alias_set (mem, setjmp_alias_set);
847 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
848
849 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
850 GET_MODE_SIZE (Pmode))),
851 set_mem_alias_set (mem, setjmp_alias_set);
852
853 emit_move_insn (validize_mem (mem),
854 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
855
856 stack_save = gen_rtx_MEM (sa_mode,
857 plus_constant (Pmode, buf_addr,
858 2 * GET_MODE_SIZE (Pmode)));
859 set_mem_alias_set (stack_save, setjmp_alias_set);
860 emit_stack_save (SAVE_NONLOCAL, &stack_save);
861
862 /* If there is further processing to do, do it. */
863 if (targetm.have_builtin_setjmp_setup ())
864 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
865
866 /* We have a nonlocal label. */
867 cfun->has_nonlocal_label = 1;
868 }
869
870 /* Construct the trailing part of a __builtin_setjmp call. This is
871 also called directly by the SJLJ exception handling code.
872 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
873
874 void
875 expand_builtin_setjmp_receiver (rtx receiver_label)
876 {
877 rtx chain;
878
879 /* Mark the FP as used when we get here, so we have to make sure it's
880 marked as used by this function. */
881 emit_use (hard_frame_pointer_rtx);
882
883 /* Mark the static chain as clobbered here so life information
884 doesn't get messed up for it. */
885 chain = targetm.calls.static_chain (current_function_decl, true);
886 if (chain && REG_P (chain))
887 emit_clobber (chain);
888
889 /* Now put in the code to restore the frame pointer, and argument
890 pointer, if needed. */
891 if (! targetm.have_nonlocal_goto ())
892 {
893 /* First adjust our frame pointer to its actual value. It was
894 previously set to the start of the virtual area corresponding to
895 the stacked variables when we branched here and now needs to be
896 adjusted to the actual hardware fp value.
897
898 Assignments to virtual registers are converted by
899 instantiate_virtual_regs into the corresponding assignment
900 to the underlying register (fp in this case) that makes
901 the original assignment true.
902 So the following insn will actually be decrementing fp by
903 STARTING_FRAME_OFFSET. */
904 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
905
906 /* Restoring the frame pointer also modifies the hard frame pointer.
907 Mark it used (so that the previous assignment remains live once
908 the frame pointer is eliminated) and clobbered (to represent the
909 implicit update from the assignment). */
910 emit_use (hard_frame_pointer_rtx);
911 emit_clobber (hard_frame_pointer_rtx);
912 }
913
914 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
915 {
916 /* If the argument pointer can be eliminated in favor of the
917 frame pointer, we don't need to restore it. We assume here
918 that if such an elimination is present, it can always be used.
919 This is the case on all known machines; if we don't make this
920 assumption, we do unnecessary saving on many machines. */
921 size_t i;
922 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
923
924 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
925 if (elim_regs[i].from == ARG_POINTER_REGNUM
926 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
927 break;
928
929 if (i == ARRAY_SIZE (elim_regs))
930 {
931 /* Now restore our arg pointer from the address at which it
932 was saved in our stack frame. */
933 emit_move_insn (crtl->args.internal_arg_pointer,
934 copy_to_reg (get_arg_pointer_save_area ()));
935 }
936 }
937
938 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
939 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
940 else if (targetm.have_nonlocal_goto_receiver ())
941 emit_insn (targetm.gen_nonlocal_goto_receiver ());
942 else
943 { /* Nothing */ }
944
945 /* We must not allow the code we just generated to be reordered by
946 scheduling. Specifically, the update of the frame pointer must
947 happen immediately, not later. */
948 emit_insn (gen_blockage ());
949 }
950
951 /* __builtin_longjmp is passed a pointer to an array of five words (not
952 all will be used on all machines). It operates similarly to the C
953 library function of the same name, but is more efficient. Much of
954 the code below is copied from the handling of non-local gotos. */
955
956 static void
957 expand_builtin_longjmp (rtx buf_addr, rtx value)
958 {
959 rtx fp, lab, stack;
960 rtx_insn *insn, *last;
961 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
962
963 /* DRAP is needed for stack realign if longjmp is expanded to current
964 function */
965 if (SUPPORTS_STACK_ALIGNMENT)
966 crtl->need_drap = true;
967
968 if (setjmp_alias_set == -1)
969 setjmp_alias_set = new_alias_set ();
970
971 buf_addr = convert_memory_address (Pmode, buf_addr);
972
973 buf_addr = force_reg (Pmode, buf_addr);
974
975 /* We require that the user must pass a second argument of 1, because
976 that is what builtin_setjmp will return. */
977 gcc_assert (value == const1_rtx);
978
979 last = get_last_insn ();
980 if (targetm.have_builtin_longjmp ())
981 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
982 else
983 {
984 fp = gen_rtx_MEM (Pmode, buf_addr);
985 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
986 GET_MODE_SIZE (Pmode)));
987
988 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
989 2 * GET_MODE_SIZE (Pmode)));
990 set_mem_alias_set (fp, setjmp_alias_set);
991 set_mem_alias_set (lab, setjmp_alias_set);
992 set_mem_alias_set (stack, setjmp_alias_set);
993
994 /* Pick up FP, label, and SP from the block and jump. This code is
995 from expand_goto in stmt.c; see there for detailed comments. */
996 if (targetm.have_nonlocal_goto ())
997 /* We have to pass a value to the nonlocal_goto pattern that will
998 get copied into the static_chain pointer, but it does not matter
999 what that value is, because builtin_setjmp does not use it. */
1000 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1001 else
1002 {
1003 lab = copy_to_reg (lab);
1004
1005 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1006 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1007
1008 emit_move_insn (hard_frame_pointer_rtx, fp);
1009 emit_stack_restore (SAVE_NONLOCAL, stack);
1010
1011 emit_use (hard_frame_pointer_rtx);
1012 emit_use (stack_pointer_rtx);
1013 emit_indirect_jump (lab);
1014 }
1015 }
1016
1017 /* Search backwards and mark the jump insn as a non-local goto.
1018 Note that this precludes the use of __builtin_longjmp to a
1019 __builtin_setjmp target in the same function. However, we've
1020 already cautioned the user that these functions are for
1021 internal exception handling use only. */
1022 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1023 {
1024 gcc_assert (insn != last);
1025
1026 if (JUMP_P (insn))
1027 {
1028 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1029 break;
1030 }
1031 else if (CALL_P (insn))
1032 break;
1033 }
1034 }
1035
1036 static inline bool
1037 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1038 {
1039 return (iter->i < iter->n);
1040 }
1041
1042 /* This function validates the types of a function call argument list
1043 against a specified list of tree_codes. If the last specifier is a 0,
1044 that represents an ellipsis, otherwise the last specifier must be a
1045 VOID_TYPE. */
1046
1047 static bool
1048 validate_arglist (const_tree callexpr, ...)
1049 {
1050 enum tree_code code;
1051 bool res = 0;
1052 va_list ap;
1053 const_call_expr_arg_iterator iter;
1054 const_tree arg;
1055
1056 va_start (ap, callexpr);
1057 init_const_call_expr_arg_iterator (callexpr, &iter);
1058
1059 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1060 tree fn = CALL_EXPR_FN (callexpr);
1061 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1062
1063 for (unsigned argno = 1; ; ++argno)
1064 {
1065 code = (enum tree_code) va_arg (ap, int);
1066
1067 switch (code)
1068 {
1069 case 0:
1070 /* This signifies an ellipses, any further arguments are all ok. */
1071 res = true;
1072 goto end;
1073 case VOID_TYPE:
1074 /* This signifies an endlink, if no arguments remain, return
1075 true, otherwise return false. */
1076 res = !more_const_call_expr_args_p (&iter);
1077 goto end;
1078 case POINTER_TYPE:
1079 /* The actual argument must be nonnull when either the whole
1080 called function has been declared nonnull, or when the formal
1081 argument corresponding to the actual argument has been. */
1082 if (argmap
1083 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1084 {
1085 arg = next_const_call_expr_arg (&iter);
1086 if (!validate_arg (arg, code) || integer_zerop (arg))
1087 goto end;
1088 break;
1089 }
1090 /* FALLTHRU */
1091 default:
1092 /* If no parameters remain or the parameter's code does not
1093 match the specified code, return false. Otherwise continue
1094 checking any remaining arguments. */
1095 arg = next_const_call_expr_arg (&iter);
1096 if (!validate_arg (arg, code))
1097 goto end;
1098 break;
1099 }
1100 }
1101
1102 /* We need gotos here since we can only have one VA_CLOSE in a
1103 function. */
1104 end: ;
1105 va_end (ap);
1106
1107 BITMAP_FREE (argmap);
1108
1109 return res;
1110 }
1111
1112 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1113 and the address of the save area. */
1114
1115 static rtx
1116 expand_builtin_nonlocal_goto (tree exp)
1117 {
1118 tree t_label, t_save_area;
1119 rtx r_label, r_save_area, r_fp, r_sp;
1120 rtx_insn *insn;
1121
1122 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1123 return NULL_RTX;
1124
1125 t_label = CALL_EXPR_ARG (exp, 0);
1126 t_save_area = CALL_EXPR_ARG (exp, 1);
1127
1128 r_label = expand_normal (t_label);
1129 r_label = convert_memory_address (Pmode, r_label);
1130 r_save_area = expand_normal (t_save_area);
1131 r_save_area = convert_memory_address (Pmode, r_save_area);
1132 /* Copy the address of the save location to a register just in case it was
1133 based on the frame pointer. */
1134 r_save_area = copy_to_reg (r_save_area);
1135 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1136 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1137 plus_constant (Pmode, r_save_area,
1138 GET_MODE_SIZE (Pmode)));
1139
1140 crtl->has_nonlocal_goto = 1;
1141
1142 /* ??? We no longer need to pass the static chain value, afaik. */
1143 if (targetm.have_nonlocal_goto ())
1144 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1145 else
1146 {
1147 r_label = copy_to_reg (r_label);
1148
1149 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1150 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1151
1152 /* Restore frame pointer for containing function. */
1153 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1154 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1155
1156 /* USE of hard_frame_pointer_rtx added for consistency;
1157 not clear if really needed. */
1158 emit_use (hard_frame_pointer_rtx);
1159 emit_use (stack_pointer_rtx);
1160
1161 /* If the architecture is using a GP register, we must
1162 conservatively assume that the target function makes use of it.
1163 The prologue of functions with nonlocal gotos must therefore
1164 initialize the GP register to the appropriate value, and we
1165 must then make sure that this value is live at the point
1166 of the jump. (Note that this doesn't necessarily apply
1167 to targets with a nonlocal_goto pattern; they are free
1168 to implement it in their own way. Note also that this is
1169 a no-op if the GP register is a global invariant.) */
1170 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1171 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1172 emit_use (pic_offset_table_rtx);
1173
1174 emit_indirect_jump (r_label);
1175 }
1176
1177 /* Search backwards to the jump insn and mark it as a
1178 non-local goto. */
1179 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1180 {
1181 if (JUMP_P (insn))
1182 {
1183 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1184 break;
1185 }
1186 else if (CALL_P (insn))
1187 break;
1188 }
1189
1190 return const0_rtx;
1191 }
1192
1193 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1194 (not all will be used on all machines) that was passed to __builtin_setjmp.
1195 It updates the stack pointer in that block to the current value. This is
1196 also called directly by the SJLJ exception handling code. */
1197
1198 void
1199 expand_builtin_update_setjmp_buf (rtx buf_addr)
1200 {
1201 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1202 rtx stack_save
1203 = gen_rtx_MEM (sa_mode,
1204 memory_address
1205 (sa_mode,
1206 plus_constant (Pmode, buf_addr,
1207 2 * GET_MODE_SIZE (Pmode))));
1208
1209 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1210 }
1211
1212 /* Expand a call to __builtin_prefetch. For a target that does not support
1213 data prefetch, evaluate the memory address argument in case it has side
1214 effects. */
1215
1216 static void
1217 expand_builtin_prefetch (tree exp)
1218 {
1219 tree arg0, arg1, arg2;
1220 int nargs;
1221 rtx op0, op1, op2;
1222
1223 if (!validate_arglist (exp, POINTER_TYPE, 0))
1224 return;
1225
1226 arg0 = CALL_EXPR_ARG (exp, 0);
1227
1228 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1229 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1230 locality). */
1231 nargs = call_expr_nargs (exp);
1232 if (nargs > 1)
1233 arg1 = CALL_EXPR_ARG (exp, 1);
1234 else
1235 arg1 = integer_zero_node;
1236 if (nargs > 2)
1237 arg2 = CALL_EXPR_ARG (exp, 2);
1238 else
1239 arg2 = integer_three_node;
1240
1241 /* Argument 0 is an address. */
1242 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1243
1244 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1245 if (TREE_CODE (arg1) != INTEGER_CST)
1246 {
1247 error ("second argument to %<__builtin_prefetch%> must be a constant");
1248 arg1 = integer_zero_node;
1249 }
1250 op1 = expand_normal (arg1);
1251 /* Argument 1 must be either zero or one. */
1252 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1253 {
1254 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1255 " using zero");
1256 op1 = const0_rtx;
1257 }
1258
1259 /* Argument 2 (locality) must be a compile-time constant int. */
1260 if (TREE_CODE (arg2) != INTEGER_CST)
1261 {
1262 error ("third argument to %<__builtin_prefetch%> must be a constant");
1263 arg2 = integer_zero_node;
1264 }
1265 op2 = expand_normal (arg2);
1266 /* Argument 2 must be 0, 1, 2, or 3. */
1267 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1268 {
1269 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1270 op2 = const0_rtx;
1271 }
1272
1273 if (targetm.have_prefetch ())
1274 {
1275 struct expand_operand ops[3];
1276
1277 create_address_operand (&ops[0], op0);
1278 create_integer_operand (&ops[1], INTVAL (op1));
1279 create_integer_operand (&ops[2], INTVAL (op2));
1280 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1281 return;
1282 }
1283
1284 /* Don't do anything with direct references to volatile memory, but
1285 generate code to handle other side effects. */
1286 if (!MEM_P (op0) && side_effects_p (op0))
1287 emit_insn (op0);
1288 }
1289
1290 /* Get a MEM rtx for expression EXP which is the address of an operand
1291 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1292 the maximum length of the block of memory that might be accessed or
1293 NULL if unknown. */
1294
1295 static rtx
1296 get_memory_rtx (tree exp, tree len)
1297 {
1298 tree orig_exp = exp;
1299 rtx addr, mem;
1300
1301 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1302 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1303 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1304 exp = TREE_OPERAND (exp, 0);
1305
1306 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1307 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1308
1309 /* Get an expression we can use to find the attributes to assign to MEM.
1310 First remove any nops. */
1311 while (CONVERT_EXPR_P (exp)
1312 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1313 exp = TREE_OPERAND (exp, 0);
1314
1315 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1316 (as builtin stringops may alias with anything). */
1317 exp = fold_build2 (MEM_REF,
1318 build_array_type (char_type_node,
1319 build_range_type (sizetype,
1320 size_one_node, len)),
1321 exp, build_int_cst (ptr_type_node, 0));
1322
1323 /* If the MEM_REF has no acceptable address, try to get the base object
1324 from the original address we got, and build an all-aliasing
1325 unknown-sized access to that one. */
1326 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1327 set_mem_attributes (mem, exp, 0);
1328 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1329 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1330 0))))
1331 {
1332 exp = build_fold_addr_expr (exp);
1333 exp = fold_build2 (MEM_REF,
1334 build_array_type (char_type_node,
1335 build_range_type (sizetype,
1336 size_zero_node,
1337 NULL)),
1338 exp, build_int_cst (ptr_type_node, 0));
1339 set_mem_attributes (mem, exp, 0);
1340 }
1341 set_mem_alias_set (mem, 0);
1342 return mem;
1343 }
1344 \f
1345 /* Built-in functions to perform an untyped call and return. */
1346
1347 #define apply_args_mode \
1348 (this_target_builtins->x_apply_args_mode)
1349 #define apply_result_mode \
1350 (this_target_builtins->x_apply_result_mode)
1351
1352 /* Return the size required for the block returned by __builtin_apply_args,
1353 and initialize apply_args_mode. */
1354
1355 static int
1356 apply_args_size (void)
1357 {
1358 static int size = -1;
1359 int align;
1360 unsigned int regno;
1361 machine_mode mode;
1362
1363 /* The values computed by this function never change. */
1364 if (size < 0)
1365 {
1366 /* The first value is the incoming arg-pointer. */
1367 size = GET_MODE_SIZE (Pmode);
1368
1369 /* The second value is the structure value address unless this is
1370 passed as an "invisible" first argument. */
1371 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1372 size += GET_MODE_SIZE (Pmode);
1373
1374 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1375 if (FUNCTION_ARG_REGNO_P (regno))
1376 {
1377 mode = targetm.calls.get_raw_arg_mode (regno);
1378
1379 gcc_assert (mode != VOIDmode);
1380
1381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1382 if (size % align != 0)
1383 size = CEIL (size, align) * align;
1384 size += GET_MODE_SIZE (mode);
1385 apply_args_mode[regno] = mode;
1386 }
1387 else
1388 {
1389 apply_args_mode[regno] = VOIDmode;
1390 }
1391 }
1392 return size;
1393 }
1394
1395 /* Return the size required for the block returned by __builtin_apply,
1396 and initialize apply_result_mode. */
1397
1398 static int
1399 apply_result_size (void)
1400 {
1401 static int size = -1;
1402 int align, regno;
1403 machine_mode mode;
1404
1405 /* The values computed by this function never change. */
1406 if (size < 0)
1407 {
1408 size = 0;
1409
1410 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1411 if (targetm.calls.function_value_regno_p (regno))
1412 {
1413 mode = targetm.calls.get_raw_result_mode (regno);
1414
1415 gcc_assert (mode != VOIDmode);
1416
1417 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1418 if (size % align != 0)
1419 size = CEIL (size, align) * align;
1420 size += GET_MODE_SIZE (mode);
1421 apply_result_mode[regno] = mode;
1422 }
1423 else
1424 apply_result_mode[regno] = VOIDmode;
1425
1426 /* Allow targets that use untyped_call and untyped_return to override
1427 the size so that machine-specific information can be stored here. */
1428 #ifdef APPLY_RESULT_SIZE
1429 size = APPLY_RESULT_SIZE;
1430 #endif
1431 }
1432 return size;
1433 }
1434
1435 /* Create a vector describing the result block RESULT. If SAVEP is true,
1436 the result block is used to save the values; otherwise it is used to
1437 restore the values. */
1438
1439 static rtx
1440 result_vector (int savep, rtx result)
1441 {
1442 int regno, size, align, nelts;
1443 machine_mode mode;
1444 rtx reg, mem;
1445 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1446
1447 size = nelts = 0;
1448 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1449 if ((mode = apply_result_mode[regno]) != VOIDmode)
1450 {
1451 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1452 if (size % align != 0)
1453 size = CEIL (size, align) * align;
1454 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1455 mem = adjust_address (result, mode, size);
1456 savevec[nelts++] = (savep
1457 ? gen_rtx_SET (mem, reg)
1458 : gen_rtx_SET (reg, mem));
1459 size += GET_MODE_SIZE (mode);
1460 }
1461 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1462 }
1463
1464 /* Save the state required to perform an untyped call with the same
1465 arguments as were passed to the current function. */
1466
1467 static rtx
1468 expand_builtin_apply_args_1 (void)
1469 {
1470 rtx registers, tem;
1471 int size, align, regno;
1472 machine_mode mode;
1473 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1474
1475 /* Create a block where the arg-pointer, structure value address,
1476 and argument registers can be saved. */
1477 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1478
1479 /* Walk past the arg-pointer and structure value address. */
1480 size = GET_MODE_SIZE (Pmode);
1481 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1482 size += GET_MODE_SIZE (Pmode);
1483
1484 /* Save each register used in calling a function to the block. */
1485 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1486 if ((mode = apply_args_mode[regno]) != VOIDmode)
1487 {
1488 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1489 if (size % align != 0)
1490 size = CEIL (size, align) * align;
1491
1492 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1493
1494 emit_move_insn (adjust_address (registers, mode, size), tem);
1495 size += GET_MODE_SIZE (mode);
1496 }
1497
1498 /* Save the arg pointer to the block. */
1499 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1500 /* We need the pointer as the caller actually passed them to us, not
1501 as we might have pretended they were passed. Make sure it's a valid
1502 operand, as emit_move_insn isn't expected to handle a PLUS. */
1503 if (STACK_GROWS_DOWNWARD)
1504 tem
1505 = force_operand (plus_constant (Pmode, tem,
1506 crtl->args.pretend_args_size),
1507 NULL_RTX);
1508 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1509
1510 size = GET_MODE_SIZE (Pmode);
1511
1512 /* Save the structure value address unless this is passed as an
1513 "invisible" first argument. */
1514 if (struct_incoming_value)
1515 {
1516 emit_move_insn (adjust_address (registers, Pmode, size),
1517 copy_to_reg (struct_incoming_value));
1518 size += GET_MODE_SIZE (Pmode);
1519 }
1520
1521 /* Return the address of the block. */
1522 return copy_addr_to_reg (XEXP (registers, 0));
1523 }
1524
1525 /* __builtin_apply_args returns block of memory allocated on
1526 the stack into which is stored the arg pointer, structure
1527 value address, static chain, and all the registers that might
1528 possibly be used in performing a function call. The code is
1529 moved to the start of the function so the incoming values are
1530 saved. */
1531
1532 static rtx
1533 expand_builtin_apply_args (void)
1534 {
1535 /* Don't do __builtin_apply_args more than once in a function.
1536 Save the result of the first call and reuse it. */
1537 if (apply_args_value != 0)
1538 return apply_args_value;
1539 {
1540 /* When this function is called, it means that registers must be
1541 saved on entry to this function. So we migrate the
1542 call to the first insn of this function. */
1543 rtx temp;
1544
1545 start_sequence ();
1546 temp = expand_builtin_apply_args_1 ();
1547 rtx_insn *seq = get_insns ();
1548 end_sequence ();
1549
1550 apply_args_value = temp;
1551
1552 /* Put the insns after the NOTE that starts the function.
1553 If this is inside a start_sequence, make the outer-level insn
1554 chain current, so the code is placed at the start of the
1555 function. If internal_arg_pointer is a non-virtual pseudo,
1556 it needs to be placed after the function that initializes
1557 that pseudo. */
1558 push_topmost_sequence ();
1559 if (REG_P (crtl->args.internal_arg_pointer)
1560 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1561 emit_insn_before (seq, parm_birth_insn);
1562 else
1563 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1564 pop_topmost_sequence ();
1565 return temp;
1566 }
1567 }
1568
1569 /* Perform an untyped call and save the state required to perform an
1570 untyped return of whatever value was returned by the given function. */
1571
1572 static rtx
1573 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1574 {
1575 int size, align, regno;
1576 machine_mode mode;
1577 rtx incoming_args, result, reg, dest, src;
1578 rtx_call_insn *call_insn;
1579 rtx old_stack_level = 0;
1580 rtx call_fusage = 0;
1581 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1582
1583 arguments = convert_memory_address (Pmode, arguments);
1584
1585 /* Create a block where the return registers can be saved. */
1586 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1587
1588 /* Fetch the arg pointer from the ARGUMENTS block. */
1589 incoming_args = gen_reg_rtx (Pmode);
1590 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1591 if (!STACK_GROWS_DOWNWARD)
1592 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1593 incoming_args, 0, OPTAB_LIB_WIDEN);
1594
1595 /* Push a new argument block and copy the arguments. Do not allow
1596 the (potential) memcpy call below to interfere with our stack
1597 manipulations. */
1598 do_pending_stack_adjust ();
1599 NO_DEFER_POP;
1600
1601 /* Save the stack with nonlocal if available. */
1602 if (targetm.have_save_stack_nonlocal ())
1603 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1604 else
1605 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1606
1607 /* Allocate a block of memory onto the stack and copy the memory
1608 arguments to the outgoing arguments address. We can pass TRUE
1609 as the 4th argument because we just saved the stack pointer
1610 and will restore it right after the call. */
1611 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1612
1613 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1614 may have already set current_function_calls_alloca to true.
1615 current_function_calls_alloca won't be set if argsize is zero,
1616 so we have to guarantee need_drap is true here. */
1617 if (SUPPORTS_STACK_ALIGNMENT)
1618 crtl->need_drap = true;
1619
1620 dest = virtual_outgoing_args_rtx;
1621 if (!STACK_GROWS_DOWNWARD)
1622 {
1623 if (CONST_INT_P (argsize))
1624 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1625 else
1626 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1627 }
1628 dest = gen_rtx_MEM (BLKmode, dest);
1629 set_mem_align (dest, PARM_BOUNDARY);
1630 src = gen_rtx_MEM (BLKmode, incoming_args);
1631 set_mem_align (src, PARM_BOUNDARY);
1632 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1633
1634 /* Refer to the argument block. */
1635 apply_args_size ();
1636 arguments = gen_rtx_MEM (BLKmode, arguments);
1637 set_mem_align (arguments, PARM_BOUNDARY);
1638
1639 /* Walk past the arg-pointer and structure value address. */
1640 size = GET_MODE_SIZE (Pmode);
1641 if (struct_value)
1642 size += GET_MODE_SIZE (Pmode);
1643
1644 /* Restore each of the registers previously saved. Make USE insns
1645 for each of these registers for use in making the call. */
1646 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1647 if ((mode = apply_args_mode[regno]) != VOIDmode)
1648 {
1649 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1650 if (size % align != 0)
1651 size = CEIL (size, align) * align;
1652 reg = gen_rtx_REG (mode, regno);
1653 emit_move_insn (reg, adjust_address (arguments, mode, size));
1654 use_reg (&call_fusage, reg);
1655 size += GET_MODE_SIZE (mode);
1656 }
1657
1658 /* Restore the structure value address unless this is passed as an
1659 "invisible" first argument. */
1660 size = GET_MODE_SIZE (Pmode);
1661 if (struct_value)
1662 {
1663 rtx value = gen_reg_rtx (Pmode);
1664 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1665 emit_move_insn (struct_value, value);
1666 if (REG_P (struct_value))
1667 use_reg (&call_fusage, struct_value);
1668 size += GET_MODE_SIZE (Pmode);
1669 }
1670
1671 /* All arguments and registers used for the call are set up by now! */
1672 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1673
1674 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1675 and we don't want to load it into a register as an optimization,
1676 because prepare_call_address already did it if it should be done. */
1677 if (GET_CODE (function) != SYMBOL_REF)
1678 function = memory_address (FUNCTION_MODE, function);
1679
1680 /* Generate the actual call instruction and save the return value. */
1681 if (targetm.have_untyped_call ())
1682 {
1683 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1684 emit_call_insn (targetm.gen_untyped_call (mem, result,
1685 result_vector (1, result)));
1686 }
1687 else if (targetm.have_call_value ())
1688 {
1689 rtx valreg = 0;
1690
1691 /* Locate the unique return register. It is not possible to
1692 express a call that sets more than one return register using
1693 call_value; use untyped_call for that. In fact, untyped_call
1694 only needs to save the return registers in the given block. */
1695 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1696 if ((mode = apply_result_mode[regno]) != VOIDmode)
1697 {
1698 gcc_assert (!valreg); /* have_untyped_call required. */
1699
1700 valreg = gen_rtx_REG (mode, regno);
1701 }
1702
1703 emit_insn (targetm.gen_call_value (valreg,
1704 gen_rtx_MEM (FUNCTION_MODE, function),
1705 const0_rtx, NULL_RTX, const0_rtx));
1706
1707 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1708 }
1709 else
1710 gcc_unreachable ();
1711
1712 /* Find the CALL insn we just emitted, and attach the register usage
1713 information. */
1714 call_insn = last_call_insn ();
1715 add_function_usage_to (call_insn, call_fusage);
1716
1717 /* Restore the stack. */
1718 if (targetm.have_save_stack_nonlocal ())
1719 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1720 else
1721 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1722 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1723
1724 OK_DEFER_POP;
1725
1726 /* Return the address of the result block. */
1727 result = copy_addr_to_reg (XEXP (result, 0));
1728 return convert_memory_address (ptr_mode, result);
1729 }
1730
1731 /* Perform an untyped return. */
1732
1733 static void
1734 expand_builtin_return (rtx result)
1735 {
1736 int size, align, regno;
1737 machine_mode mode;
1738 rtx reg;
1739 rtx_insn *call_fusage = 0;
1740
1741 result = convert_memory_address (Pmode, result);
1742
1743 apply_result_size ();
1744 result = gen_rtx_MEM (BLKmode, result);
1745
1746 if (targetm.have_untyped_return ())
1747 {
1748 rtx vector = result_vector (0, result);
1749 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1750 emit_barrier ();
1751 return;
1752 }
1753
1754 /* Restore the return value and note that each value is used. */
1755 size = 0;
1756 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1757 if ((mode = apply_result_mode[regno]) != VOIDmode)
1758 {
1759 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1760 if (size % align != 0)
1761 size = CEIL (size, align) * align;
1762 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1763 emit_move_insn (reg, adjust_address (result, mode, size));
1764
1765 push_to_sequence (call_fusage);
1766 emit_use (reg);
1767 call_fusage = get_insns ();
1768 end_sequence ();
1769 size += GET_MODE_SIZE (mode);
1770 }
1771
1772 /* Put the USE insns before the return. */
1773 emit_insn (call_fusage);
1774
1775 /* Return whatever values was restored by jumping directly to the end
1776 of the function. */
1777 expand_naked_return ();
1778 }
1779
1780 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1781
1782 static enum type_class
1783 type_to_class (tree type)
1784 {
1785 switch (TREE_CODE (type))
1786 {
1787 case VOID_TYPE: return void_type_class;
1788 case INTEGER_TYPE: return integer_type_class;
1789 case ENUMERAL_TYPE: return enumeral_type_class;
1790 case BOOLEAN_TYPE: return boolean_type_class;
1791 case POINTER_TYPE: return pointer_type_class;
1792 case REFERENCE_TYPE: return reference_type_class;
1793 case OFFSET_TYPE: return offset_type_class;
1794 case REAL_TYPE: return real_type_class;
1795 case COMPLEX_TYPE: return complex_type_class;
1796 case FUNCTION_TYPE: return function_type_class;
1797 case METHOD_TYPE: return method_type_class;
1798 case RECORD_TYPE: return record_type_class;
1799 case UNION_TYPE:
1800 case QUAL_UNION_TYPE: return union_type_class;
1801 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1802 ? string_type_class : array_type_class);
1803 case LANG_TYPE: return lang_type_class;
1804 default: return no_type_class;
1805 }
1806 }
1807
1808 /* Expand a call EXP to __builtin_classify_type. */
1809
1810 static rtx
1811 expand_builtin_classify_type (tree exp)
1812 {
1813 if (call_expr_nargs (exp))
1814 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1815 return GEN_INT (no_type_class);
1816 }
1817
1818 /* This helper macro, meant to be used in mathfn_built_in below,
1819 determines which among a set of three builtin math functions is
1820 appropriate for a given type mode. The `F' and `L' cases are
1821 automatically generated from the `double' case. */
1822 #define CASE_MATHFN(MATHFN) \
1823 CASE_CFN_##MATHFN: \
1824 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1825 fcodel = BUILT_IN_##MATHFN##L ; break;
1826 /* Similar to above, but appends _R after any F/L suffix. */
1827 #define CASE_MATHFN_REENT(MATHFN) \
1828 case CFN_BUILT_IN_##MATHFN##_R: \
1829 case CFN_BUILT_IN_##MATHFN##F_R: \
1830 case CFN_BUILT_IN_##MATHFN##L_R: \
1831 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1832 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1833
1834 /* Return a function equivalent to FN but operating on floating-point
1835 values of type TYPE, or END_BUILTINS if no such function exists.
1836 This is purely an operation on function codes; it does not guarantee
1837 that the target actually has an implementation of the function. */
1838
1839 static built_in_function
1840 mathfn_built_in_2 (tree type, combined_fn fn)
1841 {
1842 built_in_function fcode, fcodef, fcodel;
1843
1844 switch (fn)
1845 {
1846 CASE_MATHFN (ACOS)
1847 CASE_MATHFN (ACOSH)
1848 CASE_MATHFN (ASIN)
1849 CASE_MATHFN (ASINH)
1850 CASE_MATHFN (ATAN)
1851 CASE_MATHFN (ATAN2)
1852 CASE_MATHFN (ATANH)
1853 CASE_MATHFN (CBRT)
1854 CASE_MATHFN (CEIL)
1855 CASE_MATHFN (CEXPI)
1856 CASE_MATHFN (COPYSIGN)
1857 CASE_MATHFN (COS)
1858 CASE_MATHFN (COSH)
1859 CASE_MATHFN (DREM)
1860 CASE_MATHFN (ERF)
1861 CASE_MATHFN (ERFC)
1862 CASE_MATHFN (EXP)
1863 CASE_MATHFN (EXP10)
1864 CASE_MATHFN (EXP2)
1865 CASE_MATHFN (EXPM1)
1866 CASE_MATHFN (FABS)
1867 CASE_MATHFN (FDIM)
1868 CASE_MATHFN (FLOOR)
1869 CASE_MATHFN (FMA)
1870 CASE_MATHFN (FMAX)
1871 CASE_MATHFN (FMIN)
1872 CASE_MATHFN (FMOD)
1873 CASE_MATHFN (FREXP)
1874 CASE_MATHFN (GAMMA)
1875 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1876 CASE_MATHFN (HUGE_VAL)
1877 CASE_MATHFN (HYPOT)
1878 CASE_MATHFN (ILOGB)
1879 CASE_MATHFN (ICEIL)
1880 CASE_MATHFN (IFLOOR)
1881 CASE_MATHFN (INF)
1882 CASE_MATHFN (IRINT)
1883 CASE_MATHFN (IROUND)
1884 CASE_MATHFN (ISINF)
1885 CASE_MATHFN (J0)
1886 CASE_MATHFN (J1)
1887 CASE_MATHFN (JN)
1888 CASE_MATHFN (LCEIL)
1889 CASE_MATHFN (LDEXP)
1890 CASE_MATHFN (LFLOOR)
1891 CASE_MATHFN (LGAMMA)
1892 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1893 CASE_MATHFN (LLCEIL)
1894 CASE_MATHFN (LLFLOOR)
1895 CASE_MATHFN (LLRINT)
1896 CASE_MATHFN (LLROUND)
1897 CASE_MATHFN (LOG)
1898 CASE_MATHFN (LOG10)
1899 CASE_MATHFN (LOG1P)
1900 CASE_MATHFN (LOG2)
1901 CASE_MATHFN (LOGB)
1902 CASE_MATHFN (LRINT)
1903 CASE_MATHFN (LROUND)
1904 CASE_MATHFN (MODF)
1905 CASE_MATHFN (NAN)
1906 CASE_MATHFN (NANS)
1907 CASE_MATHFN (NEARBYINT)
1908 CASE_MATHFN (NEXTAFTER)
1909 CASE_MATHFN (NEXTTOWARD)
1910 CASE_MATHFN (POW)
1911 CASE_MATHFN (POWI)
1912 CASE_MATHFN (POW10)
1913 CASE_MATHFN (REMAINDER)
1914 CASE_MATHFN (REMQUO)
1915 CASE_MATHFN (RINT)
1916 CASE_MATHFN (ROUND)
1917 CASE_MATHFN (SCALB)
1918 CASE_MATHFN (SCALBLN)
1919 CASE_MATHFN (SCALBN)
1920 CASE_MATHFN (SIGNBIT)
1921 CASE_MATHFN (SIGNIFICAND)
1922 CASE_MATHFN (SIN)
1923 CASE_MATHFN (SINCOS)
1924 CASE_MATHFN (SINH)
1925 CASE_MATHFN (SQRT)
1926 CASE_MATHFN (TAN)
1927 CASE_MATHFN (TANH)
1928 CASE_MATHFN (TGAMMA)
1929 CASE_MATHFN (TRUNC)
1930 CASE_MATHFN (Y0)
1931 CASE_MATHFN (Y1)
1932 CASE_MATHFN (YN)
1933
1934 default:
1935 return END_BUILTINS;
1936 }
1937
1938 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1939 return fcode;
1940 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1941 return fcodef;
1942 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1943 return fcodel;
1944 else
1945 return END_BUILTINS;
1946 }
1947
1948 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1949 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1950 otherwise use the explicit declaration. If we can't do the conversion,
1951 return null. */
1952
1953 static tree
1954 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1955 {
1956 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1957 if (fcode2 == END_BUILTINS)
1958 return NULL_TREE;
1959
1960 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1961 return NULL_TREE;
1962
1963 return builtin_decl_explicit (fcode2);
1964 }
1965
1966 /* Like mathfn_built_in_1, but always use the implicit array. */
1967
1968 tree
1969 mathfn_built_in (tree type, combined_fn fn)
1970 {
1971 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1972 }
1973
1974 /* Like mathfn_built_in_1, but take a built_in_function and
1975 always use the implicit array. */
1976
1977 tree
1978 mathfn_built_in (tree type, enum built_in_function fn)
1979 {
1980 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1981 }
1982
1983 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1984 return its code, otherwise return IFN_LAST. Note that this function
1985 only tests whether the function is defined in internals.def, not whether
1986 it is actually available on the target. */
1987
1988 internal_fn
1989 associated_internal_fn (tree fndecl)
1990 {
1991 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1992 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1993 switch (DECL_FUNCTION_CODE (fndecl))
1994 {
1995 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1996 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1997 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1998 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1999 #include "internal-fn.def"
2000
2001 CASE_FLT_FN (BUILT_IN_POW10):
2002 return IFN_EXP10;
2003
2004 CASE_FLT_FN (BUILT_IN_DREM):
2005 return IFN_REMAINDER;
2006
2007 CASE_FLT_FN (BUILT_IN_SCALBN):
2008 CASE_FLT_FN (BUILT_IN_SCALBLN):
2009 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2010 return IFN_LDEXP;
2011 return IFN_LAST;
2012
2013 default:
2014 return IFN_LAST;
2015 }
2016 }
2017
2018 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2019 on the current target by a call to an internal function, return the
2020 code of that internal function, otherwise return IFN_LAST. The caller
2021 is responsible for ensuring that any side-effects of the built-in
2022 call are dealt with correctly. E.g. if CALL sets errno, the caller
2023 must decide that the errno result isn't needed or make it available
2024 in some other way. */
2025
2026 internal_fn
2027 replacement_internal_fn (gcall *call)
2028 {
2029 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2030 {
2031 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2032 if (ifn != IFN_LAST)
2033 {
2034 tree_pair types = direct_internal_fn_types (ifn, call);
2035 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2036 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2037 return ifn;
2038 }
2039 }
2040 return IFN_LAST;
2041 }
2042
2043 /* Expand a call to the builtin trinary math functions (fma).
2044 Return NULL_RTX if a normal call should be emitted rather than expanding the
2045 function in-line. EXP is the expression that is a call to the builtin
2046 function; if convenient, the result should be placed in TARGET.
2047 SUBTARGET may be used as the target for computing one of EXP's
2048 operands. */
2049
2050 static rtx
2051 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2052 {
2053 optab builtin_optab;
2054 rtx op0, op1, op2, result;
2055 rtx_insn *insns;
2056 tree fndecl = get_callee_fndecl (exp);
2057 tree arg0, arg1, arg2;
2058 machine_mode mode;
2059
2060 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2061 return NULL_RTX;
2062
2063 arg0 = CALL_EXPR_ARG (exp, 0);
2064 arg1 = CALL_EXPR_ARG (exp, 1);
2065 arg2 = CALL_EXPR_ARG (exp, 2);
2066
2067 switch (DECL_FUNCTION_CODE (fndecl))
2068 {
2069 CASE_FLT_FN (BUILT_IN_FMA):
2070 builtin_optab = fma_optab; break;
2071 default:
2072 gcc_unreachable ();
2073 }
2074
2075 /* Make a suitable register to place result in. */
2076 mode = TYPE_MODE (TREE_TYPE (exp));
2077
2078 /* Before working hard, check whether the instruction is available. */
2079 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2080 return NULL_RTX;
2081
2082 result = gen_reg_rtx (mode);
2083
2084 /* Always stabilize the argument list. */
2085 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2086 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2087 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2088
2089 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2090 op1 = expand_normal (arg1);
2091 op2 = expand_normal (arg2);
2092
2093 start_sequence ();
2094
2095 /* Compute into RESULT.
2096 Set RESULT to wherever the result comes back. */
2097 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2098 result, 0);
2099
2100 /* If we were unable to expand via the builtin, stop the sequence
2101 (without outputting the insns) and call to the library function
2102 with the stabilized argument list. */
2103 if (result == 0)
2104 {
2105 end_sequence ();
2106 return expand_call (exp, target, target == const0_rtx);
2107 }
2108
2109 /* Output the entire sequence. */
2110 insns = get_insns ();
2111 end_sequence ();
2112 emit_insn (insns);
2113
2114 return result;
2115 }
2116
2117 /* Expand a call to the builtin sin and cos math functions.
2118 Return NULL_RTX if a normal call should be emitted rather than expanding the
2119 function in-line. EXP is the expression that is a call to the builtin
2120 function; if convenient, the result should be placed in TARGET.
2121 SUBTARGET may be used as the target for computing one of EXP's
2122 operands. */
2123
2124 static rtx
2125 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2126 {
2127 optab builtin_optab;
2128 rtx op0;
2129 rtx_insn *insns;
2130 tree fndecl = get_callee_fndecl (exp);
2131 machine_mode mode;
2132 tree arg;
2133
2134 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2135 return NULL_RTX;
2136
2137 arg = CALL_EXPR_ARG (exp, 0);
2138
2139 switch (DECL_FUNCTION_CODE (fndecl))
2140 {
2141 CASE_FLT_FN (BUILT_IN_SIN):
2142 CASE_FLT_FN (BUILT_IN_COS):
2143 builtin_optab = sincos_optab; break;
2144 default:
2145 gcc_unreachable ();
2146 }
2147
2148 /* Make a suitable register to place result in. */
2149 mode = TYPE_MODE (TREE_TYPE (exp));
2150
2151 /* Check if sincos insn is available, otherwise fallback
2152 to sin or cos insn. */
2153 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2154 switch (DECL_FUNCTION_CODE (fndecl))
2155 {
2156 CASE_FLT_FN (BUILT_IN_SIN):
2157 builtin_optab = sin_optab; break;
2158 CASE_FLT_FN (BUILT_IN_COS):
2159 builtin_optab = cos_optab; break;
2160 default:
2161 gcc_unreachable ();
2162 }
2163
2164 /* Before working hard, check whether the instruction is available. */
2165 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2166 {
2167 rtx result = gen_reg_rtx (mode);
2168
2169 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2170 need to expand the argument again. This way, we will not perform
2171 side-effects more the once. */
2172 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2173
2174 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2175
2176 start_sequence ();
2177
2178 /* Compute into RESULT.
2179 Set RESULT to wherever the result comes back. */
2180 if (builtin_optab == sincos_optab)
2181 {
2182 int ok;
2183
2184 switch (DECL_FUNCTION_CODE (fndecl))
2185 {
2186 CASE_FLT_FN (BUILT_IN_SIN):
2187 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2188 break;
2189 CASE_FLT_FN (BUILT_IN_COS):
2190 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2191 break;
2192 default:
2193 gcc_unreachable ();
2194 }
2195 gcc_assert (ok);
2196 }
2197 else
2198 result = expand_unop (mode, builtin_optab, op0, result, 0);
2199
2200 if (result != 0)
2201 {
2202 /* Output the entire sequence. */
2203 insns = get_insns ();
2204 end_sequence ();
2205 emit_insn (insns);
2206 return result;
2207 }
2208
2209 /* If we were unable to expand via the builtin, stop the sequence
2210 (without outputting the insns) and call to the library function
2211 with the stabilized argument list. */
2212 end_sequence ();
2213 }
2214
2215 return expand_call (exp, target, target == const0_rtx);
2216 }
2217
2218 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2219 return an RTL instruction code that implements the functionality.
2220 If that isn't possible or available return CODE_FOR_nothing. */
2221
2222 static enum insn_code
2223 interclass_mathfn_icode (tree arg, tree fndecl)
2224 {
2225 bool errno_set = false;
2226 optab builtin_optab = unknown_optab;
2227 machine_mode mode;
2228
2229 switch (DECL_FUNCTION_CODE (fndecl))
2230 {
2231 CASE_FLT_FN (BUILT_IN_ILOGB):
2232 errno_set = true; builtin_optab = ilogb_optab; break;
2233 CASE_FLT_FN (BUILT_IN_ISINF):
2234 builtin_optab = isinf_optab; break;
2235 case BUILT_IN_ISNORMAL:
2236 case BUILT_IN_ISFINITE:
2237 CASE_FLT_FN (BUILT_IN_FINITE):
2238 case BUILT_IN_FINITED32:
2239 case BUILT_IN_FINITED64:
2240 case BUILT_IN_FINITED128:
2241 case BUILT_IN_ISINFD32:
2242 case BUILT_IN_ISINFD64:
2243 case BUILT_IN_ISINFD128:
2244 /* These builtins have no optabs (yet). */
2245 break;
2246 default:
2247 gcc_unreachable ();
2248 }
2249
2250 /* There's no easy way to detect the case we need to set EDOM. */
2251 if (flag_errno_math && errno_set)
2252 return CODE_FOR_nothing;
2253
2254 /* Optab mode depends on the mode of the input argument. */
2255 mode = TYPE_MODE (TREE_TYPE (arg));
2256
2257 if (builtin_optab)
2258 return optab_handler (builtin_optab, mode);
2259 return CODE_FOR_nothing;
2260 }
2261
2262 /* Expand a call to one of the builtin math functions that operate on
2263 floating point argument and output an integer result (ilogb, isinf,
2264 isnan, etc).
2265 Return 0 if a normal call should be emitted rather than expanding the
2266 function in-line. EXP is the expression that is a call to the builtin
2267 function; if convenient, the result should be placed in TARGET. */
2268
2269 static rtx
2270 expand_builtin_interclass_mathfn (tree exp, rtx target)
2271 {
2272 enum insn_code icode = CODE_FOR_nothing;
2273 rtx op0;
2274 tree fndecl = get_callee_fndecl (exp);
2275 machine_mode mode;
2276 tree arg;
2277
2278 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2279 return NULL_RTX;
2280
2281 arg = CALL_EXPR_ARG (exp, 0);
2282 icode = interclass_mathfn_icode (arg, fndecl);
2283 mode = TYPE_MODE (TREE_TYPE (arg));
2284
2285 if (icode != CODE_FOR_nothing)
2286 {
2287 struct expand_operand ops[1];
2288 rtx_insn *last = get_last_insn ();
2289 tree orig_arg = arg;
2290
2291 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2292 need to expand the argument again. This way, we will not perform
2293 side-effects more the once. */
2294 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2295
2296 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2297
2298 if (mode != GET_MODE (op0))
2299 op0 = convert_to_mode (mode, op0, 0);
2300
2301 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2302 if (maybe_legitimize_operands (icode, 0, 1, ops)
2303 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2304 return ops[0].value;
2305
2306 delete_insns_since (last);
2307 CALL_EXPR_ARG (exp, 0) = orig_arg;
2308 }
2309
2310 return NULL_RTX;
2311 }
2312
2313 /* Expand a call to the builtin sincos math function.
2314 Return NULL_RTX if a normal call should be emitted rather than expanding the
2315 function in-line. EXP is the expression that is a call to the builtin
2316 function. */
2317
2318 static rtx
2319 expand_builtin_sincos (tree exp)
2320 {
2321 rtx op0, op1, op2, target1, target2;
2322 machine_mode mode;
2323 tree arg, sinp, cosp;
2324 int result;
2325 location_t loc = EXPR_LOCATION (exp);
2326 tree alias_type, alias_off;
2327
2328 if (!validate_arglist (exp, REAL_TYPE,
2329 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2330 return NULL_RTX;
2331
2332 arg = CALL_EXPR_ARG (exp, 0);
2333 sinp = CALL_EXPR_ARG (exp, 1);
2334 cosp = CALL_EXPR_ARG (exp, 2);
2335
2336 /* Make a suitable register to place result in. */
2337 mode = TYPE_MODE (TREE_TYPE (arg));
2338
2339 /* Check if sincos insn is available, otherwise emit the call. */
2340 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2341 return NULL_RTX;
2342
2343 target1 = gen_reg_rtx (mode);
2344 target2 = gen_reg_rtx (mode);
2345
2346 op0 = expand_normal (arg);
2347 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2348 alias_off = build_int_cst (alias_type, 0);
2349 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2350 sinp, alias_off));
2351 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2352 cosp, alias_off));
2353
2354 /* Compute into target1 and target2.
2355 Set TARGET to wherever the result comes back. */
2356 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2357 gcc_assert (result);
2358
2359 /* Move target1 and target2 to the memory locations indicated
2360 by op1 and op2. */
2361 emit_move_insn (op1, target1);
2362 emit_move_insn (op2, target2);
2363
2364 return const0_rtx;
2365 }
2366
2367 /* Expand a call to the internal cexpi builtin to the sincos math function.
2368 EXP is the expression that is a call to the builtin function; if convenient,
2369 the result should be placed in TARGET. */
2370
2371 static rtx
2372 expand_builtin_cexpi (tree exp, rtx target)
2373 {
2374 tree fndecl = get_callee_fndecl (exp);
2375 tree arg, type;
2376 machine_mode mode;
2377 rtx op0, op1, op2;
2378 location_t loc = EXPR_LOCATION (exp);
2379
2380 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2381 return NULL_RTX;
2382
2383 arg = CALL_EXPR_ARG (exp, 0);
2384 type = TREE_TYPE (arg);
2385 mode = TYPE_MODE (TREE_TYPE (arg));
2386
2387 /* Try expanding via a sincos optab, fall back to emitting a libcall
2388 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2389 is only generated from sincos, cexp or if we have either of them. */
2390 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2391 {
2392 op1 = gen_reg_rtx (mode);
2393 op2 = gen_reg_rtx (mode);
2394
2395 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2396
2397 /* Compute into op1 and op2. */
2398 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2399 }
2400 else if (targetm.libc_has_function (function_sincos))
2401 {
2402 tree call, fn = NULL_TREE;
2403 tree top1, top2;
2404 rtx op1a, op2a;
2405
2406 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2407 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2408 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2409 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2410 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2411 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2412 else
2413 gcc_unreachable ();
2414
2415 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2416 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2417 op1a = copy_addr_to_reg (XEXP (op1, 0));
2418 op2a = copy_addr_to_reg (XEXP (op2, 0));
2419 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2420 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2421
2422 /* Make sure not to fold the sincos call again. */
2423 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2424 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2425 call, 3, arg, top1, top2));
2426 }
2427 else
2428 {
2429 tree call, fn = NULL_TREE, narg;
2430 tree ctype = build_complex_type (type);
2431
2432 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2433 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2434 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2435 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2436 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2437 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2438 else
2439 gcc_unreachable ();
2440
2441 /* If we don't have a decl for cexp create one. This is the
2442 friendliest fallback if the user calls __builtin_cexpi
2443 without full target C99 function support. */
2444 if (fn == NULL_TREE)
2445 {
2446 tree fntype;
2447 const char *name = NULL;
2448
2449 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2450 name = "cexpf";
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2452 name = "cexp";
2453 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2454 name = "cexpl";
2455
2456 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2457 fn = build_fn_decl (name, fntype);
2458 }
2459
2460 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2461 build_real (type, dconst0), arg);
2462
2463 /* Make sure not to fold the cexp call again. */
2464 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2465 return expand_expr (build_call_nary (ctype, call, 1, narg),
2466 target, VOIDmode, EXPAND_NORMAL);
2467 }
2468
2469 /* Now build the proper return type. */
2470 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2471 make_tree (TREE_TYPE (arg), op2),
2472 make_tree (TREE_TYPE (arg), op1)),
2473 target, VOIDmode, EXPAND_NORMAL);
2474 }
2475
2476 /* Conveniently construct a function call expression. FNDECL names the
2477 function to be called, N is the number of arguments, and the "..."
2478 parameters are the argument expressions. Unlike build_call_exr
2479 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2480
2481 static tree
2482 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2483 {
2484 va_list ap;
2485 tree fntype = TREE_TYPE (fndecl);
2486 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2487
2488 va_start (ap, n);
2489 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2490 va_end (ap);
2491 SET_EXPR_LOCATION (fn, loc);
2492 return fn;
2493 }
2494
2495 /* Expand a call to one of the builtin rounding functions gcc defines
2496 as an extension (lfloor and lceil). As these are gcc extensions we
2497 do not need to worry about setting errno to EDOM.
2498 If expanding via optab fails, lower expression to (int)(floor(x)).
2499 EXP is the expression that is a call to the builtin function;
2500 if convenient, the result should be placed in TARGET. */
2501
2502 static rtx
2503 expand_builtin_int_roundingfn (tree exp, rtx target)
2504 {
2505 convert_optab builtin_optab;
2506 rtx op0, tmp;
2507 rtx_insn *insns;
2508 tree fndecl = get_callee_fndecl (exp);
2509 enum built_in_function fallback_fn;
2510 tree fallback_fndecl;
2511 machine_mode mode;
2512 tree arg;
2513
2514 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2515 gcc_unreachable ();
2516
2517 arg = CALL_EXPR_ARG (exp, 0);
2518
2519 switch (DECL_FUNCTION_CODE (fndecl))
2520 {
2521 CASE_FLT_FN (BUILT_IN_ICEIL):
2522 CASE_FLT_FN (BUILT_IN_LCEIL):
2523 CASE_FLT_FN (BUILT_IN_LLCEIL):
2524 builtin_optab = lceil_optab;
2525 fallback_fn = BUILT_IN_CEIL;
2526 break;
2527
2528 CASE_FLT_FN (BUILT_IN_IFLOOR):
2529 CASE_FLT_FN (BUILT_IN_LFLOOR):
2530 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2531 builtin_optab = lfloor_optab;
2532 fallback_fn = BUILT_IN_FLOOR;
2533 break;
2534
2535 default:
2536 gcc_unreachable ();
2537 }
2538
2539 /* Make a suitable register to place result in. */
2540 mode = TYPE_MODE (TREE_TYPE (exp));
2541
2542 target = gen_reg_rtx (mode);
2543
2544 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2545 need to expand the argument again. This way, we will not perform
2546 side-effects more the once. */
2547 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2548
2549 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2550
2551 start_sequence ();
2552
2553 /* Compute into TARGET. */
2554 if (expand_sfix_optab (target, op0, builtin_optab))
2555 {
2556 /* Output the entire sequence. */
2557 insns = get_insns ();
2558 end_sequence ();
2559 emit_insn (insns);
2560 return target;
2561 }
2562
2563 /* If we were unable to expand via the builtin, stop the sequence
2564 (without outputting the insns). */
2565 end_sequence ();
2566
2567 /* Fall back to floating point rounding optab. */
2568 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2569
2570 /* For non-C99 targets we may end up without a fallback fndecl here
2571 if the user called __builtin_lfloor directly. In this case emit
2572 a call to the floor/ceil variants nevertheless. This should result
2573 in the best user experience for not full C99 targets. */
2574 if (fallback_fndecl == NULL_TREE)
2575 {
2576 tree fntype;
2577 const char *name = NULL;
2578
2579 switch (DECL_FUNCTION_CODE (fndecl))
2580 {
2581 case BUILT_IN_ICEIL:
2582 case BUILT_IN_LCEIL:
2583 case BUILT_IN_LLCEIL:
2584 name = "ceil";
2585 break;
2586 case BUILT_IN_ICEILF:
2587 case BUILT_IN_LCEILF:
2588 case BUILT_IN_LLCEILF:
2589 name = "ceilf";
2590 break;
2591 case BUILT_IN_ICEILL:
2592 case BUILT_IN_LCEILL:
2593 case BUILT_IN_LLCEILL:
2594 name = "ceill";
2595 break;
2596 case BUILT_IN_IFLOOR:
2597 case BUILT_IN_LFLOOR:
2598 case BUILT_IN_LLFLOOR:
2599 name = "floor";
2600 break;
2601 case BUILT_IN_IFLOORF:
2602 case BUILT_IN_LFLOORF:
2603 case BUILT_IN_LLFLOORF:
2604 name = "floorf";
2605 break;
2606 case BUILT_IN_IFLOORL:
2607 case BUILT_IN_LFLOORL:
2608 case BUILT_IN_LLFLOORL:
2609 name = "floorl";
2610 break;
2611 default:
2612 gcc_unreachable ();
2613 }
2614
2615 fntype = build_function_type_list (TREE_TYPE (arg),
2616 TREE_TYPE (arg), NULL_TREE);
2617 fallback_fndecl = build_fn_decl (name, fntype);
2618 }
2619
2620 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2621
2622 tmp = expand_normal (exp);
2623 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2624
2625 /* Truncate the result of floating point optab to integer
2626 via expand_fix (). */
2627 target = gen_reg_rtx (mode);
2628 expand_fix (target, tmp, 0);
2629
2630 return target;
2631 }
2632
2633 /* Expand a call to one of the builtin math functions doing integer
2634 conversion (lrint).
2635 Return 0 if a normal call should be emitted rather than expanding the
2636 function in-line. EXP is the expression that is a call to the builtin
2637 function; if convenient, the result should be placed in TARGET. */
2638
2639 static rtx
2640 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2641 {
2642 convert_optab builtin_optab;
2643 rtx op0;
2644 rtx_insn *insns;
2645 tree fndecl = get_callee_fndecl (exp);
2646 tree arg;
2647 machine_mode mode;
2648 enum built_in_function fallback_fn = BUILT_IN_NONE;
2649
2650 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2651 gcc_unreachable ();
2652
2653 arg = CALL_EXPR_ARG (exp, 0);
2654
2655 switch (DECL_FUNCTION_CODE (fndecl))
2656 {
2657 CASE_FLT_FN (BUILT_IN_IRINT):
2658 fallback_fn = BUILT_IN_LRINT;
2659 gcc_fallthrough ();
2660 CASE_FLT_FN (BUILT_IN_LRINT):
2661 CASE_FLT_FN (BUILT_IN_LLRINT):
2662 builtin_optab = lrint_optab;
2663 break;
2664
2665 CASE_FLT_FN (BUILT_IN_IROUND):
2666 fallback_fn = BUILT_IN_LROUND;
2667 gcc_fallthrough ();
2668 CASE_FLT_FN (BUILT_IN_LROUND):
2669 CASE_FLT_FN (BUILT_IN_LLROUND):
2670 builtin_optab = lround_optab;
2671 break;
2672
2673 default:
2674 gcc_unreachable ();
2675 }
2676
2677 /* There's no easy way to detect the case we need to set EDOM. */
2678 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2679 return NULL_RTX;
2680
2681 /* Make a suitable register to place result in. */
2682 mode = TYPE_MODE (TREE_TYPE (exp));
2683
2684 /* There's no easy way to detect the case we need to set EDOM. */
2685 if (!flag_errno_math)
2686 {
2687 rtx result = gen_reg_rtx (mode);
2688
2689 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2690 need to expand the argument again. This way, we will not perform
2691 side-effects more the once. */
2692 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2693
2694 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2695
2696 start_sequence ();
2697
2698 if (expand_sfix_optab (result, op0, builtin_optab))
2699 {
2700 /* Output the entire sequence. */
2701 insns = get_insns ();
2702 end_sequence ();
2703 emit_insn (insns);
2704 return result;
2705 }
2706
2707 /* If we were unable to expand via the builtin, stop the sequence
2708 (without outputting the insns) and call to the library function
2709 with the stabilized argument list. */
2710 end_sequence ();
2711 }
2712
2713 if (fallback_fn != BUILT_IN_NONE)
2714 {
2715 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2716 targets, (int) round (x) should never be transformed into
2717 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2718 a call to lround in the hope that the target provides at least some
2719 C99 functions. This should result in the best user experience for
2720 not full C99 targets. */
2721 tree fallback_fndecl = mathfn_built_in_1
2722 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2723
2724 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2725 fallback_fndecl, 1, arg);
2726
2727 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2728 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2729 return convert_to_mode (mode, target, 0);
2730 }
2731
2732 return expand_call (exp, target, target == const0_rtx);
2733 }
2734
2735 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2736 a normal call should be emitted rather than expanding the function
2737 in-line. EXP is the expression that is a call to the builtin
2738 function; if convenient, the result should be placed in TARGET. */
2739
2740 static rtx
2741 expand_builtin_powi (tree exp, rtx target)
2742 {
2743 tree arg0, arg1;
2744 rtx op0, op1;
2745 machine_mode mode;
2746 machine_mode mode2;
2747
2748 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2749 return NULL_RTX;
2750
2751 arg0 = CALL_EXPR_ARG (exp, 0);
2752 arg1 = CALL_EXPR_ARG (exp, 1);
2753 mode = TYPE_MODE (TREE_TYPE (exp));
2754
2755 /* Emit a libcall to libgcc. */
2756
2757 /* Mode of the 2nd argument must match that of an int. */
2758 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2759
2760 if (target == NULL_RTX)
2761 target = gen_reg_rtx (mode);
2762
2763 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2764 if (GET_MODE (op0) != mode)
2765 op0 = convert_to_mode (mode, op0, 0);
2766 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2767 if (GET_MODE (op1) != mode2)
2768 op1 = convert_to_mode (mode2, op1, 0);
2769
2770 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2771 target, LCT_CONST, mode, 2,
2772 op0, mode, op1, mode2);
2773
2774 return target;
2775 }
2776
2777 /* Expand expression EXP which is a call to the strlen builtin. Return
2778 NULL_RTX if we failed the caller should emit a normal call, otherwise
2779 try to get the result in TARGET, if convenient. */
2780
2781 static rtx
2782 expand_builtin_strlen (tree exp, rtx target,
2783 machine_mode target_mode)
2784 {
2785 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2786 return NULL_RTX;
2787 else
2788 {
2789 struct expand_operand ops[4];
2790 rtx pat;
2791 tree len;
2792 tree src = CALL_EXPR_ARG (exp, 0);
2793 rtx src_reg;
2794 rtx_insn *before_strlen;
2795 machine_mode insn_mode = target_mode;
2796 enum insn_code icode = CODE_FOR_nothing;
2797 unsigned int align;
2798
2799 /* If the length can be computed at compile-time, return it. */
2800 len = c_strlen (src, 0);
2801 if (len)
2802 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2803
2804 /* If the length can be computed at compile-time and is constant
2805 integer, but there are side-effects in src, evaluate
2806 src for side-effects, then return len.
2807 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2808 can be optimized into: i++; x = 3; */
2809 len = c_strlen (src, 1);
2810 if (len && TREE_CODE (len) == INTEGER_CST)
2811 {
2812 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2813 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2814 }
2815
2816 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2817
2818 /* If SRC is not a pointer type, don't do this operation inline. */
2819 if (align == 0)
2820 return NULL_RTX;
2821
2822 /* Bail out if we can't compute strlen in the right mode. */
2823 while (insn_mode != VOIDmode)
2824 {
2825 icode = optab_handler (strlen_optab, insn_mode);
2826 if (icode != CODE_FOR_nothing)
2827 break;
2828
2829 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2830 }
2831 if (insn_mode == VOIDmode)
2832 return NULL_RTX;
2833
2834 /* Make a place to hold the source address. We will not expand
2835 the actual source until we are sure that the expansion will
2836 not fail -- there are trees that cannot be expanded twice. */
2837 src_reg = gen_reg_rtx (Pmode);
2838
2839 /* Mark the beginning of the strlen sequence so we can emit the
2840 source operand later. */
2841 before_strlen = get_last_insn ();
2842
2843 create_output_operand (&ops[0], target, insn_mode);
2844 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2845 create_integer_operand (&ops[2], 0);
2846 create_integer_operand (&ops[3], align);
2847 if (!maybe_expand_insn (icode, 4, ops))
2848 return NULL_RTX;
2849
2850 /* Now that we are assured of success, expand the source. */
2851 start_sequence ();
2852 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2853 if (pat != src_reg)
2854 {
2855 #ifdef POINTERS_EXTEND_UNSIGNED
2856 if (GET_MODE (pat) != Pmode)
2857 pat = convert_to_mode (Pmode, pat,
2858 POINTERS_EXTEND_UNSIGNED);
2859 #endif
2860 emit_move_insn (src_reg, pat);
2861 }
2862 pat = get_insns ();
2863 end_sequence ();
2864
2865 if (before_strlen)
2866 emit_insn_after (pat, before_strlen);
2867 else
2868 emit_insn_before (pat, get_insns ());
2869
2870 /* Return the value in the proper mode for this function. */
2871 if (GET_MODE (ops[0].value) == target_mode)
2872 target = ops[0].value;
2873 else if (target != 0)
2874 convert_move (target, ops[0].value, 0);
2875 else
2876 target = convert_to_mode (target_mode, ops[0].value, 0);
2877
2878 return target;
2879 }
2880 }
2881
2882 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2883 bytes from constant string DATA + OFFSET and return it as target
2884 constant. */
2885
2886 static rtx
2887 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2888 machine_mode mode)
2889 {
2890 const char *str = (const char *) data;
2891
2892 gcc_assert (offset >= 0
2893 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2894 <= strlen (str) + 1));
2895
2896 return c_readstr (str + offset, mode);
2897 }
2898
2899 /* LEN specify length of the block of memcpy/memset operation.
2900 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2901 In some cases we can make very likely guess on max size, then we
2902 set it into PROBABLE_MAX_SIZE. */
2903
2904 static void
2905 determine_block_size (tree len, rtx len_rtx,
2906 unsigned HOST_WIDE_INT *min_size,
2907 unsigned HOST_WIDE_INT *max_size,
2908 unsigned HOST_WIDE_INT *probable_max_size)
2909 {
2910 if (CONST_INT_P (len_rtx))
2911 {
2912 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2913 return;
2914 }
2915 else
2916 {
2917 wide_int min, max;
2918 enum value_range_type range_type = VR_UNDEFINED;
2919
2920 /* Determine bounds from the type. */
2921 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2922 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2923 else
2924 *min_size = 0;
2925 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2926 *probable_max_size = *max_size
2927 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2928 else
2929 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2930
2931 if (TREE_CODE (len) == SSA_NAME)
2932 range_type = get_range_info (len, &min, &max);
2933 if (range_type == VR_RANGE)
2934 {
2935 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2936 *min_size = min.to_uhwi ();
2937 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2938 *probable_max_size = *max_size = max.to_uhwi ();
2939 }
2940 else if (range_type == VR_ANTI_RANGE)
2941 {
2942 /* Anti range 0...N lets us to determine minimal size to N+1. */
2943 if (min == 0)
2944 {
2945 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2946 *min_size = max.to_uhwi () + 1;
2947 }
2948 /* Code like
2949
2950 int n;
2951 if (n < 100)
2952 memcpy (a, b, n)
2953
2954 Produce anti range allowing negative values of N. We still
2955 can use the information and make a guess that N is not negative.
2956 */
2957 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2958 *probable_max_size = min.to_uhwi () - 1;
2959 }
2960 }
2961 gcc_checking_assert (*max_size <=
2962 (unsigned HOST_WIDE_INT)
2963 GET_MODE_MASK (GET_MODE (len_rtx)));
2964 }
2965
2966 /* Try to verify that the sizes and lengths of the arguments to a string
2967 manipulation function given by EXP are within valid bounds and that
2968 the operation does not lead to buffer overflow. Arguments other than
2969 EXP may be null. When non-null, the arguments have the following
2970 meaning:
2971 SIZE is the user-supplied size argument to the function (such as in
2972 memcpy(d, s, SIZE) or strncpy(d, s, SIZE). It specifies the exact
2973 number of bytes to write.
2974 MAXLEN is the user-supplied bound on the length of the source sequence
2975 (such as in strncat(d, s, N). It specifies the upper limit on the number
2976 of bytes to write.
2977 SRC is the source string (such as in strcpy(d, s)) when the expression
2978 EXP is a string function call (as opposed to a memory call like memcpy).
2979 As an exception, SRC can also be an integer denoting the precomputed
2980 size of the source string or object (for functions like memcpy).
2981 OBJSIZE is the size of the destination object specified by the last
2982 argument to the _chk builtins, typically resulting from the expansion
2983 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
2984 OBJSIZE).
2985
2986 When SIZE is null LEN is checked to verify that it doesn't exceed
2987 SIZE_MAX.
2988
2989 If the call is successfully verified as safe from buffer overflow
2990 the function returns true, otherwise false.. */
2991
2992 static bool
2993 check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
2994 {
2995 /* The size of the largest object is half the address space, or
2996 SSIZE_MAX. (This is way too permissive.) */
2997 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
2998
2999 tree slen = NULL_TREE;
3000
3001 tree range[2] = { NULL_TREE, NULL_TREE };
3002
3003 /* Set to true when the exact number of bytes written by a string
3004 function like strcpy is not known and the only thing that is
3005 known is that it must be at least one (for the terminating nul). */
3006 bool at_least_one = false;
3007 if (src)
3008 {
3009 /* SRC is normally a pointer to string but as a special case
3010 it can be an integer denoting the length of a string. */
3011 if (POINTER_TYPE_P (TREE_TYPE (src)))
3012 {
3013 /* Try to determine the range of lengths the source string
3014 refers to. If it can be determined and is less than
3015 the upper bound given by MAXLEN add one to it for
3016 the terminating nul. Otherwise, set it to one for
3017 the same reason, or to MAXLEN as appropriate. */
3018 get_range_strlen (src, range);
3019 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3020 {
3021 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3022 range[0] = range[1] = maxlen;
3023 else
3024 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3025 range[0], size_one_node);
3026
3027 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3028 range[1] = maxlen;
3029 else if (!integer_all_onesp (range[1]))
3030 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3031 range[1], size_one_node);
3032
3033 slen = range[0];
3034 }
3035 else
3036 {
3037 at_least_one = true;
3038 slen = size_one_node;
3039 }
3040 }
3041 else
3042 slen = src;
3043 }
3044
3045 if (!size && !maxlen)
3046 {
3047 /* When the only available piece of data is the object size
3048 there is nothing to do. */
3049 if (!slen)
3050 return true;
3051
3052 /* Otherwise, when the length of the source sequence is known
3053 (as with with strlen), set SIZE to it. */
3054 if (!range[0])
3055 size = slen;
3056 }
3057
3058 if (!objsize)
3059 objsize = maxobjsize;
3060
3061 /* The SIZE is exact if it's non-null, constant, and in range of
3062 unsigned HOST_WIDE_INT. */
3063 bool exactsize = size && tree_fits_uhwi_p (size);
3064
3065 if (size)
3066 get_size_range (size, range);
3067
3068 /* First check the number of bytes to be written against the maximum
3069 object size. */
3070 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3071 {
3072 location_t loc = tree_nonartificial_location (exp);
3073 loc = expansion_point_location_if_in_system_header (loc);
3074
3075 if (range[0] == range[1])
3076 warning_at (loc, opt,
3077 "%K%qD specified size %E "
3078 "exceeds maximum object size %E",
3079 exp, get_callee_fndecl (exp), range[0], maxobjsize);
3080 else
3081 warning_at (loc, opt,
3082 "%K%qD specified size between %E and %E "
3083 "exceeds maximum object size %E",
3084 exp, get_callee_fndecl (exp),
3085 range[0], range[1], maxobjsize);
3086 return false;
3087 }
3088
3089 /* Next check the number of bytes to be written against the destination
3090 object size. */
3091 if (range[0] || !exactsize || integer_all_onesp (size))
3092 {
3093 if (range[0]
3094 && ((tree_fits_uhwi_p (objsize)
3095 && tree_int_cst_lt (objsize, range[0]))
3096 || (tree_fits_uhwi_p (size)
3097 && tree_int_cst_lt (size, range[0]))))
3098 {
3099 location_t loc = tree_nonartificial_location (exp);
3100 loc = expansion_point_location_if_in_system_header (loc);
3101
3102 if (size == slen && at_least_one)
3103 {
3104 /* This is a call to strcpy with a destination of 0 size
3105 and a source of unknown length. The call will write
3106 at least one byte past the end of the destination. */
3107 warning_at (loc, opt,
3108 "%K%qD writing %E or more bytes into a region "
3109 "of size %E overflows the destination",
3110 exp, get_callee_fndecl (exp), range[0], objsize);
3111 }
3112 else if (tree_int_cst_equal (range[0], range[1]))
3113 warning_at (loc, opt,
3114 (integer_onep (range[0])
3115 ? G_("%K%qD writing %E byte into a region "
3116 "of size %E overflows the destination")
3117 : G_("%K%qD writing %E bytes into a region "
3118 "of size %E overflows the destination")),
3119 exp, get_callee_fndecl (exp), range[0], objsize);
3120 else if (tree_int_cst_sign_bit (range[1]))
3121 {
3122 /* Avoid printing the upper bound if it's invalid. */
3123 warning_at (loc, opt,
3124 "%K%qD writing %E or more bytes into a region "
3125 "of size %E overflows the destination",
3126 exp, get_callee_fndecl (exp), range[0], objsize);
3127 }
3128 else
3129 warning_at (loc, opt,
3130 "%K%qD writing between %E and %E bytes into "
3131 "a region of size %E overflows the destination",
3132 exp, get_callee_fndecl (exp), range[0], range[1],
3133 objsize);
3134
3135 /* Return error when an overflow has been detected. */
3136 return false;
3137 }
3138 }
3139
3140 /* Check the maximum length of the source sequence against the size
3141 of the destination object if known, or against the maximum size
3142 of an object. */
3143 if (maxlen)
3144 {
3145 get_size_range (maxlen, range);
3146
3147 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3148 {
3149 location_t loc = tree_nonartificial_location (exp);
3150 loc = expansion_point_location_if_in_system_header (loc);
3151
3152 if (tree_int_cst_lt (maxobjsize, range[0]))
3153 {
3154 /* Warn about crazy big sizes first since that's more
3155 likely to be meaningful than saying that the bound
3156 is greater than the object size if both are big. */
3157 if (range[0] == range[1])
3158 warning_at (loc, opt,
3159 "%K%qD specified bound %E "
3160 "exceeds maximum object size %E",
3161 exp, get_callee_fndecl (exp),
3162 range[0], maxobjsize);
3163 else
3164 warning_at (loc, opt,
3165 "%K%qD specified bound between %E and %E "
3166 "exceeds maximum object size %E",
3167 exp, get_callee_fndecl (exp),
3168 range[0], range[1], maxobjsize);
3169
3170 return false;
3171 }
3172
3173 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3174 {
3175 if (tree_int_cst_equal (range[0], range[1]))
3176 warning_at (loc, opt,
3177 "%K%qD specified bound %E "
3178 "exceeds destination size %E",
3179 exp, get_callee_fndecl (exp),
3180 range[0], objsize);
3181 else
3182 warning_at (loc, opt,
3183 "%K%qD specified bound between %E and %E "
3184 "exceeds destination size %E",
3185 exp, get_callee_fndecl (exp),
3186 range[0], range[1], objsize);
3187 return false;
3188 }
3189 }
3190 }
3191
3192 if (slen
3193 && slen == src
3194 && size && range[0]
3195 && tree_int_cst_lt (slen, range[0]))
3196 {
3197 location_t loc = tree_nonartificial_location (exp);
3198
3199 if (tree_int_cst_equal (range[0], range[1]))
3200 warning_at (loc, opt,
3201 (tree_int_cst_equal (range[0], integer_one_node)
3202 ? G_("%K%qD reading %E byte from a region of size %E")
3203 : G_("%K%qD reading %E bytes from a region of size %E")),
3204 exp, get_callee_fndecl (exp), range[0], slen);
3205 else if (tree_int_cst_sign_bit (range[1]))
3206 {
3207 /* Avoid printing the upper bound if it's invalid. */
3208 warning_at (loc, opt,
3209 "%K%qD reading %E or more bytes from a region "
3210 "of size %E",
3211 exp, get_callee_fndecl (exp), range[0], slen);
3212 }
3213 else
3214 warning_at (loc, opt,
3215 "%K%qD reading between %E and %E bytes from a region "
3216 "of size %E",
3217 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3218 return false;
3219 }
3220
3221 return true;
3222 }
3223
3224 /* Helper to compute the size of the object referenced by the DEST
3225 expression which must of of pointer type, using Object Size type
3226 OSTYPE (only the least significant 2 bits are used). Return
3227 the size of the object if successful or NULL when the size cannot
3228 be determined. */
3229
3230 static inline tree
3231 compute_objsize (tree dest, int ostype)
3232 {
3233 unsigned HOST_WIDE_INT size;
3234 if (compute_builtin_object_size (dest, ostype & 3, &size))
3235 return build_int_cst (sizetype, size);
3236
3237 return NULL_TREE;
3238 }
3239
3240 /* Helper to determine and check the sizes of the source and the destination
3241 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3242 call expression, DEST is the destination argument, SRC is the source
3243 argument or null, and LEN is the number of bytes. Use Object Size type-0
3244 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3245 (no overflow or invalid sizes), false otherwise. */
3246
3247 static bool
3248 check_memop_sizes (tree exp, tree dest, tree src, tree size)
3249 {
3250 if (!warn_stringop_overflow)
3251 return true;
3252
3253 /* For functions like memset and memcpy that operate on raw memory
3254 try to determine the size of the largest source and destination
3255 object using type-0 Object Size regardless of the object size
3256 type specified by the option. */
3257 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3258 tree dstsize = compute_objsize (dest, 0);
3259
3260 return check_sizes (OPT_Wstringop_overflow_, exp,
3261 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3262 }
3263
3264 /* Validate memchr arguments without performing any expansion.
3265 Return NULL_RTX. */
3266
3267 static rtx
3268 expand_builtin_memchr (tree exp, rtx)
3269 {
3270 if (!validate_arglist (exp,
3271 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3272 return NULL_RTX;
3273
3274 tree arg1 = CALL_EXPR_ARG (exp, 0);
3275 tree len = CALL_EXPR_ARG (exp, 2);
3276
3277 /* Diagnose calls where the specified length exceeds the size
3278 of the object. */
3279 if (warn_stringop_overflow)
3280 {
3281 tree size = compute_objsize (arg1, 0);
3282 check_sizes (OPT_Wstringop_overflow_,
3283 exp, len, /*maxlen=*/NULL_TREE,
3284 size, /*objsize=*/NULL_TREE);
3285 }
3286
3287 return NULL_RTX;
3288 }
3289
3290 /* Expand a call EXP to the memcpy builtin.
3291 Return NULL_RTX if we failed, the caller should emit a normal call,
3292 otherwise try to get the result in TARGET, if convenient (and in
3293 mode MODE if that's convenient). */
3294
3295 static rtx
3296 expand_builtin_memcpy (tree exp, rtx target)
3297 {
3298 if (!validate_arglist (exp,
3299 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3300 return NULL_RTX;
3301
3302 tree dest = CALL_EXPR_ARG (exp, 0);
3303 tree src = CALL_EXPR_ARG (exp, 1);
3304 tree len = CALL_EXPR_ARG (exp, 2);
3305
3306 check_memop_sizes (exp, dest, src, len);
3307
3308 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3309 /*endp=*/ 0);
3310 }
3311
3312 /* Check a call EXP to the memmove built-in for validity.
3313 Return NULL_RTX on both success and failure. */
3314
3315 static rtx
3316 expand_builtin_memmove (tree exp, rtx)
3317 {
3318 if (!validate_arglist (exp,
3319 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3320 return NULL_RTX;
3321
3322 tree dest = CALL_EXPR_ARG (exp, 0);
3323 tree src = CALL_EXPR_ARG (exp, 1);
3324 tree len = CALL_EXPR_ARG (exp, 2);
3325
3326 check_memop_sizes (exp, dest, src, len);
3327
3328 return NULL_RTX;
3329 }
3330
3331 /* Expand an instrumented call EXP to the memcpy builtin.
3332 Return NULL_RTX if we failed, the caller should emit a normal call,
3333 otherwise try to get the result in TARGET, if convenient (and in
3334 mode MODE if that's convenient). */
3335
3336 static rtx
3337 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3338 {
3339 if (!validate_arglist (exp,
3340 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3341 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3342 INTEGER_TYPE, VOID_TYPE))
3343 return NULL_RTX;
3344 else
3345 {
3346 tree dest = CALL_EXPR_ARG (exp, 0);
3347 tree src = CALL_EXPR_ARG (exp, 2);
3348 tree len = CALL_EXPR_ARG (exp, 4);
3349 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3350 /*end_p=*/ 0);
3351
3352 /* Return src bounds with the result. */
3353 if (res)
3354 {
3355 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3356 expand_normal (CALL_EXPR_ARG (exp, 1)));
3357 res = chkp_join_splitted_slot (res, bnd);
3358 }
3359 return res;
3360 }
3361 }
3362
3363 /* Expand a call EXP to the mempcpy builtin.
3364 Return NULL_RTX if we failed; the caller should emit a normal call,
3365 otherwise try to get the result in TARGET, if convenient (and in
3366 mode MODE if that's convenient). If ENDP is 0 return the
3367 destination pointer, if ENDP is 1 return the end pointer ala
3368 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3369 stpcpy. */
3370
3371 static rtx
3372 expand_builtin_mempcpy (tree exp, rtx target)
3373 {
3374 if (!validate_arglist (exp,
3375 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3376 return NULL_RTX;
3377
3378 tree dest = CALL_EXPR_ARG (exp, 0);
3379 tree src = CALL_EXPR_ARG (exp, 1);
3380 tree len = CALL_EXPR_ARG (exp, 2);
3381
3382 /* Avoid expanding mempcpy into memcpy when the call is determined
3383 to overflow the buffer. This also prevents the same overflow
3384 from being diagnosed again when expanding memcpy. */
3385 if (!check_memop_sizes (exp, dest, src, len))
3386 return NULL_RTX;
3387
3388 return expand_builtin_mempcpy_args (dest, src, len,
3389 target, exp, /*endp=*/ 1);
3390 }
3391
3392 /* Expand an instrumented call EXP to the mempcpy builtin.
3393 Return NULL_RTX if we failed, the caller should emit a normal call,
3394 otherwise try to get the result in TARGET, if convenient (and in
3395 mode MODE if that's convenient). */
3396
3397 static rtx
3398 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3399 {
3400 if (!validate_arglist (exp,
3401 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3402 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3403 INTEGER_TYPE, VOID_TYPE))
3404 return NULL_RTX;
3405 else
3406 {
3407 tree dest = CALL_EXPR_ARG (exp, 0);
3408 tree src = CALL_EXPR_ARG (exp, 2);
3409 tree len = CALL_EXPR_ARG (exp, 4);
3410 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3411 exp, 1);
3412
3413 /* Return src bounds with the result. */
3414 if (res)
3415 {
3416 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3417 expand_normal (CALL_EXPR_ARG (exp, 1)));
3418 res = chkp_join_splitted_slot (res, bnd);
3419 }
3420 return res;
3421 }
3422 }
3423
3424 /* Helper function to do the actual work for expand of memory copy family
3425 functions (memcpy, mempcpy, stpcpy). Expansing should assign LEN bytes
3426 of memory from SRC to DEST and assign to TARGET if convenient.
3427 If ENDP is 0 return the
3428 destination pointer, if ENDP is 1 return the end pointer ala
3429 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3430 stpcpy. */
3431
3432 static rtx
3433 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3434 rtx target, tree exp, int endp)
3435 {
3436 const char *src_str;
3437 unsigned int src_align = get_pointer_alignment (src);
3438 unsigned int dest_align = get_pointer_alignment (dest);
3439 rtx dest_mem, src_mem, dest_addr, len_rtx;
3440 HOST_WIDE_INT expected_size = -1;
3441 unsigned int expected_align = 0;
3442 unsigned HOST_WIDE_INT min_size;
3443 unsigned HOST_WIDE_INT max_size;
3444 unsigned HOST_WIDE_INT probable_max_size;
3445
3446 /* If DEST is not a pointer type, call the normal function. */
3447 if (dest_align == 0)
3448 return NULL_RTX;
3449
3450 /* If either SRC is not a pointer type, don't do this
3451 operation in-line. */
3452 if (src_align == 0)
3453 return NULL_RTX;
3454
3455 if (currently_expanding_gimple_stmt)
3456 stringop_block_profile (currently_expanding_gimple_stmt,
3457 &expected_align, &expected_size);
3458
3459 if (expected_align < dest_align)
3460 expected_align = dest_align;
3461 dest_mem = get_memory_rtx (dest, len);
3462 set_mem_align (dest_mem, dest_align);
3463 len_rtx = expand_normal (len);
3464 determine_block_size (len, len_rtx, &min_size, &max_size,
3465 &probable_max_size);
3466 src_str = c_getstr (src);
3467
3468 /* If SRC is a string constant and block move would be done
3469 by pieces, we can avoid loading the string from memory
3470 and only stored the computed constants. */
3471 if (src_str
3472 && CONST_INT_P (len_rtx)
3473 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3474 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3475 CONST_CAST (char *, src_str),
3476 dest_align, false))
3477 {
3478 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3479 builtin_memcpy_read_str,
3480 CONST_CAST (char *, src_str),
3481 dest_align, false, endp);
3482 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3483 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3484 return dest_mem;
3485 }
3486
3487 src_mem = get_memory_rtx (src, len);
3488 set_mem_align (src_mem, src_align);
3489
3490 /* Copy word part most expediently. */
3491 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3492 CALL_EXPR_TAILCALL (exp)
3493 && (endp == 0 || target == const0_rtx)
3494 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3495 expected_align, expected_size,
3496 min_size, max_size, probable_max_size);
3497
3498 if (dest_addr == 0)
3499 {
3500 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3501 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3502 }
3503
3504 if (endp && target != const0_rtx)
3505 {
3506 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3507 /* stpcpy pointer to last byte. */
3508 if (endp == 2)
3509 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3510 }
3511
3512 return dest_addr;
3513 }
3514
3515 static rtx
3516 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3517 rtx target, tree orig_exp, int endp)
3518 {
3519 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3520 endp);
3521 }
3522
3523 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3524 we failed, the caller should emit a normal call, otherwise try to
3525 get the result in TARGET, if convenient. If ENDP is 0 return the
3526 destination pointer, if ENDP is 1 return the end pointer ala
3527 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3528 stpcpy. */
3529
3530 static rtx
3531 expand_movstr (tree dest, tree src, rtx target, int endp)
3532 {
3533 struct expand_operand ops[3];
3534 rtx dest_mem;
3535 rtx src_mem;
3536
3537 if (!targetm.have_movstr ())
3538 return NULL_RTX;
3539
3540 dest_mem = get_memory_rtx (dest, NULL);
3541 src_mem = get_memory_rtx (src, NULL);
3542 if (!endp)
3543 {
3544 target = force_reg (Pmode, XEXP (dest_mem, 0));
3545 dest_mem = replace_equiv_address (dest_mem, target);
3546 }
3547
3548 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3549 create_fixed_operand (&ops[1], dest_mem);
3550 create_fixed_operand (&ops[2], src_mem);
3551 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3552 return NULL_RTX;
3553
3554 if (endp && target != const0_rtx)
3555 {
3556 target = ops[0].value;
3557 /* movstr is supposed to set end to the address of the NUL
3558 terminator. If the caller requested a mempcpy-like return value,
3559 adjust it. */
3560 if (endp == 1)
3561 {
3562 rtx tem = plus_constant (GET_MODE (target),
3563 gen_lowpart (GET_MODE (target), target), 1);
3564 emit_move_insn (target, force_operand (tem, NULL_RTX));
3565 }
3566 }
3567 return target;
3568 }
3569
3570 /* Do some very basic size validation of a call to the strcpy builtin
3571 given by EXP. Return NULL_RTX to have the built-in expand to a call
3572 to the library function. */
3573
3574 static rtx
3575 expand_builtin_strcat (tree exp, rtx)
3576 {
3577 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3578 || !warn_stringop_overflow)
3579 return NULL_RTX;
3580
3581 tree dest = CALL_EXPR_ARG (exp, 0);
3582 tree src = CALL_EXPR_ARG (exp, 1);
3583
3584 /* There is no way here to determine the length of the string in
3585 the destination to which the SRC string is being appended so
3586 just diagnose cases when the souce string is longer than
3587 the destination object. */
3588
3589 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3590
3591 check_sizes (OPT_Wstringop_overflow_,
3592 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3593
3594 return NULL_RTX;
3595 }
3596
3597 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3598 NULL_RTX if we failed the caller should emit a normal call, otherwise
3599 try to get the result in TARGET, if convenient (and in mode MODE if that's
3600 convenient). */
3601
3602 static rtx
3603 expand_builtin_strcpy (tree exp, rtx target)
3604 {
3605 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3606 return NULL_RTX;
3607
3608 tree dest = CALL_EXPR_ARG (exp, 0);
3609 tree src = CALL_EXPR_ARG (exp, 1);
3610
3611 if (warn_stringop_overflow)
3612 {
3613 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3614 check_sizes (OPT_Wstringop_overflow_,
3615 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3616 }
3617
3618 return expand_builtin_strcpy_args (dest, src, target);
3619 }
3620
3621 /* Helper function to do the actual work for expand_builtin_strcpy. The
3622 arguments to the builtin_strcpy call DEST and SRC are broken out
3623 so that this can also be called without constructing an actual CALL_EXPR.
3624 The other arguments and return value are the same as for
3625 expand_builtin_strcpy. */
3626
3627 static rtx
3628 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3629 {
3630 return expand_movstr (dest, src, target, /*endp=*/0);
3631 }
3632
3633 /* Expand a call EXP to the stpcpy builtin.
3634 Return NULL_RTX if we failed the caller should emit a normal call,
3635 otherwise try to get the result in TARGET, if convenient (and in
3636 mode MODE if that's convenient). */
3637
3638 static rtx
3639 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3640 {
3641 tree dst, src;
3642 location_t loc = EXPR_LOCATION (exp);
3643
3644 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3645 return NULL_RTX;
3646
3647 dst = CALL_EXPR_ARG (exp, 0);
3648 src = CALL_EXPR_ARG (exp, 1);
3649
3650 if (warn_stringop_overflow)
3651 {
3652 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3653 check_sizes (OPT_Wstringop_overflow_,
3654 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3655 }
3656
3657 /* If return value is ignored, transform stpcpy into strcpy. */
3658 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3659 {
3660 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3661 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3662 return expand_expr (result, target, mode, EXPAND_NORMAL);
3663 }
3664 else
3665 {
3666 tree len, lenp1;
3667 rtx ret;
3668
3669 /* Ensure we get an actual string whose length can be evaluated at
3670 compile-time, not an expression containing a string. This is
3671 because the latter will potentially produce pessimized code
3672 when used to produce the return value. */
3673 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3674 return expand_movstr (dst, src, target, /*endp=*/2);
3675
3676 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3677 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3678 target, exp, /*endp=*/2);
3679
3680 if (ret)
3681 return ret;
3682
3683 if (TREE_CODE (len) == INTEGER_CST)
3684 {
3685 rtx len_rtx = expand_normal (len);
3686
3687 if (CONST_INT_P (len_rtx))
3688 {
3689 ret = expand_builtin_strcpy_args (dst, src, target);
3690
3691 if (ret)
3692 {
3693 if (! target)
3694 {
3695 if (mode != VOIDmode)
3696 target = gen_reg_rtx (mode);
3697 else
3698 target = gen_reg_rtx (GET_MODE (ret));
3699 }
3700 if (GET_MODE (target) != GET_MODE (ret))
3701 ret = gen_lowpart (GET_MODE (target), ret);
3702
3703 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3704 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3705 gcc_assert (ret);
3706
3707 return target;
3708 }
3709 }
3710 }
3711
3712 return expand_movstr (dst, src, target, /*endp=*/2);
3713 }
3714 }
3715
3716 /* Check a call EXP to the stpncpy built-in for validity.
3717 Return NULL_RTX on both success and failure. */
3718
3719 static rtx
3720 expand_builtin_stpncpy (tree exp, rtx)
3721 {
3722 if (!validate_arglist (exp,
3723 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3724 || !warn_stringop_overflow)
3725 return NULL_RTX;
3726
3727 /* The source and destination of the call. */
3728 tree dest = CALL_EXPR_ARG (exp, 0);
3729 tree src = CALL_EXPR_ARG (exp, 1);
3730
3731 /* The exact number of bytes to write (not the maximum). */
3732 tree len = CALL_EXPR_ARG (exp, 2);
3733
3734 /* The size of the destination object. */
3735 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3736
3737 check_sizes (OPT_Wstringop_overflow_,
3738 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3739
3740 return NULL_RTX;
3741 }
3742
3743 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3744 bytes from constant string DATA + OFFSET and return it as target
3745 constant. */
3746
3747 rtx
3748 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3749 machine_mode mode)
3750 {
3751 const char *str = (const char *) data;
3752
3753 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3754 return const0_rtx;
3755
3756 return c_readstr (str + offset, mode);
3757 }
3758
3759 /* Helper to check the sizes of sequences and the destination of calls
3760 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3761 success (no overflow or invalid sizes), false otherwise. */
3762
3763 static bool
3764 check_strncat_sizes (tree exp, tree objsize)
3765 {
3766 tree dest = CALL_EXPR_ARG (exp, 0);
3767 tree src = CALL_EXPR_ARG (exp, 1);
3768 tree maxlen = CALL_EXPR_ARG (exp, 2);
3769
3770 /* Try to determine the range of lengths that the source expression
3771 refers to. */
3772 tree lenrange[2];
3773 get_range_strlen (src, lenrange);
3774
3775 /* Try to verify that the destination is big enough for the shortest
3776 string. */
3777
3778 if (!objsize && warn_stringop_overflow)
3779 {
3780 /* If it hasn't been provided by __strncat_chk, try to determine
3781 the size of the destination object into which the source is
3782 being copied. */
3783 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3784 }
3785
3786 /* Add one for the terminating nul. */
3787 tree srclen = (lenrange[0]
3788 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3789 size_one_node)
3790 : NULL_TREE);
3791
3792 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3793 nul so the specified upper bound should never be equal to (or greater
3794 than) the size of the destination. */
3795 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3796 && tree_int_cst_equal (objsize, maxlen))
3797 {
3798 location_t loc = tree_nonartificial_location (exp);
3799 loc = expansion_point_location_if_in_system_header (loc);
3800
3801 warning_at (loc, OPT_Wstringop_overflow_,
3802 "%K%qD specified bound %E equals destination size",
3803 exp, get_callee_fndecl (exp), maxlen);
3804
3805 return false;
3806 }
3807
3808 if (!srclen
3809 || (maxlen && tree_fits_uhwi_p (maxlen)
3810 && tree_fits_uhwi_p (srclen)
3811 && tree_int_cst_lt (maxlen, srclen)))
3812 srclen = maxlen;
3813
3814 /* The number of bytes to write is LEN but check_sizes will also
3815 check SRCLEN if LEN's value isn't known. */
3816 return check_sizes (OPT_Wstringop_overflow_,
3817 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3818 }
3819
3820 /* Similar to expand_builtin_strcat, do some very basic size validation
3821 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
3822 the built-in expand to a call to the library function. */
3823
3824 static rtx
3825 expand_builtin_strncat (tree exp, rtx)
3826 {
3827 if (!validate_arglist (exp,
3828 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3829 || !warn_stringop_overflow)
3830 return NULL_RTX;
3831
3832 tree dest = CALL_EXPR_ARG (exp, 0);
3833 tree src = CALL_EXPR_ARG (exp, 1);
3834 /* The upper bound on the number of bytes to write. */
3835 tree maxlen = CALL_EXPR_ARG (exp, 2);
3836 /* The length of the source sequence. */
3837 tree slen = c_strlen (src, 1);
3838
3839 /* Try to determine the range of lengths that the source expression
3840 refers to. */
3841 tree lenrange[2];
3842 if (slen)
3843 lenrange[0] = lenrange[1] = slen;
3844 else
3845 get_range_strlen (src, lenrange);
3846
3847 /* Try to verify that the destination is big enough for the shortest
3848 string. First try to determine the size of the destination object
3849 into which the source is being copied. */
3850 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3851
3852 /* Add one for the terminating nul. */
3853 tree srclen = (lenrange[0]
3854 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3855 size_one_node)
3856 : NULL_TREE);
3857
3858 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3859 nul so the specified upper bound should never be equal to (or greater
3860 than) the size of the destination. */
3861 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3862 && tree_int_cst_equal (destsize, maxlen))
3863 {
3864 location_t loc = tree_nonartificial_location (exp);
3865 loc = expansion_point_location_if_in_system_header (loc);
3866
3867 warning_at (loc, OPT_Wstringop_overflow_,
3868 "%K%qD specified bound %E equals destination size",
3869 exp, get_callee_fndecl (exp), maxlen);
3870
3871 return NULL_RTX;
3872 }
3873
3874 if (!srclen
3875 || (maxlen && tree_fits_uhwi_p (maxlen)
3876 && tree_fits_uhwi_p (srclen)
3877 && tree_int_cst_lt (maxlen, srclen)))
3878 srclen = maxlen;
3879
3880 /* The number of bytes to write is LEN but check_sizes will also
3881 check SRCLEN if LEN's value isn't known. */
3882 check_sizes (OPT_Wstringop_overflow_,
3883 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3884
3885 return NULL_RTX;
3886 }
3887
3888 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3889 NULL_RTX if we failed the caller should emit a normal call. */
3890
3891 static rtx
3892 expand_builtin_strncpy (tree exp, rtx target)
3893 {
3894 location_t loc = EXPR_LOCATION (exp);
3895
3896 if (validate_arglist (exp,
3897 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3898 {
3899 tree dest = CALL_EXPR_ARG (exp, 0);
3900 tree src = CALL_EXPR_ARG (exp, 1);
3901 /* The number of bytes to write (not the maximum). */
3902 tree len = CALL_EXPR_ARG (exp, 2);
3903 /* The length of the source sequence. */
3904 tree slen = c_strlen (src, 1);
3905
3906 if (warn_stringop_overflow)
3907 {
3908 tree destsize = compute_objsize (dest,
3909 warn_stringop_overflow - 1);
3910
3911 /* The number of bytes to write is LEN but check_sizes will also
3912 check SLEN if LEN's value isn't known. */
3913 check_sizes (OPT_Wstringop_overflow_,
3914 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3915 }
3916
3917 /* We must be passed a constant len and src parameter. */
3918 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3919 return NULL_RTX;
3920
3921 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3922
3923 /* We're required to pad with trailing zeros if the requested
3924 len is greater than strlen(s2)+1. In that case try to
3925 use store_by_pieces, if it fails, punt. */
3926 if (tree_int_cst_lt (slen, len))
3927 {
3928 unsigned int dest_align = get_pointer_alignment (dest);
3929 const char *p = c_getstr (src);
3930 rtx dest_mem;
3931
3932 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3933 || !can_store_by_pieces (tree_to_uhwi (len),
3934 builtin_strncpy_read_str,
3935 CONST_CAST (char *, p),
3936 dest_align, false))
3937 return NULL_RTX;
3938
3939 dest_mem = get_memory_rtx (dest, len);
3940 store_by_pieces (dest_mem, tree_to_uhwi (len),
3941 builtin_strncpy_read_str,
3942 CONST_CAST (char *, p), dest_align, false, 0);
3943 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3944 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3945 return dest_mem;
3946 }
3947 }
3948 return NULL_RTX;
3949 }
3950
3951 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3952 bytes from constant string DATA + OFFSET and return it as target
3953 constant. */
3954
3955 rtx
3956 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3957 machine_mode mode)
3958 {
3959 const char *c = (const char *) data;
3960 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3961
3962 memset (p, *c, GET_MODE_SIZE (mode));
3963
3964 return c_readstr (p, mode);
3965 }
3966
3967 /* Callback routine for store_by_pieces. Return the RTL of a register
3968 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3969 char value given in the RTL register data. For example, if mode is
3970 4 bytes wide, return the RTL for 0x01010101*data. */
3971
3972 static rtx
3973 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3974 machine_mode mode)
3975 {
3976 rtx target, coeff;
3977 size_t size;
3978 char *p;
3979
3980 size = GET_MODE_SIZE (mode);
3981 if (size == 1)
3982 return (rtx) data;
3983
3984 p = XALLOCAVEC (char, size);
3985 memset (p, 1, size);
3986 coeff = c_readstr (p, mode);
3987
3988 target = convert_to_mode (mode, (rtx) data, 1);
3989 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3990 return force_reg (mode, target);
3991 }
3992
3993 /* Expand expression EXP, which is a call to the memset builtin. Return
3994 NULL_RTX if we failed the caller should emit a normal call, otherwise
3995 try to get the result in TARGET, if convenient (and in mode MODE if that's
3996 convenient). */
3997
3998 static rtx
3999 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4000 {
4001 if (!validate_arglist (exp,
4002 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4003 return NULL_RTX;
4004
4005 tree dest = CALL_EXPR_ARG (exp, 0);
4006 tree val = CALL_EXPR_ARG (exp, 1);
4007 tree len = CALL_EXPR_ARG (exp, 2);
4008
4009 check_memop_sizes (exp, dest, NULL_TREE, len);
4010
4011 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4012 }
4013
4014 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4015 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
4016 try to get the result in TARGET, if convenient (and in mode MODE if that's
4017 convenient). */
4018
4019 static rtx
4020 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4021 {
4022 if (!validate_arglist (exp,
4023 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4024 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4025 return NULL_RTX;
4026 else
4027 {
4028 tree dest = CALL_EXPR_ARG (exp, 0);
4029 tree val = CALL_EXPR_ARG (exp, 2);
4030 tree len = CALL_EXPR_ARG (exp, 3);
4031 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4032
4033 /* Return src bounds with the result. */
4034 if (res)
4035 {
4036 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4037 expand_normal (CALL_EXPR_ARG (exp, 1)));
4038 res = chkp_join_splitted_slot (res, bnd);
4039 }
4040 return res;
4041 }
4042 }
4043
4044 /* Helper function to do the actual work for expand_builtin_memset. The
4045 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4046 so that this can also be called without constructing an actual CALL_EXPR.
4047 The other arguments and return value are the same as for
4048 expand_builtin_memset. */
4049
4050 static rtx
4051 expand_builtin_memset_args (tree dest, tree val, tree len,
4052 rtx target, machine_mode mode, tree orig_exp)
4053 {
4054 tree fndecl, fn;
4055 enum built_in_function fcode;
4056 machine_mode val_mode;
4057 char c;
4058 unsigned int dest_align;
4059 rtx dest_mem, dest_addr, len_rtx;
4060 HOST_WIDE_INT expected_size = -1;
4061 unsigned int expected_align = 0;
4062 unsigned HOST_WIDE_INT min_size;
4063 unsigned HOST_WIDE_INT max_size;
4064 unsigned HOST_WIDE_INT probable_max_size;
4065
4066 dest_align = get_pointer_alignment (dest);
4067
4068 /* If DEST is not a pointer type, don't do this operation in-line. */
4069 if (dest_align == 0)
4070 return NULL_RTX;
4071
4072 if (currently_expanding_gimple_stmt)
4073 stringop_block_profile (currently_expanding_gimple_stmt,
4074 &expected_align, &expected_size);
4075
4076 if (expected_align < dest_align)
4077 expected_align = dest_align;
4078
4079 /* If the LEN parameter is zero, return DEST. */
4080 if (integer_zerop (len))
4081 {
4082 /* Evaluate and ignore VAL in case it has side-effects. */
4083 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4084 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4085 }
4086
4087 /* Stabilize the arguments in case we fail. */
4088 dest = builtin_save_expr (dest);
4089 val = builtin_save_expr (val);
4090 len = builtin_save_expr (len);
4091
4092 len_rtx = expand_normal (len);
4093 determine_block_size (len, len_rtx, &min_size, &max_size,
4094 &probable_max_size);
4095 dest_mem = get_memory_rtx (dest, len);
4096 val_mode = TYPE_MODE (unsigned_char_type_node);
4097
4098 if (TREE_CODE (val) != INTEGER_CST)
4099 {
4100 rtx val_rtx;
4101
4102 val_rtx = expand_normal (val);
4103 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4104
4105 /* Assume that we can memset by pieces if we can store
4106 * the coefficients by pieces (in the required modes).
4107 * We can't pass builtin_memset_gen_str as that emits RTL. */
4108 c = 1;
4109 if (tree_fits_uhwi_p (len)
4110 && can_store_by_pieces (tree_to_uhwi (len),
4111 builtin_memset_read_str, &c, dest_align,
4112 true))
4113 {
4114 val_rtx = force_reg (val_mode, val_rtx);
4115 store_by_pieces (dest_mem, tree_to_uhwi (len),
4116 builtin_memset_gen_str, val_rtx, dest_align,
4117 true, 0);
4118 }
4119 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4120 dest_align, expected_align,
4121 expected_size, min_size, max_size,
4122 probable_max_size))
4123 goto do_libcall;
4124
4125 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4126 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4127 return dest_mem;
4128 }
4129
4130 if (target_char_cast (val, &c))
4131 goto do_libcall;
4132
4133 if (c)
4134 {
4135 if (tree_fits_uhwi_p (len)
4136 && can_store_by_pieces (tree_to_uhwi (len),
4137 builtin_memset_read_str, &c, dest_align,
4138 true))
4139 store_by_pieces (dest_mem, tree_to_uhwi (len),
4140 builtin_memset_read_str, &c, dest_align, true, 0);
4141 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4142 gen_int_mode (c, val_mode),
4143 dest_align, expected_align,
4144 expected_size, min_size, max_size,
4145 probable_max_size))
4146 goto do_libcall;
4147
4148 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4149 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4150 return dest_mem;
4151 }
4152
4153 set_mem_align (dest_mem, dest_align);
4154 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4155 CALL_EXPR_TAILCALL (orig_exp)
4156 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4157 expected_align, expected_size,
4158 min_size, max_size,
4159 probable_max_size);
4160
4161 if (dest_addr == 0)
4162 {
4163 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4164 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4165 }
4166
4167 return dest_addr;
4168
4169 do_libcall:
4170 fndecl = get_callee_fndecl (orig_exp);
4171 fcode = DECL_FUNCTION_CODE (fndecl);
4172 if (fcode == BUILT_IN_MEMSET
4173 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4174 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4175 dest, val, len);
4176 else if (fcode == BUILT_IN_BZERO)
4177 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4178 dest, len);
4179 else
4180 gcc_unreachable ();
4181 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4182 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4183 return expand_call (fn, target, target == const0_rtx);
4184 }
4185
4186 /* Expand expression EXP, which is a call to the bzero builtin. Return
4187 NULL_RTX if we failed the caller should emit a normal call. */
4188
4189 static rtx
4190 expand_builtin_bzero (tree exp)
4191 {
4192 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4193 return NULL_RTX;
4194
4195 tree dest = CALL_EXPR_ARG (exp, 0);
4196 tree size = CALL_EXPR_ARG (exp, 1);
4197
4198 check_memop_sizes (exp, dest, NULL_TREE, size);
4199
4200 /* New argument list transforming bzero(ptr x, int y) to
4201 memset(ptr x, int 0, size_t y). This is done this way
4202 so that if it isn't expanded inline, we fallback to
4203 calling bzero instead of memset. */
4204
4205 location_t loc = EXPR_LOCATION (exp);
4206
4207 return expand_builtin_memset_args (dest, integer_zero_node,
4208 fold_convert_loc (loc,
4209 size_type_node, size),
4210 const0_rtx, VOIDmode, exp);
4211 }
4212
4213 /* Try to expand cmpstr operation ICODE with the given operands.
4214 Return the result rtx on success, otherwise return null. */
4215
4216 static rtx
4217 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4218 HOST_WIDE_INT align)
4219 {
4220 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4221
4222 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4223 target = NULL_RTX;
4224
4225 struct expand_operand ops[4];
4226 create_output_operand (&ops[0], target, insn_mode);
4227 create_fixed_operand (&ops[1], arg1_rtx);
4228 create_fixed_operand (&ops[2], arg2_rtx);
4229 create_integer_operand (&ops[3], align);
4230 if (maybe_expand_insn (icode, 4, ops))
4231 return ops[0].value;
4232 return NULL_RTX;
4233 }
4234
4235 /* Expand expression EXP, which is a call to the memcmp built-in function.
4236 Return NULL_RTX if we failed and the caller should emit a normal call,
4237 otherwise try to get the result in TARGET, if convenient.
4238 RESULT_EQ is true if we can relax the returned value to be either zero
4239 or nonzero, without caring about the sign. */
4240
4241 static rtx
4242 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4243 {
4244 if (!validate_arglist (exp,
4245 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4246 return NULL_RTX;
4247
4248 tree arg1 = CALL_EXPR_ARG (exp, 0);
4249 tree arg2 = CALL_EXPR_ARG (exp, 1);
4250 tree len = CALL_EXPR_ARG (exp, 2);
4251
4252 /* Diagnose calls where the specified length exceeds the size of either
4253 object. */
4254 if (warn_stringop_overflow)
4255 {
4256 tree size = compute_objsize (arg1, 0);
4257 if (check_sizes (OPT_Wstringop_overflow_,
4258 exp, len, /*maxlen=*/NULL_TREE,
4259 size, /*objsize=*/NULL_TREE))
4260 {
4261 size = compute_objsize (arg2, 0);
4262 check_sizes (OPT_Wstringop_overflow_,
4263 exp, len, /*maxlen=*/NULL_TREE,
4264 size, /*objsize=*/NULL_TREE);
4265 }
4266 }
4267
4268 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4269 location_t loc = EXPR_LOCATION (exp);
4270
4271 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4272 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4273
4274 /* If we don't have POINTER_TYPE, call the function. */
4275 if (arg1_align == 0 || arg2_align == 0)
4276 return NULL_RTX;
4277
4278 rtx arg1_rtx = get_memory_rtx (arg1, len);
4279 rtx arg2_rtx = get_memory_rtx (arg2, len);
4280 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4281
4282 /* Set MEM_SIZE as appropriate. */
4283 if (CONST_INT_P (len_rtx))
4284 {
4285 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4286 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4287 }
4288
4289 by_pieces_constfn constfn = NULL;
4290
4291 const char *src_str = c_getstr (arg2);
4292 if (result_eq && src_str == NULL)
4293 {
4294 src_str = c_getstr (arg1);
4295 if (src_str != NULL)
4296 std::swap (arg1_rtx, arg2_rtx);
4297 }
4298
4299 /* If SRC is a string constant and block move would be done
4300 by pieces, we can avoid loading the string from memory
4301 and only stored the computed constants. */
4302 if (src_str
4303 && CONST_INT_P (len_rtx)
4304 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4305 constfn = builtin_memcpy_read_str;
4306
4307 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4308 TREE_TYPE (len), target,
4309 result_eq, constfn,
4310 CONST_CAST (char *, src_str));
4311
4312 if (result)
4313 {
4314 /* Return the value in the proper mode for this function. */
4315 if (GET_MODE (result) == mode)
4316 return result;
4317
4318 if (target != 0)
4319 {
4320 convert_move (target, result, 0);
4321 return target;
4322 }
4323
4324 return convert_to_mode (mode, result, 0);
4325 }
4326
4327 return NULL_RTX;
4328 }
4329
4330 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4331 if we failed the caller should emit a normal call, otherwise try to get
4332 the result in TARGET, if convenient. */
4333
4334 static rtx
4335 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4336 {
4337 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4338 return NULL_RTX;
4339
4340 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4341 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4342 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4343 {
4344 rtx arg1_rtx, arg2_rtx;
4345 tree fndecl, fn;
4346 tree arg1 = CALL_EXPR_ARG (exp, 0);
4347 tree arg2 = CALL_EXPR_ARG (exp, 1);
4348 rtx result = NULL_RTX;
4349
4350 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4351 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4352
4353 /* If we don't have POINTER_TYPE, call the function. */
4354 if (arg1_align == 0 || arg2_align == 0)
4355 return NULL_RTX;
4356
4357 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4358 arg1 = builtin_save_expr (arg1);
4359 arg2 = builtin_save_expr (arg2);
4360
4361 arg1_rtx = get_memory_rtx (arg1, NULL);
4362 arg2_rtx = get_memory_rtx (arg2, NULL);
4363
4364 /* Try to call cmpstrsi. */
4365 if (cmpstr_icode != CODE_FOR_nothing)
4366 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4367 MIN (arg1_align, arg2_align));
4368
4369 /* Try to determine at least one length and call cmpstrnsi. */
4370 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4371 {
4372 tree len;
4373 rtx arg3_rtx;
4374
4375 tree len1 = c_strlen (arg1, 1);
4376 tree len2 = c_strlen (arg2, 1);
4377
4378 if (len1)
4379 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4380 if (len2)
4381 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4382
4383 /* If we don't have a constant length for the first, use the length
4384 of the second, if we know it. We don't require a constant for
4385 this case; some cost analysis could be done if both are available
4386 but neither is constant. For now, assume they're equally cheap,
4387 unless one has side effects. If both strings have constant lengths,
4388 use the smaller. */
4389
4390 if (!len1)
4391 len = len2;
4392 else if (!len2)
4393 len = len1;
4394 else if (TREE_SIDE_EFFECTS (len1))
4395 len = len2;
4396 else if (TREE_SIDE_EFFECTS (len2))
4397 len = len1;
4398 else if (TREE_CODE (len1) != INTEGER_CST)
4399 len = len2;
4400 else if (TREE_CODE (len2) != INTEGER_CST)
4401 len = len1;
4402 else if (tree_int_cst_lt (len1, len2))
4403 len = len1;
4404 else
4405 len = len2;
4406
4407 /* If both arguments have side effects, we cannot optimize. */
4408 if (len && !TREE_SIDE_EFFECTS (len))
4409 {
4410 arg3_rtx = expand_normal (len);
4411 result = expand_cmpstrn_or_cmpmem
4412 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4413 arg3_rtx, MIN (arg1_align, arg2_align));
4414 }
4415 }
4416
4417 if (result)
4418 {
4419 /* Return the value in the proper mode for this function. */
4420 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4421 if (GET_MODE (result) == mode)
4422 return result;
4423 if (target == 0)
4424 return convert_to_mode (mode, result, 0);
4425 convert_move (target, result, 0);
4426 return target;
4427 }
4428
4429 /* Expand the library call ourselves using a stabilized argument
4430 list to avoid re-evaluating the function's arguments twice. */
4431 fndecl = get_callee_fndecl (exp);
4432 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4433 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4434 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4435 return expand_call (fn, target, target == const0_rtx);
4436 }
4437 return NULL_RTX;
4438 }
4439
4440 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4441 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4442 the result in TARGET, if convenient. */
4443
4444 static rtx
4445 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4446 ATTRIBUTE_UNUSED machine_mode mode)
4447 {
4448 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4449
4450 if (!validate_arglist (exp,
4451 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4452 return NULL_RTX;
4453
4454 /* If c_strlen can determine an expression for one of the string
4455 lengths, and it doesn't have side effects, then emit cmpstrnsi
4456 using length MIN(strlen(string)+1, arg3). */
4457 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4458 if (cmpstrn_icode != CODE_FOR_nothing)
4459 {
4460 tree len, len1, len2, len3;
4461 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4462 rtx result;
4463 tree fndecl, fn;
4464 tree arg1 = CALL_EXPR_ARG (exp, 0);
4465 tree arg2 = CALL_EXPR_ARG (exp, 1);
4466 tree arg3 = CALL_EXPR_ARG (exp, 2);
4467
4468 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4469 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4470
4471 len1 = c_strlen (arg1, 1);
4472 len2 = c_strlen (arg2, 1);
4473
4474 if (len1)
4475 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4476 if (len2)
4477 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4478
4479 len3 = fold_convert_loc (loc, sizetype, arg3);
4480
4481 /* If we don't have a constant length for the first, use the length
4482 of the second, if we know it. If neither string is constant length,
4483 use the given length argument. We don't require a constant for
4484 this case; some cost analysis could be done if both are available
4485 but neither is constant. For now, assume they're equally cheap,
4486 unless one has side effects. If both strings have constant lengths,
4487 use the smaller. */
4488
4489 if (!len1 && !len2)
4490 len = len3;
4491 else if (!len1)
4492 len = len2;
4493 else if (!len2)
4494 len = len1;
4495 else if (TREE_SIDE_EFFECTS (len1))
4496 len = len2;
4497 else if (TREE_SIDE_EFFECTS (len2))
4498 len = len1;
4499 else if (TREE_CODE (len1) != INTEGER_CST)
4500 len = len2;
4501 else if (TREE_CODE (len2) != INTEGER_CST)
4502 len = len1;
4503 else if (tree_int_cst_lt (len1, len2))
4504 len = len1;
4505 else
4506 len = len2;
4507
4508 /* If we are not using the given length, we must incorporate it here.
4509 The actual new length parameter will be MIN(len,arg3) in this case. */
4510 if (len != len3)
4511 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4512 arg1_rtx = get_memory_rtx (arg1, len);
4513 arg2_rtx = get_memory_rtx (arg2, len);
4514 arg3_rtx = expand_normal (len);
4515 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4516 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4517 MIN (arg1_align, arg2_align));
4518 if (result)
4519 {
4520 /* Return the value in the proper mode for this function. */
4521 mode = TYPE_MODE (TREE_TYPE (exp));
4522 if (GET_MODE (result) == mode)
4523 return result;
4524 if (target == 0)
4525 return convert_to_mode (mode, result, 0);
4526 convert_move (target, result, 0);
4527 return target;
4528 }
4529
4530 /* Expand the library call ourselves using a stabilized argument
4531 list to avoid re-evaluating the function's arguments twice. */
4532 fndecl = get_callee_fndecl (exp);
4533 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4534 arg1, arg2, len);
4535 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4536 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4537 return expand_call (fn, target, target == const0_rtx);
4538 }
4539 return NULL_RTX;
4540 }
4541
4542 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4543 if that's convenient. */
4544
4545 rtx
4546 expand_builtin_saveregs (void)
4547 {
4548 rtx val;
4549 rtx_insn *seq;
4550
4551 /* Don't do __builtin_saveregs more than once in a function.
4552 Save the result of the first call and reuse it. */
4553 if (saveregs_value != 0)
4554 return saveregs_value;
4555
4556 /* When this function is called, it means that registers must be
4557 saved on entry to this function. So we migrate the call to the
4558 first insn of this function. */
4559
4560 start_sequence ();
4561
4562 /* Do whatever the machine needs done in this case. */
4563 val = targetm.calls.expand_builtin_saveregs ();
4564
4565 seq = get_insns ();
4566 end_sequence ();
4567
4568 saveregs_value = val;
4569
4570 /* Put the insns after the NOTE that starts the function. If this
4571 is inside a start_sequence, make the outer-level insn chain current, so
4572 the code is placed at the start of the function. */
4573 push_topmost_sequence ();
4574 emit_insn_after (seq, entry_of_function ());
4575 pop_topmost_sequence ();
4576
4577 return val;
4578 }
4579
4580 /* Expand a call to __builtin_next_arg. */
4581
4582 static rtx
4583 expand_builtin_next_arg (void)
4584 {
4585 /* Checking arguments is already done in fold_builtin_next_arg
4586 that must be called before this function. */
4587 return expand_binop (ptr_mode, add_optab,
4588 crtl->args.internal_arg_pointer,
4589 crtl->args.arg_offset_rtx,
4590 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4591 }
4592
4593 /* Make it easier for the backends by protecting the valist argument
4594 from multiple evaluations. */
4595
4596 static tree
4597 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4598 {
4599 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4600
4601 /* The current way of determining the type of valist is completely
4602 bogus. We should have the information on the va builtin instead. */
4603 if (!vatype)
4604 vatype = targetm.fn_abi_va_list (cfun->decl);
4605
4606 if (TREE_CODE (vatype) == ARRAY_TYPE)
4607 {
4608 if (TREE_SIDE_EFFECTS (valist))
4609 valist = save_expr (valist);
4610
4611 /* For this case, the backends will be expecting a pointer to
4612 vatype, but it's possible we've actually been given an array
4613 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4614 So fix it. */
4615 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4616 {
4617 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4618 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4619 }
4620 }
4621 else
4622 {
4623 tree pt = build_pointer_type (vatype);
4624
4625 if (! needs_lvalue)
4626 {
4627 if (! TREE_SIDE_EFFECTS (valist))
4628 return valist;
4629
4630 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4631 TREE_SIDE_EFFECTS (valist) = 1;
4632 }
4633
4634 if (TREE_SIDE_EFFECTS (valist))
4635 valist = save_expr (valist);
4636 valist = fold_build2_loc (loc, MEM_REF,
4637 vatype, valist, build_int_cst (pt, 0));
4638 }
4639
4640 return valist;
4641 }
4642
4643 /* The "standard" definition of va_list is void*. */
4644
4645 tree
4646 std_build_builtin_va_list (void)
4647 {
4648 return ptr_type_node;
4649 }
4650
4651 /* The "standard" abi va_list is va_list_type_node. */
4652
4653 tree
4654 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4655 {
4656 return va_list_type_node;
4657 }
4658
4659 /* The "standard" type of va_list is va_list_type_node. */
4660
4661 tree
4662 std_canonical_va_list_type (tree type)
4663 {
4664 tree wtype, htype;
4665
4666 wtype = va_list_type_node;
4667 htype = type;
4668
4669 if (TREE_CODE (wtype) == ARRAY_TYPE)
4670 {
4671 /* If va_list is an array type, the argument may have decayed
4672 to a pointer type, e.g. by being passed to another function.
4673 In that case, unwrap both types so that we can compare the
4674 underlying records. */
4675 if (TREE_CODE (htype) == ARRAY_TYPE
4676 || POINTER_TYPE_P (htype))
4677 {
4678 wtype = TREE_TYPE (wtype);
4679 htype = TREE_TYPE (htype);
4680 }
4681 }
4682 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4683 return va_list_type_node;
4684
4685 return NULL_TREE;
4686 }
4687
4688 /* The "standard" implementation of va_start: just assign `nextarg' to
4689 the variable. */
4690
4691 void
4692 std_expand_builtin_va_start (tree valist, rtx nextarg)
4693 {
4694 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4695 convert_move (va_r, nextarg, 0);
4696
4697 /* We do not have any valid bounds for the pointer, so
4698 just store zero bounds for it. */
4699 if (chkp_function_instrumented_p (current_function_decl))
4700 chkp_expand_bounds_reset_for_mem (valist,
4701 make_tree (TREE_TYPE (valist),
4702 nextarg));
4703 }
4704
4705 /* Expand EXP, a call to __builtin_va_start. */
4706
4707 static rtx
4708 expand_builtin_va_start (tree exp)
4709 {
4710 rtx nextarg;
4711 tree valist;
4712 location_t loc = EXPR_LOCATION (exp);
4713
4714 if (call_expr_nargs (exp) < 2)
4715 {
4716 error_at (loc, "too few arguments to function %<va_start%>");
4717 return const0_rtx;
4718 }
4719
4720 if (fold_builtin_next_arg (exp, true))
4721 return const0_rtx;
4722
4723 nextarg = expand_builtin_next_arg ();
4724 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4725
4726 if (targetm.expand_builtin_va_start)
4727 targetm.expand_builtin_va_start (valist, nextarg);
4728 else
4729 std_expand_builtin_va_start (valist, nextarg);
4730
4731 return const0_rtx;
4732 }
4733
4734 /* Expand EXP, a call to __builtin_va_end. */
4735
4736 static rtx
4737 expand_builtin_va_end (tree exp)
4738 {
4739 tree valist = CALL_EXPR_ARG (exp, 0);
4740
4741 /* Evaluate for side effects, if needed. I hate macros that don't
4742 do that. */
4743 if (TREE_SIDE_EFFECTS (valist))
4744 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4745
4746 return const0_rtx;
4747 }
4748
4749 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4750 builtin rather than just as an assignment in stdarg.h because of the
4751 nastiness of array-type va_list types. */
4752
4753 static rtx
4754 expand_builtin_va_copy (tree exp)
4755 {
4756 tree dst, src, t;
4757 location_t loc = EXPR_LOCATION (exp);
4758
4759 dst = CALL_EXPR_ARG (exp, 0);
4760 src = CALL_EXPR_ARG (exp, 1);
4761
4762 dst = stabilize_va_list_loc (loc, dst, 1);
4763 src = stabilize_va_list_loc (loc, src, 0);
4764
4765 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4766
4767 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4768 {
4769 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4770 TREE_SIDE_EFFECTS (t) = 1;
4771 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4772 }
4773 else
4774 {
4775 rtx dstb, srcb, size;
4776
4777 /* Evaluate to pointers. */
4778 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4779 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4780 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4781 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4782
4783 dstb = convert_memory_address (Pmode, dstb);
4784 srcb = convert_memory_address (Pmode, srcb);
4785
4786 /* "Dereference" to BLKmode memories. */
4787 dstb = gen_rtx_MEM (BLKmode, dstb);
4788 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4789 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4790 srcb = gen_rtx_MEM (BLKmode, srcb);
4791 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4792 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4793
4794 /* Copy. */
4795 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4796 }
4797
4798 return const0_rtx;
4799 }
4800
4801 /* Expand a call to one of the builtin functions __builtin_frame_address or
4802 __builtin_return_address. */
4803
4804 static rtx
4805 expand_builtin_frame_address (tree fndecl, tree exp)
4806 {
4807 /* The argument must be a nonnegative integer constant.
4808 It counts the number of frames to scan up the stack.
4809 The value is either the frame pointer value or the return
4810 address saved in that frame. */
4811 if (call_expr_nargs (exp) == 0)
4812 /* Warning about missing arg was already issued. */
4813 return const0_rtx;
4814 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4815 {
4816 error ("invalid argument to %qD", fndecl);
4817 return const0_rtx;
4818 }
4819 else
4820 {
4821 /* Number of frames to scan up the stack. */
4822 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4823
4824 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4825
4826 /* Some ports cannot access arbitrary stack frames. */
4827 if (tem == NULL)
4828 {
4829 warning (0, "unsupported argument to %qD", fndecl);
4830 return const0_rtx;
4831 }
4832
4833 if (count)
4834 {
4835 /* Warn since no effort is made to ensure that any frame
4836 beyond the current one exists or can be safely reached. */
4837 warning (OPT_Wframe_address, "calling %qD with "
4838 "a nonzero argument is unsafe", fndecl);
4839 }
4840
4841 /* For __builtin_frame_address, return what we've got. */
4842 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4843 return tem;
4844
4845 if (!REG_P (tem)
4846 && ! CONSTANT_P (tem))
4847 tem = copy_addr_to_reg (tem);
4848 return tem;
4849 }
4850 }
4851
4852 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4853 failed and the caller should emit a normal call. */
4854
4855 static rtx
4856 expand_builtin_alloca (tree exp)
4857 {
4858 rtx op0;
4859 rtx result;
4860 unsigned int align;
4861 tree fndecl = get_callee_fndecl (exp);
4862 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
4863 == BUILT_IN_ALLOCA_WITH_ALIGN);
4864 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4865 bool valid_arglist
4866 = (alloca_with_align
4867 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4868 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4869
4870 if (!valid_arglist)
4871 return NULL_RTX;
4872
4873 if ((alloca_with_align && !warn_vla_limit)
4874 || (!alloca_with_align && !warn_alloca_limit))
4875 {
4876 /* -Walloca-larger-than and -Wvla-larger-than settings override
4877 the more general -Walloc-size-larger-than so unless either of
4878 the former options is specified check the alloca arguments for
4879 overflow. */
4880 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4881 int idx[] = { 0, -1 };
4882 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4883 }
4884
4885 /* Compute the argument. */
4886 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4887
4888 /* Compute the alignment. */
4889 align = (alloca_with_align
4890 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4891 : BIGGEST_ALIGNMENT);
4892
4893 /* Allocate the desired space. If the allocation stems from the declaration
4894 of a variable-sized object, it cannot accumulate. */
4895 result = allocate_dynamic_stack_space (op0, 0, align, alloca_for_var);
4896 result = convert_memory_address (ptr_mode, result);
4897
4898 return result;
4899 }
4900
4901 /* Emit a call to __asan_allocas_unpoison call in EXP. Replace second argument
4902 of the call with virtual_stack_dynamic_rtx because in asan pass we emit a
4903 dummy value into second parameter relying on this function to perform the
4904 change. See motivation for this in comment to handle_builtin_stack_restore
4905 function. */
4906
4907 static rtx
4908 expand_asan_emit_allocas_unpoison (tree exp)
4909 {
4910 tree arg0 = CALL_EXPR_ARG (exp, 0);
4911 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4912 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
4913 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4914 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2, top,
4915 ptr_mode, bot, ptr_mode);
4916 return ret;
4917 }
4918
4919 /* Expand a call to bswap builtin in EXP.
4920 Return NULL_RTX if a normal call should be emitted rather than expanding the
4921 function in-line. If convenient, the result should be placed in TARGET.
4922 SUBTARGET may be used as the target for computing one of EXP's operands. */
4923
4924 static rtx
4925 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4926 rtx subtarget)
4927 {
4928 tree arg;
4929 rtx op0;
4930
4931 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4932 return NULL_RTX;
4933
4934 arg = CALL_EXPR_ARG (exp, 0);
4935 op0 = expand_expr (arg,
4936 subtarget && GET_MODE (subtarget) == target_mode
4937 ? subtarget : NULL_RTX,
4938 target_mode, EXPAND_NORMAL);
4939 if (GET_MODE (op0) != target_mode)
4940 op0 = convert_to_mode (target_mode, op0, 1);
4941
4942 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4943
4944 gcc_assert (target);
4945
4946 return convert_to_mode (target_mode, target, 1);
4947 }
4948
4949 /* Expand a call to a unary builtin in EXP.
4950 Return NULL_RTX if a normal call should be emitted rather than expanding the
4951 function in-line. If convenient, the result should be placed in TARGET.
4952 SUBTARGET may be used as the target for computing one of EXP's operands. */
4953
4954 static rtx
4955 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4956 rtx subtarget, optab op_optab)
4957 {
4958 rtx op0;
4959
4960 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4961 return NULL_RTX;
4962
4963 /* Compute the argument. */
4964 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4965 (subtarget
4966 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4967 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4968 VOIDmode, EXPAND_NORMAL);
4969 /* Compute op, into TARGET if possible.
4970 Set TARGET to wherever the result comes back. */
4971 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4972 op_optab, op0, target, op_optab != clrsb_optab);
4973 gcc_assert (target);
4974
4975 return convert_to_mode (target_mode, target, 0);
4976 }
4977
4978 /* Expand a call to __builtin_expect. We just return our argument
4979 as the builtin_expect semantic should've been already executed by
4980 tree branch prediction pass. */
4981
4982 static rtx
4983 expand_builtin_expect (tree exp, rtx target)
4984 {
4985 tree arg;
4986
4987 if (call_expr_nargs (exp) < 2)
4988 return const0_rtx;
4989 arg = CALL_EXPR_ARG (exp, 0);
4990
4991 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4992 /* When guessing was done, the hints should be already stripped away. */
4993 gcc_assert (!flag_guess_branch_prob
4994 || optimize == 0 || seen_error ());
4995 return target;
4996 }
4997
4998 /* Expand a call to __builtin_assume_aligned. We just return our first
4999 argument as the builtin_assume_aligned semantic should've been already
5000 executed by CCP. */
5001
5002 static rtx
5003 expand_builtin_assume_aligned (tree exp, rtx target)
5004 {
5005 if (call_expr_nargs (exp) < 2)
5006 return const0_rtx;
5007 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5008 EXPAND_NORMAL);
5009 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5010 && (call_expr_nargs (exp) < 3
5011 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5012 return target;
5013 }
5014
5015 void
5016 expand_builtin_trap (void)
5017 {
5018 if (targetm.have_trap ())
5019 {
5020 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5021 /* For trap insns when not accumulating outgoing args force
5022 REG_ARGS_SIZE note to prevent crossjumping of calls with
5023 different args sizes. */
5024 if (!ACCUMULATE_OUTGOING_ARGS)
5025 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5026 }
5027 else
5028 {
5029 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5030 tree call_expr = build_call_expr (fn, 0);
5031 expand_call (call_expr, NULL_RTX, false);
5032 }
5033
5034 emit_barrier ();
5035 }
5036
5037 /* Expand a call to __builtin_unreachable. We do nothing except emit
5038 a barrier saying that control flow will not pass here.
5039
5040 It is the responsibility of the program being compiled to ensure
5041 that control flow does never reach __builtin_unreachable. */
5042 static void
5043 expand_builtin_unreachable (void)
5044 {
5045 emit_barrier ();
5046 }
5047
5048 /* Expand EXP, a call to fabs, fabsf or fabsl.
5049 Return NULL_RTX if a normal call should be emitted rather than expanding
5050 the function inline. If convenient, the result should be placed
5051 in TARGET. SUBTARGET may be used as the target for computing
5052 the operand. */
5053
5054 static rtx
5055 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5056 {
5057 machine_mode mode;
5058 tree arg;
5059 rtx op0;
5060
5061 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5062 return NULL_RTX;
5063
5064 arg = CALL_EXPR_ARG (exp, 0);
5065 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5066 mode = TYPE_MODE (TREE_TYPE (arg));
5067 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5068 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5069 }
5070
5071 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5072 Return NULL is a normal call should be emitted rather than expanding the
5073 function inline. If convenient, the result should be placed in TARGET.
5074 SUBTARGET may be used as the target for computing the operand. */
5075
5076 static rtx
5077 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5078 {
5079 rtx op0, op1;
5080 tree arg;
5081
5082 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5083 return NULL_RTX;
5084
5085 arg = CALL_EXPR_ARG (exp, 0);
5086 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5087
5088 arg = CALL_EXPR_ARG (exp, 1);
5089 op1 = expand_normal (arg);
5090
5091 return expand_copysign (op0, op1, target);
5092 }
5093
5094 /* Expand a call to __builtin___clear_cache. */
5095
5096 static rtx
5097 expand_builtin___clear_cache (tree exp)
5098 {
5099 if (!targetm.code_for_clear_cache)
5100 {
5101 #ifdef CLEAR_INSN_CACHE
5102 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5103 does something. Just do the default expansion to a call to
5104 __clear_cache(). */
5105 return NULL_RTX;
5106 #else
5107 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5108 does nothing. There is no need to call it. Do nothing. */
5109 return const0_rtx;
5110 #endif /* CLEAR_INSN_CACHE */
5111 }
5112
5113 /* We have a "clear_cache" insn, and it will handle everything. */
5114 tree begin, end;
5115 rtx begin_rtx, end_rtx;
5116
5117 /* We must not expand to a library call. If we did, any
5118 fallback library function in libgcc that might contain a call to
5119 __builtin___clear_cache() would recurse infinitely. */
5120 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5121 {
5122 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5123 return const0_rtx;
5124 }
5125
5126 if (targetm.have_clear_cache ())
5127 {
5128 struct expand_operand ops[2];
5129
5130 begin = CALL_EXPR_ARG (exp, 0);
5131 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5132
5133 end = CALL_EXPR_ARG (exp, 1);
5134 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5135
5136 create_address_operand (&ops[0], begin_rtx);
5137 create_address_operand (&ops[1], end_rtx);
5138 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5139 return const0_rtx;
5140 }
5141 return const0_rtx;
5142 }
5143
5144 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5145
5146 static rtx
5147 round_trampoline_addr (rtx tramp)
5148 {
5149 rtx temp, addend, mask;
5150
5151 /* If we don't need too much alignment, we'll have been guaranteed
5152 proper alignment by get_trampoline_type. */
5153 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5154 return tramp;
5155
5156 /* Round address up to desired boundary. */
5157 temp = gen_reg_rtx (Pmode);
5158 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5159 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5160
5161 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5162 temp, 0, OPTAB_LIB_WIDEN);
5163 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5164 temp, 0, OPTAB_LIB_WIDEN);
5165
5166 return tramp;
5167 }
5168
5169 static rtx
5170 expand_builtin_init_trampoline (tree exp, bool onstack)
5171 {
5172 tree t_tramp, t_func, t_chain;
5173 rtx m_tramp, r_tramp, r_chain, tmp;
5174
5175 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5176 POINTER_TYPE, VOID_TYPE))
5177 return NULL_RTX;
5178
5179 t_tramp = CALL_EXPR_ARG (exp, 0);
5180 t_func = CALL_EXPR_ARG (exp, 1);
5181 t_chain = CALL_EXPR_ARG (exp, 2);
5182
5183 r_tramp = expand_normal (t_tramp);
5184 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5185 MEM_NOTRAP_P (m_tramp) = 1;
5186
5187 /* If ONSTACK, the TRAMP argument should be the address of a field
5188 within the local function's FRAME decl. Either way, let's see if
5189 we can fill in the MEM_ATTRs for this memory. */
5190 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5191 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5192
5193 /* Creator of a heap trampoline is responsible for making sure the
5194 address is aligned to at least STACK_BOUNDARY. Normally malloc
5195 will ensure this anyhow. */
5196 tmp = round_trampoline_addr (r_tramp);
5197 if (tmp != r_tramp)
5198 {
5199 m_tramp = change_address (m_tramp, BLKmode, tmp);
5200 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5201 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5202 }
5203
5204 /* The FUNC argument should be the address of the nested function.
5205 Extract the actual function decl to pass to the hook. */
5206 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5207 t_func = TREE_OPERAND (t_func, 0);
5208 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5209
5210 r_chain = expand_normal (t_chain);
5211
5212 /* Generate insns to initialize the trampoline. */
5213 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5214
5215 if (onstack)
5216 {
5217 trampolines_created = 1;
5218
5219 if (targetm.calls.custom_function_descriptors != 0)
5220 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5221 "trampoline generated for nested function %qD", t_func);
5222 }
5223
5224 return const0_rtx;
5225 }
5226
5227 static rtx
5228 expand_builtin_adjust_trampoline (tree exp)
5229 {
5230 rtx tramp;
5231
5232 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5233 return NULL_RTX;
5234
5235 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5236 tramp = round_trampoline_addr (tramp);
5237 if (targetm.calls.trampoline_adjust_address)
5238 tramp = targetm.calls.trampoline_adjust_address (tramp);
5239
5240 return tramp;
5241 }
5242
5243 /* Expand a call to the builtin descriptor initialization routine.
5244 A descriptor is made up of a couple of pointers to the static
5245 chain and the code entry in this order. */
5246
5247 static rtx
5248 expand_builtin_init_descriptor (tree exp)
5249 {
5250 tree t_descr, t_func, t_chain;
5251 rtx m_descr, r_descr, r_func, r_chain;
5252
5253 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5254 VOID_TYPE))
5255 return NULL_RTX;
5256
5257 t_descr = CALL_EXPR_ARG (exp, 0);
5258 t_func = CALL_EXPR_ARG (exp, 1);
5259 t_chain = CALL_EXPR_ARG (exp, 2);
5260
5261 r_descr = expand_normal (t_descr);
5262 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5263 MEM_NOTRAP_P (m_descr) = 1;
5264
5265 r_func = expand_normal (t_func);
5266 r_chain = expand_normal (t_chain);
5267
5268 /* Generate insns to initialize the descriptor. */
5269 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5270 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5271 POINTER_SIZE / BITS_PER_UNIT), r_func);
5272
5273 return const0_rtx;
5274 }
5275
5276 /* Expand a call to the builtin descriptor adjustment routine. */
5277
5278 static rtx
5279 expand_builtin_adjust_descriptor (tree exp)
5280 {
5281 rtx tramp;
5282
5283 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5284 return NULL_RTX;
5285
5286 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5287
5288 /* Unalign the descriptor to allow runtime identification. */
5289 tramp = plus_constant (ptr_mode, tramp,
5290 targetm.calls.custom_function_descriptors);
5291
5292 return force_operand (tramp, NULL_RTX);
5293 }
5294
5295 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5296 function. The function first checks whether the back end provides
5297 an insn to implement signbit for the respective mode. If not, it
5298 checks whether the floating point format of the value is such that
5299 the sign bit can be extracted. If that is not the case, error out.
5300 EXP is the expression that is a call to the builtin function; if
5301 convenient, the result should be placed in TARGET. */
5302 static rtx
5303 expand_builtin_signbit (tree exp, rtx target)
5304 {
5305 const struct real_format *fmt;
5306 machine_mode fmode, imode, rmode;
5307 tree arg;
5308 int word, bitpos;
5309 enum insn_code icode;
5310 rtx temp;
5311 location_t loc = EXPR_LOCATION (exp);
5312
5313 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5314 return NULL_RTX;
5315
5316 arg = CALL_EXPR_ARG (exp, 0);
5317 fmode = TYPE_MODE (TREE_TYPE (arg));
5318 rmode = TYPE_MODE (TREE_TYPE (exp));
5319 fmt = REAL_MODE_FORMAT (fmode);
5320
5321 arg = builtin_save_expr (arg);
5322
5323 /* Expand the argument yielding a RTX expression. */
5324 temp = expand_normal (arg);
5325
5326 /* Check if the back end provides an insn that handles signbit for the
5327 argument's mode. */
5328 icode = optab_handler (signbit_optab, fmode);
5329 if (icode != CODE_FOR_nothing)
5330 {
5331 rtx_insn *last = get_last_insn ();
5332 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5333 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5334 return target;
5335 delete_insns_since (last);
5336 }
5337
5338 /* For floating point formats without a sign bit, implement signbit
5339 as "ARG < 0.0". */
5340 bitpos = fmt->signbit_ro;
5341 if (bitpos < 0)
5342 {
5343 /* But we can't do this if the format supports signed zero. */
5344 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5345
5346 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5347 build_real (TREE_TYPE (arg), dconst0));
5348 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5349 }
5350
5351 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5352 {
5353 imode = int_mode_for_mode (fmode);
5354 gcc_assert (imode != BLKmode);
5355 temp = gen_lowpart (imode, temp);
5356 }
5357 else
5358 {
5359 imode = word_mode;
5360 /* Handle targets with different FP word orders. */
5361 if (FLOAT_WORDS_BIG_ENDIAN)
5362 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5363 else
5364 word = bitpos / BITS_PER_WORD;
5365 temp = operand_subword_force (temp, word, fmode);
5366 bitpos = bitpos % BITS_PER_WORD;
5367 }
5368
5369 /* Force the intermediate word_mode (or narrower) result into a
5370 register. This avoids attempting to create paradoxical SUBREGs
5371 of floating point modes below. */
5372 temp = force_reg (imode, temp);
5373
5374 /* If the bitpos is within the "result mode" lowpart, the operation
5375 can be implement with a single bitwise AND. Otherwise, we need
5376 a right shift and an AND. */
5377
5378 if (bitpos < GET_MODE_BITSIZE (rmode))
5379 {
5380 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5381
5382 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5383 temp = gen_lowpart (rmode, temp);
5384 temp = expand_binop (rmode, and_optab, temp,
5385 immed_wide_int_const (mask, rmode),
5386 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5387 }
5388 else
5389 {
5390 /* Perform a logical right shift to place the signbit in the least
5391 significant bit, then truncate the result to the desired mode
5392 and mask just this bit. */
5393 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5394 temp = gen_lowpart (rmode, temp);
5395 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5396 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5397 }
5398
5399 return temp;
5400 }
5401
5402 /* Expand fork or exec calls. TARGET is the desired target of the
5403 call. EXP is the call. FN is the
5404 identificator of the actual function. IGNORE is nonzero if the
5405 value is to be ignored. */
5406
5407 static rtx
5408 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5409 {
5410 tree id, decl;
5411 tree call;
5412
5413 /* If we are not profiling, just call the function. */
5414 if (!profile_arc_flag)
5415 return NULL_RTX;
5416
5417 /* Otherwise call the wrapper. This should be equivalent for the rest of
5418 compiler, so the code does not diverge, and the wrapper may run the
5419 code necessary for keeping the profiling sane. */
5420
5421 switch (DECL_FUNCTION_CODE (fn))
5422 {
5423 case BUILT_IN_FORK:
5424 id = get_identifier ("__gcov_fork");
5425 break;
5426
5427 case BUILT_IN_EXECL:
5428 id = get_identifier ("__gcov_execl");
5429 break;
5430
5431 case BUILT_IN_EXECV:
5432 id = get_identifier ("__gcov_execv");
5433 break;
5434
5435 case BUILT_IN_EXECLP:
5436 id = get_identifier ("__gcov_execlp");
5437 break;
5438
5439 case BUILT_IN_EXECLE:
5440 id = get_identifier ("__gcov_execle");
5441 break;
5442
5443 case BUILT_IN_EXECVP:
5444 id = get_identifier ("__gcov_execvp");
5445 break;
5446
5447 case BUILT_IN_EXECVE:
5448 id = get_identifier ("__gcov_execve");
5449 break;
5450
5451 default:
5452 gcc_unreachable ();
5453 }
5454
5455 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5456 FUNCTION_DECL, id, TREE_TYPE (fn));
5457 DECL_EXTERNAL (decl) = 1;
5458 TREE_PUBLIC (decl) = 1;
5459 DECL_ARTIFICIAL (decl) = 1;
5460 TREE_NOTHROW (decl) = 1;
5461 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5462 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5463 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5464 return expand_call (call, target, ignore);
5465 }
5466
5467
5468 \f
5469 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5470 the pointer in these functions is void*, the tree optimizers may remove
5471 casts. The mode computed in expand_builtin isn't reliable either, due
5472 to __sync_bool_compare_and_swap.
5473
5474 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5475 group of builtins. This gives us log2 of the mode size. */
5476
5477 static inline machine_mode
5478 get_builtin_sync_mode (int fcode_diff)
5479 {
5480 /* The size is not negotiable, so ask not to get BLKmode in return
5481 if the target indicates that a smaller size would be better. */
5482 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5483 }
5484
5485 /* Expand the memory expression LOC and return the appropriate memory operand
5486 for the builtin_sync operations. */
5487
5488 static rtx
5489 get_builtin_sync_mem (tree loc, machine_mode mode)
5490 {
5491 rtx addr, mem;
5492
5493 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5494 addr = convert_memory_address (Pmode, addr);
5495
5496 /* Note that we explicitly do not want any alias information for this
5497 memory, so that we kill all other live memories. Otherwise we don't
5498 satisfy the full barrier semantics of the intrinsic. */
5499 mem = validize_mem (gen_rtx_MEM (mode, addr));
5500
5501 /* The alignment needs to be at least according to that of the mode. */
5502 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5503 get_pointer_alignment (loc)));
5504 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5505 MEM_VOLATILE_P (mem) = 1;
5506
5507 return mem;
5508 }
5509
5510 /* Make sure an argument is in the right mode.
5511 EXP is the tree argument.
5512 MODE is the mode it should be in. */
5513
5514 static rtx
5515 expand_expr_force_mode (tree exp, machine_mode mode)
5516 {
5517 rtx val;
5518 machine_mode old_mode;
5519
5520 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5521 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5522 of CONST_INTs, where we know the old_mode only from the call argument. */
5523
5524 old_mode = GET_MODE (val);
5525 if (old_mode == VOIDmode)
5526 old_mode = TYPE_MODE (TREE_TYPE (exp));
5527 val = convert_modes (mode, old_mode, val, 1);
5528 return val;
5529 }
5530
5531
5532 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5533 EXP is the CALL_EXPR. CODE is the rtx code
5534 that corresponds to the arithmetic or logical operation from the name;
5535 an exception here is that NOT actually means NAND. TARGET is an optional
5536 place for us to store the results; AFTER is true if this is the
5537 fetch_and_xxx form. */
5538
5539 static rtx
5540 expand_builtin_sync_operation (machine_mode mode, tree exp,
5541 enum rtx_code code, bool after,
5542 rtx target)
5543 {
5544 rtx val, mem;
5545 location_t loc = EXPR_LOCATION (exp);
5546
5547 if (code == NOT && warn_sync_nand)
5548 {
5549 tree fndecl = get_callee_fndecl (exp);
5550 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5551
5552 static bool warned_f_a_n, warned_n_a_f;
5553
5554 switch (fcode)
5555 {
5556 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5557 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5558 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5559 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5560 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5561 if (warned_f_a_n)
5562 break;
5563
5564 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5565 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5566 warned_f_a_n = true;
5567 break;
5568
5569 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5570 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5571 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5572 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5573 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5574 if (warned_n_a_f)
5575 break;
5576
5577 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5578 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5579 warned_n_a_f = true;
5580 break;
5581
5582 default:
5583 gcc_unreachable ();
5584 }
5585 }
5586
5587 /* Expand the operands. */
5588 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5589 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5590
5591 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5592 after);
5593 }
5594
5595 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5596 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5597 true if this is the boolean form. TARGET is a place for us to store the
5598 results; this is NOT optional if IS_BOOL is true. */
5599
5600 static rtx
5601 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5602 bool is_bool, rtx target)
5603 {
5604 rtx old_val, new_val, mem;
5605 rtx *pbool, *poval;
5606
5607 /* Expand the operands. */
5608 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5609 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5610 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5611
5612 pbool = poval = NULL;
5613 if (target != const0_rtx)
5614 {
5615 if (is_bool)
5616 pbool = &target;
5617 else
5618 poval = &target;
5619 }
5620 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5621 false, MEMMODEL_SYNC_SEQ_CST,
5622 MEMMODEL_SYNC_SEQ_CST))
5623 return NULL_RTX;
5624
5625 return target;
5626 }
5627
5628 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5629 general form is actually an atomic exchange, and some targets only
5630 support a reduced form with the second argument being a constant 1.
5631 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5632 the results. */
5633
5634 static rtx
5635 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5636 rtx target)
5637 {
5638 rtx val, mem;
5639
5640 /* Expand the operands. */
5641 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5642 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5643
5644 return expand_sync_lock_test_and_set (target, mem, val);
5645 }
5646
5647 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5648
5649 static void
5650 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5651 {
5652 rtx mem;
5653
5654 /* Expand the operands. */
5655 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5656
5657 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5658 }
5659
5660 /* Given an integer representing an ``enum memmodel'', verify its
5661 correctness and return the memory model enum. */
5662
5663 static enum memmodel
5664 get_memmodel (tree exp)
5665 {
5666 rtx op;
5667 unsigned HOST_WIDE_INT val;
5668 source_location loc
5669 = expansion_point_location_if_in_system_header (input_location);
5670
5671 /* If the parameter is not a constant, it's a run time value so we'll just
5672 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5673 if (TREE_CODE (exp) != INTEGER_CST)
5674 return MEMMODEL_SEQ_CST;
5675
5676 op = expand_normal (exp);
5677
5678 val = INTVAL (op);
5679 if (targetm.memmodel_check)
5680 val = targetm.memmodel_check (val);
5681 else if (val & ~MEMMODEL_MASK)
5682 {
5683 warning_at (loc, OPT_Winvalid_memory_model,
5684 "unknown architecture specifier in memory model to builtin");
5685 return MEMMODEL_SEQ_CST;
5686 }
5687
5688 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
5689 if (memmodel_base (val) >= MEMMODEL_LAST)
5690 {
5691 warning_at (loc, OPT_Winvalid_memory_model,
5692 "invalid memory model argument to builtin");
5693 return MEMMODEL_SEQ_CST;
5694 }
5695
5696 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5697 be conservative and promote consume to acquire. */
5698 if (val == MEMMODEL_CONSUME)
5699 val = MEMMODEL_ACQUIRE;
5700
5701 return (enum memmodel) val;
5702 }
5703
5704 /* Expand the __atomic_exchange intrinsic:
5705 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5706 EXP is the CALL_EXPR.
5707 TARGET is an optional place for us to store the results. */
5708
5709 static rtx
5710 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5711 {
5712 rtx val, mem;
5713 enum memmodel model;
5714
5715 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5716
5717 if (!flag_inline_atomics)
5718 return NULL_RTX;
5719
5720 /* Expand the operands. */
5721 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5722 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5723
5724 return expand_atomic_exchange (target, mem, val, model);
5725 }
5726
5727 /* Expand the __atomic_compare_exchange intrinsic:
5728 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5729 TYPE desired, BOOL weak,
5730 enum memmodel success,
5731 enum memmodel failure)
5732 EXP is the CALL_EXPR.
5733 TARGET is an optional place for us to store the results. */
5734
5735 static rtx
5736 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5737 rtx target)
5738 {
5739 rtx expect, desired, mem, oldval;
5740 rtx_code_label *label;
5741 enum memmodel success, failure;
5742 tree weak;
5743 bool is_weak;
5744 source_location loc
5745 = expansion_point_location_if_in_system_header (input_location);
5746
5747 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5748 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5749
5750 if (failure > success)
5751 {
5752 warning_at (loc, OPT_Winvalid_memory_model,
5753 "failure memory model cannot be stronger than success "
5754 "memory model for %<__atomic_compare_exchange%>");
5755 success = MEMMODEL_SEQ_CST;
5756 }
5757
5758 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5759 {
5760 warning_at (loc, OPT_Winvalid_memory_model,
5761 "invalid failure memory model for "
5762 "%<__atomic_compare_exchange%>");
5763 failure = MEMMODEL_SEQ_CST;
5764 success = MEMMODEL_SEQ_CST;
5765 }
5766
5767
5768 if (!flag_inline_atomics)
5769 return NULL_RTX;
5770
5771 /* Expand the operands. */
5772 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5773
5774 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5775 expect = convert_memory_address (Pmode, expect);
5776 expect = gen_rtx_MEM (mode, expect);
5777 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5778
5779 weak = CALL_EXPR_ARG (exp, 3);
5780 is_weak = false;
5781 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5782 is_weak = true;
5783
5784 if (target == const0_rtx)
5785 target = NULL;
5786
5787 /* Lest the rtl backend create a race condition with an imporoper store
5788 to memory, always create a new pseudo for OLDVAL. */
5789 oldval = NULL;
5790
5791 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5792 is_weak, success, failure))
5793 return NULL_RTX;
5794
5795 /* Conditionally store back to EXPECT, lest we create a race condition
5796 with an improper store to memory. */
5797 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5798 the normal case where EXPECT is totally private, i.e. a register. At
5799 which point the store can be unconditional. */
5800 label = gen_label_rtx ();
5801 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5802 GET_MODE (target), 1, label);
5803 emit_move_insn (expect, oldval);
5804 emit_label (label);
5805
5806 return target;
5807 }
5808
5809 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5810 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5811 call. The weak parameter must be dropped to match the expected parameter
5812 list and the expected argument changed from value to pointer to memory
5813 slot. */
5814
5815 static void
5816 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5817 {
5818 unsigned int z;
5819 vec<tree, va_gc> *vec;
5820
5821 vec_alloc (vec, 5);
5822 vec->quick_push (gimple_call_arg (call, 0));
5823 tree expected = gimple_call_arg (call, 1);
5824 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5825 TREE_TYPE (expected));
5826 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5827 if (expd != x)
5828 emit_move_insn (x, expd);
5829 tree v = make_tree (TREE_TYPE (expected), x);
5830 vec->quick_push (build1 (ADDR_EXPR,
5831 build_pointer_type (TREE_TYPE (expected)), v));
5832 vec->quick_push (gimple_call_arg (call, 2));
5833 /* Skip the boolean weak parameter. */
5834 for (z = 4; z < 6; z++)
5835 vec->quick_push (gimple_call_arg (call, z));
5836 built_in_function fncode
5837 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5838 + exact_log2 (GET_MODE_SIZE (mode)));
5839 tree fndecl = builtin_decl_explicit (fncode);
5840 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5841 fndecl);
5842 tree exp = build_call_vec (boolean_type_node, fn, vec);
5843 tree lhs = gimple_call_lhs (call);
5844 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5845 if (lhs)
5846 {
5847 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5848 if (GET_MODE (boolret) != mode)
5849 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5850 x = force_reg (mode, x);
5851 write_complex_part (target, boolret, true);
5852 write_complex_part (target, x, false);
5853 }
5854 }
5855
5856 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5857
5858 void
5859 expand_ifn_atomic_compare_exchange (gcall *call)
5860 {
5861 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5862 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5863 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5864 rtx expect, desired, mem, oldval, boolret;
5865 enum memmodel success, failure;
5866 tree lhs;
5867 bool is_weak;
5868 source_location loc
5869 = expansion_point_location_if_in_system_header (gimple_location (call));
5870
5871 success = get_memmodel (gimple_call_arg (call, 4));
5872 failure = get_memmodel (gimple_call_arg (call, 5));
5873
5874 if (failure > success)
5875 {
5876 warning_at (loc, OPT_Winvalid_memory_model,
5877 "failure memory model cannot be stronger than success "
5878 "memory model for %<__atomic_compare_exchange%>");
5879 success = MEMMODEL_SEQ_CST;
5880 }
5881
5882 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5883 {
5884 warning_at (loc, OPT_Winvalid_memory_model,
5885 "invalid failure memory model for "
5886 "%<__atomic_compare_exchange%>");
5887 failure = MEMMODEL_SEQ_CST;
5888 success = MEMMODEL_SEQ_CST;
5889 }
5890
5891 if (!flag_inline_atomics)
5892 {
5893 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5894 return;
5895 }
5896
5897 /* Expand the operands. */
5898 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5899
5900 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5901 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5902
5903 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5904
5905 boolret = NULL;
5906 oldval = NULL;
5907
5908 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5909 is_weak, success, failure))
5910 {
5911 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5912 return;
5913 }
5914
5915 lhs = gimple_call_lhs (call);
5916 if (lhs)
5917 {
5918 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5919 if (GET_MODE (boolret) != mode)
5920 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5921 write_complex_part (target, boolret, true);
5922 write_complex_part (target, oldval, false);
5923 }
5924 }
5925
5926 /* Expand the __atomic_load intrinsic:
5927 TYPE __atomic_load (TYPE *object, enum memmodel)
5928 EXP is the CALL_EXPR.
5929 TARGET is an optional place for us to store the results. */
5930
5931 static rtx
5932 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5933 {
5934 rtx mem;
5935 enum memmodel model;
5936
5937 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5938 if (is_mm_release (model) || is_mm_acq_rel (model))
5939 {
5940 source_location loc
5941 = expansion_point_location_if_in_system_header (input_location);
5942 warning_at (loc, OPT_Winvalid_memory_model,
5943 "invalid memory model for %<__atomic_load%>");
5944 model = MEMMODEL_SEQ_CST;
5945 }
5946
5947 if (!flag_inline_atomics)
5948 return NULL_RTX;
5949
5950 /* Expand the operand. */
5951 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5952
5953 return expand_atomic_load (target, mem, model);
5954 }
5955
5956
5957 /* Expand the __atomic_store intrinsic:
5958 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5959 EXP is the CALL_EXPR.
5960 TARGET is an optional place for us to store the results. */
5961
5962 static rtx
5963 expand_builtin_atomic_store (machine_mode mode, tree exp)
5964 {
5965 rtx mem, val;
5966 enum memmodel model;
5967
5968 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5969 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5970 || is_mm_release (model)))
5971 {
5972 source_location loc
5973 = expansion_point_location_if_in_system_header (input_location);
5974 warning_at (loc, OPT_Winvalid_memory_model,
5975 "invalid memory model for %<__atomic_store%>");
5976 model = MEMMODEL_SEQ_CST;
5977 }
5978
5979 if (!flag_inline_atomics)
5980 return NULL_RTX;
5981
5982 /* Expand the operands. */
5983 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5984 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5985
5986 return expand_atomic_store (mem, val, model, false);
5987 }
5988
5989 /* Expand the __atomic_fetch_XXX intrinsic:
5990 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5991 EXP is the CALL_EXPR.
5992 TARGET is an optional place for us to store the results.
5993 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5994 FETCH_AFTER is true if returning the result of the operation.
5995 FETCH_AFTER is false if returning the value before the operation.
5996 IGNORE is true if the result is not used.
5997 EXT_CALL is the correct builtin for an external call if this cannot be
5998 resolved to an instruction sequence. */
5999
6000 static rtx
6001 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6002 enum rtx_code code, bool fetch_after,
6003 bool ignore, enum built_in_function ext_call)
6004 {
6005 rtx val, mem, ret;
6006 enum memmodel model;
6007 tree fndecl;
6008 tree addr;
6009
6010 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6011
6012 /* Expand the operands. */
6013 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6014 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6015
6016 /* Only try generating instructions if inlining is turned on. */
6017 if (flag_inline_atomics)
6018 {
6019 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6020 if (ret)
6021 return ret;
6022 }
6023
6024 /* Return if a different routine isn't needed for the library call. */
6025 if (ext_call == BUILT_IN_NONE)
6026 return NULL_RTX;
6027
6028 /* Change the call to the specified function. */
6029 fndecl = get_callee_fndecl (exp);
6030 addr = CALL_EXPR_FN (exp);
6031 STRIP_NOPS (addr);
6032
6033 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6034 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6035
6036 /* If we will emit code after the call, the call can not be a tail call.
6037 If it is emitted as a tail call, a barrier is emitted after it, and
6038 then all trailing code is removed. */
6039 if (!ignore)
6040 CALL_EXPR_TAILCALL (exp) = 0;
6041
6042 /* Expand the call here so we can emit trailing code. */
6043 ret = expand_call (exp, target, ignore);
6044
6045 /* Replace the original function just in case it matters. */
6046 TREE_OPERAND (addr, 0) = fndecl;
6047
6048 /* Then issue the arithmetic correction to return the right result. */
6049 if (!ignore)
6050 {
6051 if (code == NOT)
6052 {
6053 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6054 OPTAB_LIB_WIDEN);
6055 ret = expand_simple_unop (mode, NOT, ret, target, true);
6056 }
6057 else
6058 ret = expand_simple_binop (mode, code, ret, val, target, true,
6059 OPTAB_LIB_WIDEN);
6060 }
6061 return ret;
6062 }
6063
6064 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6065
6066 void
6067 expand_ifn_atomic_bit_test_and (gcall *call)
6068 {
6069 tree ptr = gimple_call_arg (call, 0);
6070 tree bit = gimple_call_arg (call, 1);
6071 tree flag = gimple_call_arg (call, 2);
6072 tree lhs = gimple_call_lhs (call);
6073 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6074 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6075 enum rtx_code code;
6076 optab optab;
6077 struct expand_operand ops[5];
6078
6079 gcc_assert (flag_inline_atomics);
6080
6081 if (gimple_call_num_args (call) == 4)
6082 model = get_memmodel (gimple_call_arg (call, 3));
6083
6084 rtx mem = get_builtin_sync_mem (ptr, mode);
6085 rtx val = expand_expr_force_mode (bit, mode);
6086
6087 switch (gimple_call_internal_fn (call))
6088 {
6089 case IFN_ATOMIC_BIT_TEST_AND_SET:
6090 code = IOR;
6091 optab = atomic_bit_test_and_set_optab;
6092 break;
6093 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6094 code = XOR;
6095 optab = atomic_bit_test_and_complement_optab;
6096 break;
6097 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6098 code = AND;
6099 optab = atomic_bit_test_and_reset_optab;
6100 break;
6101 default:
6102 gcc_unreachable ();
6103 }
6104
6105 if (lhs == NULL_TREE)
6106 {
6107 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6108 val, NULL_RTX, true, OPTAB_DIRECT);
6109 if (code == AND)
6110 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6111 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6112 return;
6113 }
6114
6115 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6116 enum insn_code icode = direct_optab_handler (optab, mode);
6117 gcc_assert (icode != CODE_FOR_nothing);
6118 create_output_operand (&ops[0], target, mode);
6119 create_fixed_operand (&ops[1], mem);
6120 create_convert_operand_to (&ops[2], val, mode, true);
6121 create_integer_operand (&ops[3], model);
6122 create_integer_operand (&ops[4], integer_onep (flag));
6123 if (maybe_expand_insn (icode, 5, ops))
6124 return;
6125
6126 rtx bitval = val;
6127 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6128 val, NULL_RTX, true, OPTAB_DIRECT);
6129 rtx maskval = val;
6130 if (code == AND)
6131 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6132 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6133 code, model, false);
6134 if (integer_onep (flag))
6135 {
6136 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6137 NULL_RTX, true, OPTAB_DIRECT);
6138 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6139 true, OPTAB_DIRECT);
6140 }
6141 else
6142 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6143 OPTAB_DIRECT);
6144 if (result != target)
6145 emit_move_insn (target, result);
6146 }
6147
6148 /* Expand an atomic clear operation.
6149 void _atomic_clear (BOOL *obj, enum memmodel)
6150 EXP is the call expression. */
6151
6152 static rtx
6153 expand_builtin_atomic_clear (tree exp)
6154 {
6155 machine_mode mode;
6156 rtx mem, ret;
6157 enum memmodel model;
6158
6159 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6160 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6161 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6162
6163 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6164 {
6165 source_location loc
6166 = expansion_point_location_if_in_system_header (input_location);
6167 warning_at (loc, OPT_Winvalid_memory_model,
6168 "invalid memory model for %<__atomic_store%>");
6169 model = MEMMODEL_SEQ_CST;
6170 }
6171
6172 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6173 Failing that, a store is issued by __atomic_store. The only way this can
6174 fail is if the bool type is larger than a word size. Unlikely, but
6175 handle it anyway for completeness. Assume a single threaded model since
6176 there is no atomic support in this case, and no barriers are required. */
6177 ret = expand_atomic_store (mem, const0_rtx, model, true);
6178 if (!ret)
6179 emit_move_insn (mem, const0_rtx);
6180 return const0_rtx;
6181 }
6182
6183 /* Expand an atomic test_and_set operation.
6184 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6185 EXP is the call expression. */
6186
6187 static rtx
6188 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6189 {
6190 rtx mem;
6191 enum memmodel model;
6192 machine_mode mode;
6193
6194 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6195 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6196 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6197
6198 return expand_atomic_test_and_set (target, mem, model);
6199 }
6200
6201
6202 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6203 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6204
6205 static tree
6206 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6207 {
6208 int size;
6209 machine_mode mode;
6210 unsigned int mode_align, type_align;
6211
6212 if (TREE_CODE (arg0) != INTEGER_CST)
6213 return NULL_TREE;
6214
6215 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6216 mode = mode_for_size (size, MODE_INT, 0);
6217 mode_align = GET_MODE_ALIGNMENT (mode);
6218
6219 if (TREE_CODE (arg1) == INTEGER_CST)
6220 {
6221 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6222
6223 /* Either this argument is null, or it's a fake pointer encoding
6224 the alignment of the object. */
6225 val = least_bit_hwi (val);
6226 val *= BITS_PER_UNIT;
6227
6228 if (val == 0 || mode_align < val)
6229 type_align = mode_align;
6230 else
6231 type_align = val;
6232 }
6233 else
6234 {
6235 tree ttype = TREE_TYPE (arg1);
6236
6237 /* This function is usually invoked and folded immediately by the front
6238 end before anything else has a chance to look at it. The pointer
6239 parameter at this point is usually cast to a void *, so check for that
6240 and look past the cast. */
6241 if (CONVERT_EXPR_P (arg1)
6242 && POINTER_TYPE_P (ttype)
6243 && VOID_TYPE_P (TREE_TYPE (ttype))
6244 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6245 arg1 = TREE_OPERAND (arg1, 0);
6246
6247 ttype = TREE_TYPE (arg1);
6248 gcc_assert (POINTER_TYPE_P (ttype));
6249
6250 /* Get the underlying type of the object. */
6251 ttype = TREE_TYPE (ttype);
6252 type_align = TYPE_ALIGN (ttype);
6253 }
6254
6255 /* If the object has smaller alignment, the lock free routines cannot
6256 be used. */
6257 if (type_align < mode_align)
6258 return boolean_false_node;
6259
6260 /* Check if a compare_and_swap pattern exists for the mode which represents
6261 the required size. The pattern is not allowed to fail, so the existence
6262 of the pattern indicates support is present. Also require that an
6263 atomic load exists for the required size. */
6264 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6265 return boolean_true_node;
6266 else
6267 return boolean_false_node;
6268 }
6269
6270 /* Return true if the parameters to call EXP represent an object which will
6271 always generate lock free instructions. The first argument represents the
6272 size of the object, and the second parameter is a pointer to the object
6273 itself. If NULL is passed for the object, then the result is based on
6274 typical alignment for an object of the specified size. Otherwise return
6275 false. */
6276
6277 static rtx
6278 expand_builtin_atomic_always_lock_free (tree exp)
6279 {
6280 tree size;
6281 tree arg0 = CALL_EXPR_ARG (exp, 0);
6282 tree arg1 = CALL_EXPR_ARG (exp, 1);
6283
6284 if (TREE_CODE (arg0) != INTEGER_CST)
6285 {
6286 error ("non-constant argument 1 to __atomic_always_lock_free");
6287 return const0_rtx;
6288 }
6289
6290 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6291 if (size == boolean_true_node)
6292 return const1_rtx;
6293 return const0_rtx;
6294 }
6295
6296 /* Return a one or zero if it can be determined that object ARG1 of size ARG
6297 is lock free on this architecture. */
6298
6299 static tree
6300 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6301 {
6302 if (!flag_inline_atomics)
6303 return NULL_TREE;
6304
6305 /* If it isn't always lock free, don't generate a result. */
6306 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6307 return boolean_true_node;
6308
6309 return NULL_TREE;
6310 }
6311
6312 /* Return true if the parameters to call EXP represent an object which will
6313 always generate lock free instructions. The first argument represents the
6314 size of the object, and the second parameter is a pointer to the object
6315 itself. If NULL is passed for the object, then the result is based on
6316 typical alignment for an object of the specified size. Otherwise return
6317 NULL*/
6318
6319 static rtx
6320 expand_builtin_atomic_is_lock_free (tree exp)
6321 {
6322 tree size;
6323 tree arg0 = CALL_EXPR_ARG (exp, 0);
6324 tree arg1 = CALL_EXPR_ARG (exp, 1);
6325
6326 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6327 {
6328 error ("non-integer argument 1 to __atomic_is_lock_free");
6329 return NULL_RTX;
6330 }
6331
6332 if (!flag_inline_atomics)
6333 return NULL_RTX;
6334
6335 /* If the value is known at compile time, return the RTX for it. */
6336 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6337 if (size == boolean_true_node)
6338 return const1_rtx;
6339
6340 return NULL_RTX;
6341 }
6342
6343 /* Expand the __atomic_thread_fence intrinsic:
6344 void __atomic_thread_fence (enum memmodel)
6345 EXP is the CALL_EXPR. */
6346
6347 static void
6348 expand_builtin_atomic_thread_fence (tree exp)
6349 {
6350 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6351 expand_mem_thread_fence (model);
6352 }
6353
6354 /* Expand the __atomic_signal_fence intrinsic:
6355 void __atomic_signal_fence (enum memmodel)
6356 EXP is the CALL_EXPR. */
6357
6358 static void
6359 expand_builtin_atomic_signal_fence (tree exp)
6360 {
6361 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6362 expand_mem_signal_fence (model);
6363 }
6364
6365 /* Expand the __sync_synchronize intrinsic. */
6366
6367 static void
6368 expand_builtin_sync_synchronize (void)
6369 {
6370 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6371 }
6372
6373 static rtx
6374 expand_builtin_thread_pointer (tree exp, rtx target)
6375 {
6376 enum insn_code icode;
6377 if (!validate_arglist (exp, VOID_TYPE))
6378 return const0_rtx;
6379 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6380 if (icode != CODE_FOR_nothing)
6381 {
6382 struct expand_operand op;
6383 /* If the target is not sutitable then create a new target. */
6384 if (target == NULL_RTX
6385 || !REG_P (target)
6386 || GET_MODE (target) != Pmode)
6387 target = gen_reg_rtx (Pmode);
6388 create_output_operand (&op, target, Pmode);
6389 expand_insn (icode, 1, &op);
6390 return target;
6391 }
6392 error ("__builtin_thread_pointer is not supported on this target");
6393 return const0_rtx;
6394 }
6395
6396 static void
6397 expand_builtin_set_thread_pointer (tree exp)
6398 {
6399 enum insn_code icode;
6400 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6401 return;
6402 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6403 if (icode != CODE_FOR_nothing)
6404 {
6405 struct expand_operand op;
6406 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6407 Pmode, EXPAND_NORMAL);
6408 create_input_operand (&op, val, Pmode);
6409 expand_insn (icode, 1, &op);
6410 return;
6411 }
6412 error ("__builtin_set_thread_pointer is not supported on this target");
6413 }
6414
6415 \f
6416 /* Emit code to restore the current value of stack. */
6417
6418 static void
6419 expand_stack_restore (tree var)
6420 {
6421 rtx_insn *prev;
6422 rtx sa = expand_normal (var);
6423
6424 sa = convert_memory_address (Pmode, sa);
6425
6426 prev = get_last_insn ();
6427 emit_stack_restore (SAVE_BLOCK, sa);
6428
6429 record_new_stack_level ();
6430
6431 fixup_args_size_notes (prev, get_last_insn (), 0);
6432 }
6433
6434 /* Emit code to save the current value of stack. */
6435
6436 static rtx
6437 expand_stack_save (void)
6438 {
6439 rtx ret = NULL_RTX;
6440
6441 emit_stack_save (SAVE_BLOCK, &ret);
6442 return ret;
6443 }
6444
6445
6446 /* Expand an expression EXP that calls a built-in function,
6447 with result going to TARGET if that's convenient
6448 (and in mode MODE if that's convenient).
6449 SUBTARGET may be used as the target for computing one of EXP's operands.
6450 IGNORE is nonzero if the value is to be ignored. */
6451
6452 rtx
6453 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6454 int ignore)
6455 {
6456 tree fndecl = get_callee_fndecl (exp);
6457 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6458 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6459 int flags;
6460
6461 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6462 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6463
6464 /* When ASan is enabled, we don't want to expand some memory/string
6465 builtins and rely on libsanitizer's hooks. This allows us to avoid
6466 redundant checks and be sure, that possible overflow will be detected
6467 by ASan. */
6468
6469 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6470 return expand_call (exp, target, ignore);
6471
6472 /* When not optimizing, generate calls to library functions for a certain
6473 set of builtins. */
6474 if (!optimize
6475 && !called_as_built_in (fndecl)
6476 && fcode != BUILT_IN_FORK
6477 && fcode != BUILT_IN_EXECL
6478 && fcode != BUILT_IN_EXECV
6479 && fcode != BUILT_IN_EXECLP
6480 && fcode != BUILT_IN_EXECLE
6481 && fcode != BUILT_IN_EXECVP
6482 && fcode != BUILT_IN_EXECVE
6483 && fcode != BUILT_IN_ALLOCA
6484 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6485 && fcode != BUILT_IN_FREE
6486 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6487 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6488 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6489 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6490 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6491 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6492 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6493 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6494 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6495 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6496 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6497 && fcode != BUILT_IN_CHKP_BNDRET)
6498 return expand_call (exp, target, ignore);
6499
6500 /* The built-in function expanders test for target == const0_rtx
6501 to determine whether the function's result will be ignored. */
6502 if (ignore)
6503 target = const0_rtx;
6504
6505 /* If the result of a pure or const built-in function is ignored, and
6506 none of its arguments are volatile, we can avoid expanding the
6507 built-in call and just evaluate the arguments for side-effects. */
6508 if (target == const0_rtx
6509 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6510 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6511 {
6512 bool volatilep = false;
6513 tree arg;
6514 call_expr_arg_iterator iter;
6515
6516 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6517 if (TREE_THIS_VOLATILE (arg))
6518 {
6519 volatilep = true;
6520 break;
6521 }
6522
6523 if (! volatilep)
6524 {
6525 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6526 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6527 return const0_rtx;
6528 }
6529 }
6530
6531 /* expand_builtin_with_bounds is supposed to be used for
6532 instrumented builtin calls. */
6533 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6534
6535 switch (fcode)
6536 {
6537 CASE_FLT_FN (BUILT_IN_FABS):
6538 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6539 case BUILT_IN_FABSD32:
6540 case BUILT_IN_FABSD64:
6541 case BUILT_IN_FABSD128:
6542 target = expand_builtin_fabs (exp, target, subtarget);
6543 if (target)
6544 return target;
6545 break;
6546
6547 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6548 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6549 target = expand_builtin_copysign (exp, target, subtarget);
6550 if (target)
6551 return target;
6552 break;
6553
6554 /* Just do a normal library call if we were unable to fold
6555 the values. */
6556 CASE_FLT_FN (BUILT_IN_CABS):
6557 break;
6558
6559 CASE_FLT_FN (BUILT_IN_FMA):
6560 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6561 if (target)
6562 return target;
6563 break;
6564
6565 CASE_FLT_FN (BUILT_IN_ILOGB):
6566 if (! flag_unsafe_math_optimizations)
6567 break;
6568 gcc_fallthrough ();
6569 CASE_FLT_FN (BUILT_IN_ISINF):
6570 CASE_FLT_FN (BUILT_IN_FINITE):
6571 case BUILT_IN_ISFINITE:
6572 case BUILT_IN_ISNORMAL:
6573 target = expand_builtin_interclass_mathfn (exp, target);
6574 if (target)
6575 return target;
6576 break;
6577
6578 CASE_FLT_FN (BUILT_IN_ICEIL):
6579 CASE_FLT_FN (BUILT_IN_LCEIL):
6580 CASE_FLT_FN (BUILT_IN_LLCEIL):
6581 CASE_FLT_FN (BUILT_IN_LFLOOR):
6582 CASE_FLT_FN (BUILT_IN_IFLOOR):
6583 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6584 target = expand_builtin_int_roundingfn (exp, target);
6585 if (target)
6586 return target;
6587 break;
6588
6589 CASE_FLT_FN (BUILT_IN_IRINT):
6590 CASE_FLT_FN (BUILT_IN_LRINT):
6591 CASE_FLT_FN (BUILT_IN_LLRINT):
6592 CASE_FLT_FN (BUILT_IN_IROUND):
6593 CASE_FLT_FN (BUILT_IN_LROUND):
6594 CASE_FLT_FN (BUILT_IN_LLROUND):
6595 target = expand_builtin_int_roundingfn_2 (exp, target);
6596 if (target)
6597 return target;
6598 break;
6599
6600 CASE_FLT_FN (BUILT_IN_POWI):
6601 target = expand_builtin_powi (exp, target);
6602 if (target)
6603 return target;
6604 break;
6605
6606 CASE_FLT_FN (BUILT_IN_CEXPI):
6607 target = expand_builtin_cexpi (exp, target);
6608 gcc_assert (target);
6609 return target;
6610
6611 CASE_FLT_FN (BUILT_IN_SIN):
6612 CASE_FLT_FN (BUILT_IN_COS):
6613 if (! flag_unsafe_math_optimizations)
6614 break;
6615 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6616 if (target)
6617 return target;
6618 break;
6619
6620 CASE_FLT_FN (BUILT_IN_SINCOS):
6621 if (! flag_unsafe_math_optimizations)
6622 break;
6623 target = expand_builtin_sincos (exp);
6624 if (target)
6625 return target;
6626 break;
6627
6628 case BUILT_IN_APPLY_ARGS:
6629 return expand_builtin_apply_args ();
6630
6631 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6632 FUNCTION with a copy of the parameters described by
6633 ARGUMENTS, and ARGSIZE. It returns a block of memory
6634 allocated on the stack into which is stored all the registers
6635 that might possibly be used for returning the result of a
6636 function. ARGUMENTS is the value returned by
6637 __builtin_apply_args. ARGSIZE is the number of bytes of
6638 arguments that must be copied. ??? How should this value be
6639 computed? We'll also need a safe worst case value for varargs
6640 functions. */
6641 case BUILT_IN_APPLY:
6642 if (!validate_arglist (exp, POINTER_TYPE,
6643 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6644 && !validate_arglist (exp, REFERENCE_TYPE,
6645 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6646 return const0_rtx;
6647 else
6648 {
6649 rtx ops[3];
6650
6651 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6652 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6653 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6654
6655 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6656 }
6657
6658 /* __builtin_return (RESULT) causes the function to return the
6659 value described by RESULT. RESULT is address of the block of
6660 memory returned by __builtin_apply. */
6661 case BUILT_IN_RETURN:
6662 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6663 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6664 return const0_rtx;
6665
6666 case BUILT_IN_SAVEREGS:
6667 return expand_builtin_saveregs ();
6668
6669 case BUILT_IN_VA_ARG_PACK:
6670 /* All valid uses of __builtin_va_arg_pack () are removed during
6671 inlining. */
6672 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6673 return const0_rtx;
6674
6675 case BUILT_IN_VA_ARG_PACK_LEN:
6676 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6677 inlining. */
6678 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6679 return const0_rtx;
6680
6681 /* Return the address of the first anonymous stack arg. */
6682 case BUILT_IN_NEXT_ARG:
6683 if (fold_builtin_next_arg (exp, false))
6684 return const0_rtx;
6685 return expand_builtin_next_arg ();
6686
6687 case BUILT_IN_CLEAR_CACHE:
6688 target = expand_builtin___clear_cache (exp);
6689 if (target)
6690 return target;
6691 break;
6692
6693 case BUILT_IN_CLASSIFY_TYPE:
6694 return expand_builtin_classify_type (exp);
6695
6696 case BUILT_IN_CONSTANT_P:
6697 return const0_rtx;
6698
6699 case BUILT_IN_FRAME_ADDRESS:
6700 case BUILT_IN_RETURN_ADDRESS:
6701 return expand_builtin_frame_address (fndecl, exp);
6702
6703 /* Returns the address of the area where the structure is returned.
6704 0 otherwise. */
6705 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6706 if (call_expr_nargs (exp) != 0
6707 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6708 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6709 return const0_rtx;
6710 else
6711 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6712
6713 case BUILT_IN_ALLOCA:
6714 case BUILT_IN_ALLOCA_WITH_ALIGN:
6715 target = expand_builtin_alloca (exp);
6716 if (target)
6717 return target;
6718 break;
6719
6720 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6721 return expand_asan_emit_allocas_unpoison (exp);
6722
6723 case BUILT_IN_STACK_SAVE:
6724 return expand_stack_save ();
6725
6726 case BUILT_IN_STACK_RESTORE:
6727 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6728 return const0_rtx;
6729
6730 case BUILT_IN_BSWAP16:
6731 case BUILT_IN_BSWAP32:
6732 case BUILT_IN_BSWAP64:
6733 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6734 if (target)
6735 return target;
6736 break;
6737
6738 CASE_INT_FN (BUILT_IN_FFS):
6739 target = expand_builtin_unop (target_mode, exp, target,
6740 subtarget, ffs_optab);
6741 if (target)
6742 return target;
6743 break;
6744
6745 CASE_INT_FN (BUILT_IN_CLZ):
6746 target = expand_builtin_unop (target_mode, exp, target,
6747 subtarget, clz_optab);
6748 if (target)
6749 return target;
6750 break;
6751
6752 CASE_INT_FN (BUILT_IN_CTZ):
6753 target = expand_builtin_unop (target_mode, exp, target,
6754 subtarget, ctz_optab);
6755 if (target)
6756 return target;
6757 break;
6758
6759 CASE_INT_FN (BUILT_IN_CLRSB):
6760 target = expand_builtin_unop (target_mode, exp, target,
6761 subtarget, clrsb_optab);
6762 if (target)
6763 return target;
6764 break;
6765
6766 CASE_INT_FN (BUILT_IN_POPCOUNT):
6767 target = expand_builtin_unop (target_mode, exp, target,
6768 subtarget, popcount_optab);
6769 if (target)
6770 return target;
6771 break;
6772
6773 CASE_INT_FN (BUILT_IN_PARITY):
6774 target = expand_builtin_unop (target_mode, exp, target,
6775 subtarget, parity_optab);
6776 if (target)
6777 return target;
6778 break;
6779
6780 case BUILT_IN_STRLEN:
6781 target = expand_builtin_strlen (exp, target, target_mode);
6782 if (target)
6783 return target;
6784 break;
6785
6786 case BUILT_IN_STRCAT:
6787 target = expand_builtin_strcat (exp, target);
6788 if (target)
6789 return target;
6790 break;
6791
6792 case BUILT_IN_STRCPY:
6793 target = expand_builtin_strcpy (exp, target);
6794 if (target)
6795 return target;
6796 break;
6797
6798 case BUILT_IN_STRNCAT:
6799 target = expand_builtin_strncat (exp, target);
6800 if (target)
6801 return target;
6802 break;
6803
6804 case BUILT_IN_STRNCPY:
6805 target = expand_builtin_strncpy (exp, target);
6806 if (target)
6807 return target;
6808 break;
6809
6810 case BUILT_IN_STPCPY:
6811 target = expand_builtin_stpcpy (exp, target, mode);
6812 if (target)
6813 return target;
6814 break;
6815
6816 case BUILT_IN_STPNCPY:
6817 target = expand_builtin_stpncpy (exp, target);
6818 if (target)
6819 return target;
6820 break;
6821
6822 case BUILT_IN_MEMCHR:
6823 target = expand_builtin_memchr (exp, target);
6824 if (target)
6825 return target;
6826 break;
6827
6828 case BUILT_IN_MEMCPY:
6829 target = expand_builtin_memcpy (exp, target);
6830 if (target)
6831 return target;
6832 break;
6833
6834 case BUILT_IN_MEMMOVE:
6835 target = expand_builtin_memmove (exp, target);
6836 if (target)
6837 return target;
6838 break;
6839
6840 case BUILT_IN_MEMPCPY:
6841 target = expand_builtin_mempcpy (exp, target);
6842 if (target)
6843 return target;
6844 break;
6845
6846 case BUILT_IN_MEMSET:
6847 target = expand_builtin_memset (exp, target, mode);
6848 if (target)
6849 return target;
6850 break;
6851
6852 case BUILT_IN_BZERO:
6853 target = expand_builtin_bzero (exp);
6854 if (target)
6855 return target;
6856 break;
6857
6858 case BUILT_IN_STRCMP:
6859 target = expand_builtin_strcmp (exp, target);
6860 if (target)
6861 return target;
6862 break;
6863
6864 case BUILT_IN_STRNCMP:
6865 target = expand_builtin_strncmp (exp, target, mode);
6866 if (target)
6867 return target;
6868 break;
6869
6870 case BUILT_IN_BCMP:
6871 case BUILT_IN_MEMCMP:
6872 case BUILT_IN_MEMCMP_EQ:
6873 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6874 if (target)
6875 return target;
6876 if (fcode == BUILT_IN_MEMCMP_EQ)
6877 {
6878 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6879 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6880 }
6881 break;
6882
6883 case BUILT_IN_SETJMP:
6884 /* This should have been lowered to the builtins below. */
6885 gcc_unreachable ();
6886
6887 case BUILT_IN_SETJMP_SETUP:
6888 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6889 and the receiver label. */
6890 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6891 {
6892 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6893 VOIDmode, EXPAND_NORMAL);
6894 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6895 rtx_insn *label_r = label_rtx (label);
6896
6897 /* This is copied from the handling of non-local gotos. */
6898 expand_builtin_setjmp_setup (buf_addr, label_r);
6899 nonlocal_goto_handler_labels
6900 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6901 nonlocal_goto_handler_labels);
6902 /* ??? Do not let expand_label treat us as such since we would
6903 not want to be both on the list of non-local labels and on
6904 the list of forced labels. */
6905 FORCED_LABEL (label) = 0;
6906 return const0_rtx;
6907 }
6908 break;
6909
6910 case BUILT_IN_SETJMP_RECEIVER:
6911 /* __builtin_setjmp_receiver is passed the receiver label. */
6912 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6913 {
6914 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6915 rtx_insn *label_r = label_rtx (label);
6916
6917 expand_builtin_setjmp_receiver (label_r);
6918 return const0_rtx;
6919 }
6920 break;
6921
6922 /* __builtin_longjmp is passed a pointer to an array of five words.
6923 It's similar to the C library longjmp function but works with
6924 __builtin_setjmp above. */
6925 case BUILT_IN_LONGJMP:
6926 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6927 {
6928 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6929 VOIDmode, EXPAND_NORMAL);
6930 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6931
6932 if (value != const1_rtx)
6933 {
6934 error ("%<__builtin_longjmp%> second argument must be 1");
6935 return const0_rtx;
6936 }
6937
6938 expand_builtin_longjmp (buf_addr, value);
6939 return const0_rtx;
6940 }
6941 break;
6942
6943 case BUILT_IN_NONLOCAL_GOTO:
6944 target = expand_builtin_nonlocal_goto (exp);
6945 if (target)
6946 return target;
6947 break;
6948
6949 /* This updates the setjmp buffer that is its argument with the value
6950 of the current stack pointer. */
6951 case BUILT_IN_UPDATE_SETJMP_BUF:
6952 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6953 {
6954 rtx buf_addr
6955 = expand_normal (CALL_EXPR_ARG (exp, 0));
6956
6957 expand_builtin_update_setjmp_buf (buf_addr);
6958 return const0_rtx;
6959 }
6960 break;
6961
6962 case BUILT_IN_TRAP:
6963 expand_builtin_trap ();
6964 return const0_rtx;
6965
6966 case BUILT_IN_UNREACHABLE:
6967 expand_builtin_unreachable ();
6968 return const0_rtx;
6969
6970 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6971 case BUILT_IN_SIGNBITD32:
6972 case BUILT_IN_SIGNBITD64:
6973 case BUILT_IN_SIGNBITD128:
6974 target = expand_builtin_signbit (exp, target);
6975 if (target)
6976 return target;
6977 break;
6978
6979 /* Various hooks for the DWARF 2 __throw routine. */
6980 case BUILT_IN_UNWIND_INIT:
6981 expand_builtin_unwind_init ();
6982 return const0_rtx;
6983 case BUILT_IN_DWARF_CFA:
6984 return virtual_cfa_rtx;
6985 #ifdef DWARF2_UNWIND_INFO
6986 case BUILT_IN_DWARF_SP_COLUMN:
6987 return expand_builtin_dwarf_sp_column ();
6988 case BUILT_IN_INIT_DWARF_REG_SIZES:
6989 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6990 return const0_rtx;
6991 #endif
6992 case BUILT_IN_FROB_RETURN_ADDR:
6993 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6994 case BUILT_IN_EXTRACT_RETURN_ADDR:
6995 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6996 case BUILT_IN_EH_RETURN:
6997 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6998 CALL_EXPR_ARG (exp, 1));
6999 return const0_rtx;
7000 case BUILT_IN_EH_RETURN_DATA_REGNO:
7001 return expand_builtin_eh_return_data_regno (exp);
7002 case BUILT_IN_EXTEND_POINTER:
7003 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7004 case BUILT_IN_EH_POINTER:
7005 return expand_builtin_eh_pointer (exp);
7006 case BUILT_IN_EH_FILTER:
7007 return expand_builtin_eh_filter (exp);
7008 case BUILT_IN_EH_COPY_VALUES:
7009 return expand_builtin_eh_copy_values (exp);
7010
7011 case BUILT_IN_VA_START:
7012 return expand_builtin_va_start (exp);
7013 case BUILT_IN_VA_END:
7014 return expand_builtin_va_end (exp);
7015 case BUILT_IN_VA_COPY:
7016 return expand_builtin_va_copy (exp);
7017 case BUILT_IN_EXPECT:
7018 return expand_builtin_expect (exp, target);
7019 case BUILT_IN_ASSUME_ALIGNED:
7020 return expand_builtin_assume_aligned (exp, target);
7021 case BUILT_IN_PREFETCH:
7022 expand_builtin_prefetch (exp);
7023 return const0_rtx;
7024
7025 case BUILT_IN_INIT_TRAMPOLINE:
7026 return expand_builtin_init_trampoline (exp, true);
7027 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7028 return expand_builtin_init_trampoline (exp, false);
7029 case BUILT_IN_ADJUST_TRAMPOLINE:
7030 return expand_builtin_adjust_trampoline (exp);
7031
7032 case BUILT_IN_INIT_DESCRIPTOR:
7033 return expand_builtin_init_descriptor (exp);
7034 case BUILT_IN_ADJUST_DESCRIPTOR:
7035 return expand_builtin_adjust_descriptor (exp);
7036
7037 case BUILT_IN_FORK:
7038 case BUILT_IN_EXECL:
7039 case BUILT_IN_EXECV:
7040 case BUILT_IN_EXECLP:
7041 case BUILT_IN_EXECLE:
7042 case BUILT_IN_EXECVP:
7043 case BUILT_IN_EXECVE:
7044 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7045 if (target)
7046 return target;
7047 break;
7048
7049 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7050 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7051 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7052 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7053 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7054 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7055 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7056 if (target)
7057 return target;
7058 break;
7059
7060 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7061 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7062 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7063 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7064 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7065 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7066 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7067 if (target)
7068 return target;
7069 break;
7070
7071 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7072 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7073 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7074 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7075 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7076 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7077 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7078 if (target)
7079 return target;
7080 break;
7081
7082 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7083 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7084 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7085 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7086 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7087 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7088 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7089 if (target)
7090 return target;
7091 break;
7092
7093 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7094 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7095 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7096 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7097 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7098 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7099 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7100 if (target)
7101 return target;
7102 break;
7103
7104 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7105 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7106 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7107 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7108 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7109 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7110 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7111 if (target)
7112 return target;
7113 break;
7114
7115 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7116 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7117 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7118 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7119 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7120 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7121 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7122 if (target)
7123 return target;
7124 break;
7125
7126 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7127 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7128 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7129 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7130 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7131 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7132 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7133 if (target)
7134 return target;
7135 break;
7136
7137 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7138 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7139 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7140 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7141 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7142 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7143 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7144 if (target)
7145 return target;
7146 break;
7147
7148 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7149 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7150 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7151 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7152 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7153 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7154 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7155 if (target)
7156 return target;
7157 break;
7158
7159 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7160 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7161 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7162 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7163 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7164 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7165 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7166 if (target)
7167 return target;
7168 break;
7169
7170 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7171 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7172 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7173 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7174 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7175 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7176 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7177 if (target)
7178 return target;
7179 break;
7180
7181 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7182 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7183 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7184 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7185 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7186 if (mode == VOIDmode)
7187 mode = TYPE_MODE (boolean_type_node);
7188 if (!target || !register_operand (target, mode))
7189 target = gen_reg_rtx (mode);
7190
7191 mode = get_builtin_sync_mode
7192 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7193 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7194 if (target)
7195 return target;
7196 break;
7197
7198 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7199 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7200 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7201 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7202 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7203 mode = get_builtin_sync_mode
7204 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7205 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7206 if (target)
7207 return target;
7208 break;
7209
7210 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7211 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7212 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7213 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7214 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7215 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7216 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7217 if (target)
7218 return target;
7219 break;
7220
7221 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7222 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7223 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7224 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7225 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7226 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7227 expand_builtin_sync_lock_release (mode, exp);
7228 return const0_rtx;
7229
7230 case BUILT_IN_SYNC_SYNCHRONIZE:
7231 expand_builtin_sync_synchronize ();
7232 return const0_rtx;
7233
7234 case BUILT_IN_ATOMIC_EXCHANGE_1:
7235 case BUILT_IN_ATOMIC_EXCHANGE_2:
7236 case BUILT_IN_ATOMIC_EXCHANGE_4:
7237 case BUILT_IN_ATOMIC_EXCHANGE_8:
7238 case BUILT_IN_ATOMIC_EXCHANGE_16:
7239 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7240 target = expand_builtin_atomic_exchange (mode, exp, target);
7241 if (target)
7242 return target;
7243 break;
7244
7245 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7246 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7247 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7248 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7249 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7250 {
7251 unsigned int nargs, z;
7252 vec<tree, va_gc> *vec;
7253
7254 mode =
7255 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7256 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7257 if (target)
7258 return target;
7259
7260 /* If this is turned into an external library call, the weak parameter
7261 must be dropped to match the expected parameter list. */
7262 nargs = call_expr_nargs (exp);
7263 vec_alloc (vec, nargs - 1);
7264 for (z = 0; z < 3; z++)
7265 vec->quick_push (CALL_EXPR_ARG (exp, z));
7266 /* Skip the boolean weak parameter. */
7267 for (z = 4; z < 6; z++)
7268 vec->quick_push (CALL_EXPR_ARG (exp, z));
7269 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7270 break;
7271 }
7272
7273 case BUILT_IN_ATOMIC_LOAD_1:
7274 case BUILT_IN_ATOMIC_LOAD_2:
7275 case BUILT_IN_ATOMIC_LOAD_4:
7276 case BUILT_IN_ATOMIC_LOAD_8:
7277 case BUILT_IN_ATOMIC_LOAD_16:
7278 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7279 target = expand_builtin_atomic_load (mode, exp, target);
7280 if (target)
7281 return target;
7282 break;
7283
7284 case BUILT_IN_ATOMIC_STORE_1:
7285 case BUILT_IN_ATOMIC_STORE_2:
7286 case BUILT_IN_ATOMIC_STORE_4:
7287 case BUILT_IN_ATOMIC_STORE_8:
7288 case BUILT_IN_ATOMIC_STORE_16:
7289 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7290 target = expand_builtin_atomic_store (mode, exp);
7291 if (target)
7292 return const0_rtx;
7293 break;
7294
7295 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7296 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7297 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7298 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7299 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7300 {
7301 enum built_in_function lib;
7302 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7303 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7304 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7305 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7306 ignore, lib);
7307 if (target)
7308 return target;
7309 break;
7310 }
7311 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7312 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7313 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7314 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7315 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7316 {
7317 enum built_in_function lib;
7318 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7319 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7320 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7321 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7322 ignore, lib);
7323 if (target)
7324 return target;
7325 break;
7326 }
7327 case BUILT_IN_ATOMIC_AND_FETCH_1:
7328 case BUILT_IN_ATOMIC_AND_FETCH_2:
7329 case BUILT_IN_ATOMIC_AND_FETCH_4:
7330 case BUILT_IN_ATOMIC_AND_FETCH_8:
7331 case BUILT_IN_ATOMIC_AND_FETCH_16:
7332 {
7333 enum built_in_function lib;
7334 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7335 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7336 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7337 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7338 ignore, lib);
7339 if (target)
7340 return target;
7341 break;
7342 }
7343 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7344 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7345 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7346 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7347 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7348 {
7349 enum built_in_function lib;
7350 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7351 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7352 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7353 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7354 ignore, lib);
7355 if (target)
7356 return target;
7357 break;
7358 }
7359 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7360 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7361 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7362 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7363 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7364 {
7365 enum built_in_function lib;
7366 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7367 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7368 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7369 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7370 ignore, lib);
7371 if (target)
7372 return target;
7373 break;
7374 }
7375 case BUILT_IN_ATOMIC_OR_FETCH_1:
7376 case BUILT_IN_ATOMIC_OR_FETCH_2:
7377 case BUILT_IN_ATOMIC_OR_FETCH_4:
7378 case BUILT_IN_ATOMIC_OR_FETCH_8:
7379 case BUILT_IN_ATOMIC_OR_FETCH_16:
7380 {
7381 enum built_in_function lib;
7382 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7383 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7384 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7385 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7386 ignore, lib);
7387 if (target)
7388 return target;
7389 break;
7390 }
7391 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7392 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7393 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7394 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7395 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7396 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7397 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7398 ignore, BUILT_IN_NONE);
7399 if (target)
7400 return target;
7401 break;
7402
7403 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7404 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7405 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7406 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7407 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7408 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7409 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7410 ignore, BUILT_IN_NONE);
7411 if (target)
7412 return target;
7413 break;
7414
7415 case BUILT_IN_ATOMIC_FETCH_AND_1:
7416 case BUILT_IN_ATOMIC_FETCH_AND_2:
7417 case BUILT_IN_ATOMIC_FETCH_AND_4:
7418 case BUILT_IN_ATOMIC_FETCH_AND_8:
7419 case BUILT_IN_ATOMIC_FETCH_AND_16:
7420 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7421 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7422 ignore, BUILT_IN_NONE);
7423 if (target)
7424 return target;
7425 break;
7426
7427 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7428 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7429 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7430 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7431 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7432 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7433 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7434 ignore, BUILT_IN_NONE);
7435 if (target)
7436 return target;
7437 break;
7438
7439 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7440 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7441 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7442 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7443 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7444 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7445 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7446 ignore, BUILT_IN_NONE);
7447 if (target)
7448 return target;
7449 break;
7450
7451 case BUILT_IN_ATOMIC_FETCH_OR_1:
7452 case BUILT_IN_ATOMIC_FETCH_OR_2:
7453 case BUILT_IN_ATOMIC_FETCH_OR_4:
7454 case BUILT_IN_ATOMIC_FETCH_OR_8:
7455 case BUILT_IN_ATOMIC_FETCH_OR_16:
7456 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7457 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7458 ignore, BUILT_IN_NONE);
7459 if (target)
7460 return target;
7461 break;
7462
7463 case BUILT_IN_ATOMIC_TEST_AND_SET:
7464 return expand_builtin_atomic_test_and_set (exp, target);
7465
7466 case BUILT_IN_ATOMIC_CLEAR:
7467 return expand_builtin_atomic_clear (exp);
7468
7469 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7470 return expand_builtin_atomic_always_lock_free (exp);
7471
7472 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7473 target = expand_builtin_atomic_is_lock_free (exp);
7474 if (target)
7475 return target;
7476 break;
7477
7478 case BUILT_IN_ATOMIC_THREAD_FENCE:
7479 expand_builtin_atomic_thread_fence (exp);
7480 return const0_rtx;
7481
7482 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7483 expand_builtin_atomic_signal_fence (exp);
7484 return const0_rtx;
7485
7486 case BUILT_IN_OBJECT_SIZE:
7487 return expand_builtin_object_size (exp);
7488
7489 case BUILT_IN_MEMCPY_CHK:
7490 case BUILT_IN_MEMPCPY_CHK:
7491 case BUILT_IN_MEMMOVE_CHK:
7492 case BUILT_IN_MEMSET_CHK:
7493 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7494 if (target)
7495 return target;
7496 break;
7497
7498 case BUILT_IN_STRCPY_CHK:
7499 case BUILT_IN_STPCPY_CHK:
7500 case BUILT_IN_STRNCPY_CHK:
7501 case BUILT_IN_STPNCPY_CHK:
7502 case BUILT_IN_STRCAT_CHK:
7503 case BUILT_IN_STRNCAT_CHK:
7504 case BUILT_IN_SNPRINTF_CHK:
7505 case BUILT_IN_VSNPRINTF_CHK:
7506 maybe_emit_chk_warning (exp, fcode);
7507 break;
7508
7509 case BUILT_IN_SPRINTF_CHK:
7510 case BUILT_IN_VSPRINTF_CHK:
7511 maybe_emit_sprintf_chk_warning (exp, fcode);
7512 break;
7513
7514 case BUILT_IN_FREE:
7515 if (warn_free_nonheap_object)
7516 maybe_emit_free_warning (exp);
7517 break;
7518
7519 case BUILT_IN_THREAD_POINTER:
7520 return expand_builtin_thread_pointer (exp, target);
7521
7522 case BUILT_IN_SET_THREAD_POINTER:
7523 expand_builtin_set_thread_pointer (exp);
7524 return const0_rtx;
7525
7526 case BUILT_IN_CILK_DETACH:
7527 expand_builtin_cilk_detach (exp);
7528 return const0_rtx;
7529
7530 case BUILT_IN_CILK_POP_FRAME:
7531 expand_builtin_cilk_pop_frame (exp);
7532 return const0_rtx;
7533
7534 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7535 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7536 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7537 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7538 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7539 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7540 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7541 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7542 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7543 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7544 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7545 /* We allow user CHKP builtins if Pointer Bounds
7546 Checker is off. */
7547 if (!chkp_function_instrumented_p (current_function_decl))
7548 {
7549 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7550 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7551 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7552 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7553 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7554 return expand_normal (CALL_EXPR_ARG (exp, 0));
7555 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7556 return expand_normal (size_zero_node);
7557 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7558 return expand_normal (size_int (-1));
7559 else
7560 return const0_rtx;
7561 }
7562 /* FALLTHROUGH */
7563
7564 case BUILT_IN_CHKP_BNDMK:
7565 case BUILT_IN_CHKP_BNDSTX:
7566 case BUILT_IN_CHKP_BNDCL:
7567 case BUILT_IN_CHKP_BNDCU:
7568 case BUILT_IN_CHKP_BNDLDX:
7569 case BUILT_IN_CHKP_BNDRET:
7570 case BUILT_IN_CHKP_INTERSECT:
7571 case BUILT_IN_CHKP_NARROW:
7572 case BUILT_IN_CHKP_EXTRACT_LOWER:
7573 case BUILT_IN_CHKP_EXTRACT_UPPER:
7574 /* Software implementation of Pointer Bounds Checker is NYI.
7575 Target support is required. */
7576 error ("Your target platform does not support -fcheck-pointer-bounds");
7577 break;
7578
7579 case BUILT_IN_ACC_ON_DEVICE:
7580 /* Do library call, if we failed to expand the builtin when
7581 folding. */
7582 break;
7583
7584 default: /* just do library call, if unknown builtin */
7585 break;
7586 }
7587
7588 /* The switch statement above can drop through to cause the function
7589 to be called normally. */
7590 return expand_call (exp, target, ignore);
7591 }
7592
7593 /* Similar to expand_builtin but is used for instrumented calls. */
7594
7595 rtx
7596 expand_builtin_with_bounds (tree exp, rtx target,
7597 rtx subtarget ATTRIBUTE_UNUSED,
7598 machine_mode mode, int ignore)
7599 {
7600 tree fndecl = get_callee_fndecl (exp);
7601 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7602
7603 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7604
7605 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7606 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7607
7608 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7609 && fcode < END_CHKP_BUILTINS);
7610
7611 switch (fcode)
7612 {
7613 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7614 target = expand_builtin_memcpy_with_bounds (exp, target);
7615 if (target)
7616 return target;
7617 break;
7618
7619 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7620 target = expand_builtin_mempcpy_with_bounds (exp, target);
7621 if (target)
7622 return target;
7623 break;
7624
7625 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7626 target = expand_builtin_memset_with_bounds (exp, target, mode);
7627 if (target)
7628 return target;
7629 break;
7630
7631 default:
7632 break;
7633 }
7634
7635 /* The switch statement above can drop through to cause the function
7636 to be called normally. */
7637 return expand_call (exp, target, ignore);
7638 }
7639
7640 /* Determine whether a tree node represents a call to a built-in
7641 function. If the tree T is a call to a built-in function with
7642 the right number of arguments of the appropriate types, return
7643 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7644 Otherwise the return value is END_BUILTINS. */
7645
7646 enum built_in_function
7647 builtin_mathfn_code (const_tree t)
7648 {
7649 const_tree fndecl, arg, parmlist;
7650 const_tree argtype, parmtype;
7651 const_call_expr_arg_iterator iter;
7652
7653 if (TREE_CODE (t) != CALL_EXPR
7654 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7655 return END_BUILTINS;
7656
7657 fndecl = get_callee_fndecl (t);
7658 if (fndecl == NULL_TREE
7659 || TREE_CODE (fndecl) != FUNCTION_DECL
7660 || ! DECL_BUILT_IN (fndecl)
7661 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7662 return END_BUILTINS;
7663
7664 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7665 init_const_call_expr_arg_iterator (t, &iter);
7666 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7667 {
7668 /* If a function doesn't take a variable number of arguments,
7669 the last element in the list will have type `void'. */
7670 parmtype = TREE_VALUE (parmlist);
7671 if (VOID_TYPE_P (parmtype))
7672 {
7673 if (more_const_call_expr_args_p (&iter))
7674 return END_BUILTINS;
7675 return DECL_FUNCTION_CODE (fndecl);
7676 }
7677
7678 if (! more_const_call_expr_args_p (&iter))
7679 return END_BUILTINS;
7680
7681 arg = next_const_call_expr_arg (&iter);
7682 argtype = TREE_TYPE (arg);
7683
7684 if (SCALAR_FLOAT_TYPE_P (parmtype))
7685 {
7686 if (! SCALAR_FLOAT_TYPE_P (argtype))
7687 return END_BUILTINS;
7688 }
7689 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7690 {
7691 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7692 return END_BUILTINS;
7693 }
7694 else if (POINTER_TYPE_P (parmtype))
7695 {
7696 if (! POINTER_TYPE_P (argtype))
7697 return END_BUILTINS;
7698 }
7699 else if (INTEGRAL_TYPE_P (parmtype))
7700 {
7701 if (! INTEGRAL_TYPE_P (argtype))
7702 return END_BUILTINS;
7703 }
7704 else
7705 return END_BUILTINS;
7706 }
7707
7708 /* Variable-length argument list. */
7709 return DECL_FUNCTION_CODE (fndecl);
7710 }
7711
7712 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7713 evaluate to a constant. */
7714
7715 static tree
7716 fold_builtin_constant_p (tree arg)
7717 {
7718 /* We return 1 for a numeric type that's known to be a constant
7719 value at compile-time or for an aggregate type that's a
7720 literal constant. */
7721 STRIP_NOPS (arg);
7722
7723 /* If we know this is a constant, emit the constant of one. */
7724 if (CONSTANT_CLASS_P (arg)
7725 || (TREE_CODE (arg) == CONSTRUCTOR
7726 && TREE_CONSTANT (arg)))
7727 return integer_one_node;
7728 if (TREE_CODE (arg) == ADDR_EXPR)
7729 {
7730 tree op = TREE_OPERAND (arg, 0);
7731 if (TREE_CODE (op) == STRING_CST
7732 || (TREE_CODE (op) == ARRAY_REF
7733 && integer_zerop (TREE_OPERAND (op, 1))
7734 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7735 return integer_one_node;
7736 }
7737
7738 /* If this expression has side effects, show we don't know it to be a
7739 constant. Likewise if it's a pointer or aggregate type since in
7740 those case we only want literals, since those are only optimized
7741 when generating RTL, not later.
7742 And finally, if we are compiling an initializer, not code, we
7743 need to return a definite result now; there's not going to be any
7744 more optimization done. */
7745 if (TREE_SIDE_EFFECTS (arg)
7746 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7747 || POINTER_TYPE_P (TREE_TYPE (arg))
7748 || cfun == 0
7749 || folding_initializer
7750 || force_folding_builtin_constant_p)
7751 return integer_zero_node;
7752
7753 return NULL_TREE;
7754 }
7755
7756 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7757 return it as a truthvalue. */
7758
7759 static tree
7760 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7761 tree predictor)
7762 {
7763 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7764
7765 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7766 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7767 ret_type = TREE_TYPE (TREE_TYPE (fn));
7768 pred_type = TREE_VALUE (arg_types);
7769 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7770
7771 pred = fold_convert_loc (loc, pred_type, pred);
7772 expected = fold_convert_loc (loc, expected_type, expected);
7773 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7774 predictor);
7775
7776 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7777 build_int_cst (ret_type, 0));
7778 }
7779
7780 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7781 NULL_TREE if no simplification is possible. */
7782
7783 tree
7784 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7785 {
7786 tree inner, fndecl, inner_arg0;
7787 enum tree_code code;
7788
7789 /* Distribute the expected value over short-circuiting operators.
7790 See through the cast from truthvalue_type_node to long. */
7791 inner_arg0 = arg0;
7792 while (CONVERT_EXPR_P (inner_arg0)
7793 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7794 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7795 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7796
7797 /* If this is a builtin_expect within a builtin_expect keep the
7798 inner one. See through a comparison against a constant. It
7799 might have been added to create a thruthvalue. */
7800 inner = inner_arg0;
7801
7802 if (COMPARISON_CLASS_P (inner)
7803 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7804 inner = TREE_OPERAND (inner, 0);
7805
7806 if (TREE_CODE (inner) == CALL_EXPR
7807 && (fndecl = get_callee_fndecl (inner))
7808 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7809 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7810 return arg0;
7811
7812 inner = inner_arg0;
7813 code = TREE_CODE (inner);
7814 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7815 {
7816 tree op0 = TREE_OPERAND (inner, 0);
7817 tree op1 = TREE_OPERAND (inner, 1);
7818
7819 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7820 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7821 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7822
7823 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7824 }
7825
7826 /* If the argument isn't invariant then there's nothing else we can do. */
7827 if (!TREE_CONSTANT (inner_arg0))
7828 return NULL_TREE;
7829
7830 /* If we expect that a comparison against the argument will fold to
7831 a constant return the constant. In practice, this means a true
7832 constant or the address of a non-weak symbol. */
7833 inner = inner_arg0;
7834 STRIP_NOPS (inner);
7835 if (TREE_CODE (inner) == ADDR_EXPR)
7836 {
7837 do
7838 {
7839 inner = TREE_OPERAND (inner, 0);
7840 }
7841 while (TREE_CODE (inner) == COMPONENT_REF
7842 || TREE_CODE (inner) == ARRAY_REF);
7843 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7844 return NULL_TREE;
7845 }
7846
7847 /* Otherwise, ARG0 already has the proper type for the return value. */
7848 return arg0;
7849 }
7850
7851 /* Fold a call to __builtin_classify_type with argument ARG. */
7852
7853 static tree
7854 fold_builtin_classify_type (tree arg)
7855 {
7856 if (arg == 0)
7857 return build_int_cst (integer_type_node, no_type_class);
7858
7859 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7860 }
7861
7862 /* Fold a call to __builtin_strlen with argument ARG. */
7863
7864 static tree
7865 fold_builtin_strlen (location_t loc, tree type, tree arg)
7866 {
7867 if (!validate_arg (arg, POINTER_TYPE))
7868 return NULL_TREE;
7869 else
7870 {
7871 tree len = c_strlen (arg, 0);
7872
7873 if (len)
7874 return fold_convert_loc (loc, type, len);
7875
7876 return NULL_TREE;
7877 }
7878 }
7879
7880 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7881
7882 static tree
7883 fold_builtin_inf (location_t loc, tree type, int warn)
7884 {
7885 REAL_VALUE_TYPE real;
7886
7887 /* __builtin_inff is intended to be usable to define INFINITY on all
7888 targets. If an infinity is not available, INFINITY expands "to a
7889 positive constant of type float that overflows at translation
7890 time", footnote "In this case, using INFINITY will violate the
7891 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7892 Thus we pedwarn to ensure this constraint violation is
7893 diagnosed. */
7894 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7895 pedwarn (loc, 0, "target format does not support infinity");
7896
7897 real_inf (&real);
7898 return build_real (type, real);
7899 }
7900
7901 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7902 NULL_TREE if no simplification can be made. */
7903
7904 static tree
7905 fold_builtin_sincos (location_t loc,
7906 tree arg0, tree arg1, tree arg2)
7907 {
7908 tree type;
7909 tree fndecl, call = NULL_TREE;
7910
7911 if (!validate_arg (arg0, REAL_TYPE)
7912 || !validate_arg (arg1, POINTER_TYPE)
7913 || !validate_arg (arg2, POINTER_TYPE))
7914 return NULL_TREE;
7915
7916 type = TREE_TYPE (arg0);
7917
7918 /* Calculate the result when the argument is a constant. */
7919 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7920 if (fn == END_BUILTINS)
7921 return NULL_TREE;
7922
7923 /* Canonicalize sincos to cexpi. */
7924 if (TREE_CODE (arg0) == REAL_CST)
7925 {
7926 tree complex_type = build_complex_type (type);
7927 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7928 }
7929 if (!call)
7930 {
7931 if (!targetm.libc_has_function (function_c99_math_complex)
7932 || !builtin_decl_implicit_p (fn))
7933 return NULL_TREE;
7934 fndecl = builtin_decl_explicit (fn);
7935 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7936 call = builtin_save_expr (call);
7937 }
7938
7939 return build2 (COMPOUND_EXPR, void_type_node,
7940 build2 (MODIFY_EXPR, void_type_node,
7941 build_fold_indirect_ref_loc (loc, arg1),
7942 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7943 build2 (MODIFY_EXPR, void_type_node,
7944 build_fold_indirect_ref_loc (loc, arg2),
7945 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7946 }
7947
7948 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7949 Return NULL_TREE if no simplification can be made. */
7950
7951 static tree
7952 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7953 {
7954 if (!validate_arg (arg1, POINTER_TYPE)
7955 || !validate_arg (arg2, POINTER_TYPE)
7956 || !validate_arg (len, INTEGER_TYPE))
7957 return NULL_TREE;
7958
7959 /* If the LEN parameter is zero, return zero. */
7960 if (integer_zerop (len))
7961 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7962 arg1, arg2);
7963
7964 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7965 if (operand_equal_p (arg1, arg2, 0))
7966 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7967
7968 /* If len parameter is one, return an expression corresponding to
7969 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7970 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7971 {
7972 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7973 tree cst_uchar_ptr_node
7974 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7975
7976 tree ind1
7977 = fold_convert_loc (loc, integer_type_node,
7978 build1 (INDIRECT_REF, cst_uchar_node,
7979 fold_convert_loc (loc,
7980 cst_uchar_ptr_node,
7981 arg1)));
7982 tree ind2
7983 = fold_convert_loc (loc, integer_type_node,
7984 build1 (INDIRECT_REF, cst_uchar_node,
7985 fold_convert_loc (loc,
7986 cst_uchar_ptr_node,
7987 arg2)));
7988 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7989 }
7990
7991 return NULL_TREE;
7992 }
7993
7994 /* Fold a call to builtin isascii with argument ARG. */
7995
7996 static tree
7997 fold_builtin_isascii (location_t loc, tree arg)
7998 {
7999 if (!validate_arg (arg, INTEGER_TYPE))
8000 return NULL_TREE;
8001 else
8002 {
8003 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8004 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8005 build_int_cst (integer_type_node,
8006 ~ (unsigned HOST_WIDE_INT) 0x7f));
8007 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8008 arg, integer_zero_node);
8009 }
8010 }
8011
8012 /* Fold a call to builtin toascii with argument ARG. */
8013
8014 static tree
8015 fold_builtin_toascii (location_t loc, tree arg)
8016 {
8017 if (!validate_arg (arg, INTEGER_TYPE))
8018 return NULL_TREE;
8019
8020 /* Transform toascii(c) -> (c & 0x7f). */
8021 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8022 build_int_cst (integer_type_node, 0x7f));
8023 }
8024
8025 /* Fold a call to builtin isdigit with argument ARG. */
8026
8027 static tree
8028 fold_builtin_isdigit (location_t loc, tree arg)
8029 {
8030 if (!validate_arg (arg, INTEGER_TYPE))
8031 return NULL_TREE;
8032 else
8033 {
8034 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8035 /* According to the C standard, isdigit is unaffected by locale.
8036 However, it definitely is affected by the target character set. */
8037 unsigned HOST_WIDE_INT target_digit0
8038 = lang_hooks.to_target_charset ('0');
8039
8040 if (target_digit0 == 0)
8041 return NULL_TREE;
8042
8043 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8044 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8045 build_int_cst (unsigned_type_node, target_digit0));
8046 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8047 build_int_cst (unsigned_type_node, 9));
8048 }
8049 }
8050
8051 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8052
8053 static tree
8054 fold_builtin_fabs (location_t loc, tree arg, tree type)
8055 {
8056 if (!validate_arg (arg, REAL_TYPE))
8057 return NULL_TREE;
8058
8059 arg = fold_convert_loc (loc, type, arg);
8060 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8061 }
8062
8063 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8064
8065 static tree
8066 fold_builtin_abs (location_t loc, tree arg, tree type)
8067 {
8068 if (!validate_arg (arg, INTEGER_TYPE))
8069 return NULL_TREE;
8070
8071 arg = fold_convert_loc (loc, type, arg);
8072 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8073 }
8074
8075 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8076
8077 static tree
8078 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8079 {
8080 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8081 if (validate_arg (arg0, REAL_TYPE)
8082 && validate_arg (arg1, REAL_TYPE)
8083 && validate_arg (arg2, REAL_TYPE)
8084 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8085 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8086
8087 return NULL_TREE;
8088 }
8089
8090 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8091
8092 static tree
8093 fold_builtin_carg (location_t loc, tree arg, tree type)
8094 {
8095 if (validate_arg (arg, COMPLEX_TYPE)
8096 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8097 {
8098 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8099
8100 if (atan2_fn)
8101 {
8102 tree new_arg = builtin_save_expr (arg);
8103 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8104 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8105 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8106 }
8107 }
8108
8109 return NULL_TREE;
8110 }
8111
8112 /* Fold a call to builtin frexp, we can assume the base is 2. */
8113
8114 static tree
8115 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8116 {
8117 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8118 return NULL_TREE;
8119
8120 STRIP_NOPS (arg0);
8121
8122 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8123 return NULL_TREE;
8124
8125 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8126
8127 /* Proceed if a valid pointer type was passed in. */
8128 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8129 {
8130 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8131 tree frac, exp;
8132
8133 switch (value->cl)
8134 {
8135 case rvc_zero:
8136 /* For +-0, return (*exp = 0, +-0). */
8137 exp = integer_zero_node;
8138 frac = arg0;
8139 break;
8140 case rvc_nan:
8141 case rvc_inf:
8142 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8143 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8144 case rvc_normal:
8145 {
8146 /* Since the frexp function always expects base 2, and in
8147 GCC normalized significands are already in the range
8148 [0.5, 1.0), we have exactly what frexp wants. */
8149 REAL_VALUE_TYPE frac_rvt = *value;
8150 SET_REAL_EXP (&frac_rvt, 0);
8151 frac = build_real (rettype, frac_rvt);
8152 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8153 }
8154 break;
8155 default:
8156 gcc_unreachable ();
8157 }
8158
8159 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8160 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8161 TREE_SIDE_EFFECTS (arg1) = 1;
8162 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8163 }
8164
8165 return NULL_TREE;
8166 }
8167
8168 /* Fold a call to builtin modf. */
8169
8170 static tree
8171 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8172 {
8173 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8174 return NULL_TREE;
8175
8176 STRIP_NOPS (arg0);
8177
8178 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8179 return NULL_TREE;
8180
8181 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8182
8183 /* Proceed if a valid pointer type was passed in. */
8184 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8185 {
8186 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8187 REAL_VALUE_TYPE trunc, frac;
8188
8189 switch (value->cl)
8190 {
8191 case rvc_nan:
8192 case rvc_zero:
8193 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8194 trunc = frac = *value;
8195 break;
8196 case rvc_inf:
8197 /* For +-Inf, return (*arg1 = arg0, +-0). */
8198 frac = dconst0;
8199 frac.sign = value->sign;
8200 trunc = *value;
8201 break;
8202 case rvc_normal:
8203 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8204 real_trunc (&trunc, VOIDmode, value);
8205 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8206 /* If the original number was negative and already
8207 integral, then the fractional part is -0.0. */
8208 if (value->sign && frac.cl == rvc_zero)
8209 frac.sign = value->sign;
8210 break;
8211 }
8212
8213 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8214 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8215 build_real (rettype, trunc));
8216 TREE_SIDE_EFFECTS (arg1) = 1;
8217 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8218 build_real (rettype, frac));
8219 }
8220
8221 return NULL_TREE;
8222 }
8223
8224 /* Given a location LOC, an interclass builtin function decl FNDECL
8225 and its single argument ARG, return an folded expression computing
8226 the same, or NULL_TREE if we either couldn't or didn't want to fold
8227 (the latter happen if there's an RTL instruction available). */
8228
8229 static tree
8230 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8231 {
8232 machine_mode mode;
8233
8234 if (!validate_arg (arg, REAL_TYPE))
8235 return NULL_TREE;
8236
8237 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8238 return NULL_TREE;
8239
8240 mode = TYPE_MODE (TREE_TYPE (arg));
8241
8242 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8243
8244 /* If there is no optab, try generic code. */
8245 switch (DECL_FUNCTION_CODE (fndecl))
8246 {
8247 tree result;
8248
8249 CASE_FLT_FN (BUILT_IN_ISINF):
8250 {
8251 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8252 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8253 tree type = TREE_TYPE (arg);
8254 REAL_VALUE_TYPE r;
8255 char buf[128];
8256
8257 if (is_ibm_extended)
8258 {
8259 /* NaN and Inf are encoded in the high-order double value
8260 only. The low-order value is not significant. */
8261 type = double_type_node;
8262 mode = DFmode;
8263 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8264 }
8265 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8266 real_from_string (&r, buf);
8267 result = build_call_expr (isgr_fn, 2,
8268 fold_build1_loc (loc, ABS_EXPR, type, arg),
8269 build_real (type, r));
8270 return result;
8271 }
8272 CASE_FLT_FN (BUILT_IN_FINITE):
8273 case BUILT_IN_ISFINITE:
8274 {
8275 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8276 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8277 tree type = TREE_TYPE (arg);
8278 REAL_VALUE_TYPE r;
8279 char buf[128];
8280
8281 if (is_ibm_extended)
8282 {
8283 /* NaN and Inf are encoded in the high-order double value
8284 only. The low-order value is not significant. */
8285 type = double_type_node;
8286 mode = DFmode;
8287 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8288 }
8289 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8290 real_from_string (&r, buf);
8291 result = build_call_expr (isle_fn, 2,
8292 fold_build1_loc (loc, ABS_EXPR, type, arg),
8293 build_real (type, r));
8294 /*result = fold_build2_loc (loc, UNGT_EXPR,
8295 TREE_TYPE (TREE_TYPE (fndecl)),
8296 fold_build1_loc (loc, ABS_EXPR, type, arg),
8297 build_real (type, r));
8298 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8299 TREE_TYPE (TREE_TYPE (fndecl)),
8300 result);*/
8301 return result;
8302 }
8303 case BUILT_IN_ISNORMAL:
8304 {
8305 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8306 islessequal(fabs(x),DBL_MAX). */
8307 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8308 tree type = TREE_TYPE (arg);
8309 tree orig_arg, max_exp, min_exp;
8310 machine_mode orig_mode = mode;
8311 REAL_VALUE_TYPE rmax, rmin;
8312 char buf[128];
8313
8314 orig_arg = arg = builtin_save_expr (arg);
8315 if (is_ibm_extended)
8316 {
8317 /* Use double to test the normal range of IBM extended
8318 precision. Emin for IBM extended precision is
8319 different to emin for IEEE double, being 53 higher
8320 since the low double exponent is at least 53 lower
8321 than the high double exponent. */
8322 type = double_type_node;
8323 mode = DFmode;
8324 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8325 }
8326 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8327
8328 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8329 real_from_string (&rmax, buf);
8330 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8331 real_from_string (&rmin, buf);
8332 max_exp = build_real (type, rmax);
8333 min_exp = build_real (type, rmin);
8334
8335 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8336 if (is_ibm_extended)
8337 {
8338 /* Testing the high end of the range is done just using
8339 the high double, using the same test as isfinite().
8340 For the subnormal end of the range we first test the
8341 high double, then if its magnitude is equal to the
8342 limit of 0x1p-969, we test whether the low double is
8343 non-zero and opposite sign to the high double. */
8344 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8345 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8346 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8347 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8348 arg, min_exp);
8349 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8350 complex_double_type_node, orig_arg);
8351 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8352 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8353 tree zero = build_real (type, dconst0);
8354 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8355 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8356 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8357 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8358 fold_build3 (COND_EXPR,
8359 integer_type_node,
8360 hilt, logt, lolt));
8361 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8362 eq_min, ok_lo);
8363 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8364 gt_min, eq_min);
8365 }
8366 else
8367 {
8368 tree const isge_fn
8369 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8370 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8371 }
8372 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8373 max_exp, min_exp);
8374 return result;
8375 }
8376 default:
8377 break;
8378 }
8379
8380 return NULL_TREE;
8381 }
8382
8383 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8384 ARG is the argument for the call. */
8385
8386 static tree
8387 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8388 {
8389 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8390
8391 if (!validate_arg (arg, REAL_TYPE))
8392 return NULL_TREE;
8393
8394 switch (builtin_index)
8395 {
8396 case BUILT_IN_ISINF:
8397 if (!HONOR_INFINITIES (arg))
8398 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8399
8400 return NULL_TREE;
8401
8402 case BUILT_IN_ISINF_SIGN:
8403 {
8404 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8405 /* In a boolean context, GCC will fold the inner COND_EXPR to
8406 1. So e.g. "if (isinf_sign(x))" would be folded to just
8407 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8408 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8409 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8410 tree tmp = NULL_TREE;
8411
8412 arg = builtin_save_expr (arg);
8413
8414 if (signbit_fn && isinf_fn)
8415 {
8416 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8417 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8418
8419 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8420 signbit_call, integer_zero_node);
8421 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8422 isinf_call, integer_zero_node);
8423
8424 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8425 integer_minus_one_node, integer_one_node);
8426 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8427 isinf_call, tmp,
8428 integer_zero_node);
8429 }
8430
8431 return tmp;
8432 }
8433
8434 case BUILT_IN_ISFINITE:
8435 if (!HONOR_NANS (arg)
8436 && !HONOR_INFINITIES (arg))
8437 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8438
8439 return NULL_TREE;
8440
8441 case BUILT_IN_ISNAN:
8442 if (!HONOR_NANS (arg))
8443 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8444
8445 {
8446 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8447 if (is_ibm_extended)
8448 {
8449 /* NaN and Inf are encoded in the high-order double value
8450 only. The low-order value is not significant. */
8451 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8452 }
8453 }
8454 arg = builtin_save_expr (arg);
8455 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8456
8457 default:
8458 gcc_unreachable ();
8459 }
8460 }
8461
8462 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8463 This builtin will generate code to return the appropriate floating
8464 point classification depending on the value of the floating point
8465 number passed in. The possible return values must be supplied as
8466 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8467 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
8468 one floating point argument which is "type generic". */
8469
8470 static tree
8471 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8472 {
8473 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8474 arg, type, res, tmp;
8475 machine_mode mode;
8476 REAL_VALUE_TYPE r;
8477 char buf[128];
8478
8479 /* Verify the required arguments in the original call. */
8480 if (nargs != 6
8481 || !validate_arg (args[0], INTEGER_TYPE)
8482 || !validate_arg (args[1], INTEGER_TYPE)
8483 || !validate_arg (args[2], INTEGER_TYPE)
8484 || !validate_arg (args[3], INTEGER_TYPE)
8485 || !validate_arg (args[4], INTEGER_TYPE)
8486 || !validate_arg (args[5], REAL_TYPE))
8487 return NULL_TREE;
8488
8489 fp_nan = args[0];
8490 fp_infinite = args[1];
8491 fp_normal = args[2];
8492 fp_subnormal = args[3];
8493 fp_zero = args[4];
8494 arg = args[5];
8495 type = TREE_TYPE (arg);
8496 mode = TYPE_MODE (type);
8497 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8498
8499 /* fpclassify(x) ->
8500 isnan(x) ? FP_NAN :
8501 (fabs(x) == Inf ? FP_INFINITE :
8502 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8503 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8504
8505 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8506 build_real (type, dconst0));
8507 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8508 tmp, fp_zero, fp_subnormal);
8509
8510 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8511 real_from_string (&r, buf);
8512 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8513 arg, build_real (type, r));
8514 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8515
8516 if (HONOR_INFINITIES (mode))
8517 {
8518 real_inf (&r);
8519 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8520 build_real (type, r));
8521 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8522 fp_infinite, res);
8523 }
8524
8525 if (HONOR_NANS (mode))
8526 {
8527 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8528 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8529 }
8530
8531 return res;
8532 }
8533
8534 /* Fold a call to an unordered comparison function such as
8535 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8536 being called and ARG0 and ARG1 are the arguments for the call.
8537 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8538 the opposite of the desired result. UNORDERED_CODE is used
8539 for modes that can hold NaNs and ORDERED_CODE is used for
8540 the rest. */
8541
8542 static tree
8543 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8544 enum tree_code unordered_code,
8545 enum tree_code ordered_code)
8546 {
8547 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8548 enum tree_code code;
8549 tree type0, type1;
8550 enum tree_code code0, code1;
8551 tree cmp_type = NULL_TREE;
8552
8553 type0 = TREE_TYPE (arg0);
8554 type1 = TREE_TYPE (arg1);
8555
8556 code0 = TREE_CODE (type0);
8557 code1 = TREE_CODE (type1);
8558
8559 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8560 /* Choose the wider of two real types. */
8561 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8562 ? type0 : type1;
8563 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8564 cmp_type = type0;
8565 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8566 cmp_type = type1;
8567
8568 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8569 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8570
8571 if (unordered_code == UNORDERED_EXPR)
8572 {
8573 if (!HONOR_NANS (arg0))
8574 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8575 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8576 }
8577
8578 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8579 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8580 fold_build2_loc (loc, code, type, arg0, arg1));
8581 }
8582
8583 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8584 arithmetics if it can never overflow, or into internal functions that
8585 return both result of arithmetics and overflowed boolean flag in
8586 a complex integer result, or some other check for overflow.
8587 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8588 checking part of that. */
8589
8590 static tree
8591 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8592 tree arg0, tree arg1, tree arg2)
8593 {
8594 enum internal_fn ifn = IFN_LAST;
8595 /* The code of the expression corresponding to the type-generic
8596 built-in, or ERROR_MARK for the type-specific ones. */
8597 enum tree_code opcode = ERROR_MARK;
8598 bool ovf_only = false;
8599
8600 switch (fcode)
8601 {
8602 case BUILT_IN_ADD_OVERFLOW_P:
8603 ovf_only = true;
8604 /* FALLTHRU */
8605 case BUILT_IN_ADD_OVERFLOW:
8606 opcode = PLUS_EXPR;
8607 /* FALLTHRU */
8608 case BUILT_IN_SADD_OVERFLOW:
8609 case BUILT_IN_SADDL_OVERFLOW:
8610 case BUILT_IN_SADDLL_OVERFLOW:
8611 case BUILT_IN_UADD_OVERFLOW:
8612 case BUILT_IN_UADDL_OVERFLOW:
8613 case BUILT_IN_UADDLL_OVERFLOW:
8614 ifn = IFN_ADD_OVERFLOW;
8615 break;
8616 case BUILT_IN_SUB_OVERFLOW_P:
8617 ovf_only = true;
8618 /* FALLTHRU */
8619 case BUILT_IN_SUB_OVERFLOW:
8620 opcode = MINUS_EXPR;
8621 /* FALLTHRU */
8622 case BUILT_IN_SSUB_OVERFLOW:
8623 case BUILT_IN_SSUBL_OVERFLOW:
8624 case BUILT_IN_SSUBLL_OVERFLOW:
8625 case BUILT_IN_USUB_OVERFLOW:
8626 case BUILT_IN_USUBL_OVERFLOW:
8627 case BUILT_IN_USUBLL_OVERFLOW:
8628 ifn = IFN_SUB_OVERFLOW;
8629 break;
8630 case BUILT_IN_MUL_OVERFLOW_P:
8631 ovf_only = true;
8632 /* FALLTHRU */
8633 case BUILT_IN_MUL_OVERFLOW:
8634 opcode = MULT_EXPR;
8635 /* FALLTHRU */
8636 case BUILT_IN_SMUL_OVERFLOW:
8637 case BUILT_IN_SMULL_OVERFLOW:
8638 case BUILT_IN_SMULLL_OVERFLOW:
8639 case BUILT_IN_UMUL_OVERFLOW:
8640 case BUILT_IN_UMULL_OVERFLOW:
8641 case BUILT_IN_UMULLL_OVERFLOW:
8642 ifn = IFN_MUL_OVERFLOW;
8643 break;
8644 default:
8645 gcc_unreachable ();
8646 }
8647
8648 /* For the "generic" overloads, the first two arguments can have different
8649 types and the last argument determines the target type to use to check
8650 for overflow. The arguments of the other overloads all have the same
8651 type. */
8652 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8653
8654 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8655 arguments are constant, attempt to fold the built-in call into a constant
8656 expression indicating whether or not it detected an overflow. */
8657 if (ovf_only
8658 && TREE_CODE (arg0) == INTEGER_CST
8659 && TREE_CODE (arg1) == INTEGER_CST)
8660 /* Perform the computation in the target type and check for overflow. */
8661 return omit_one_operand_loc (loc, boolean_type_node,
8662 arith_overflowed_p (opcode, type, arg0, arg1)
8663 ? boolean_true_node : boolean_false_node,
8664 arg2);
8665
8666 tree ctype = build_complex_type (type);
8667 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8668 2, arg0, arg1);
8669 tree tgt = save_expr (call);
8670 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8671 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8672 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8673
8674 if (ovf_only)
8675 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8676
8677 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8678 tree store
8679 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8680 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8681 }
8682
8683 /* Fold a call to __builtin_FILE to a constant string. */
8684
8685 static inline tree
8686 fold_builtin_FILE (location_t loc)
8687 {
8688 if (const char *fname = LOCATION_FILE (loc))
8689 return build_string_literal (strlen (fname) + 1, fname);
8690
8691 return build_string_literal (1, "");
8692 }
8693
8694 /* Fold a call to __builtin_FUNCTION to a constant string. */
8695
8696 static inline tree
8697 fold_builtin_FUNCTION ()
8698 {
8699 const char *name = "";
8700
8701 if (current_function_decl)
8702 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8703
8704 return build_string_literal (strlen (name) + 1, name);
8705 }
8706
8707 /* Fold a call to __builtin_LINE to an integer constant. */
8708
8709 static inline tree
8710 fold_builtin_LINE (location_t loc, tree type)
8711 {
8712 return build_int_cst (type, LOCATION_LINE (loc));
8713 }
8714
8715 /* Fold a call to built-in function FNDECL with 0 arguments.
8716 This function returns NULL_TREE if no simplification was possible. */
8717
8718 static tree
8719 fold_builtin_0 (location_t loc, tree fndecl)
8720 {
8721 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8722 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8723 switch (fcode)
8724 {
8725 case BUILT_IN_FILE:
8726 return fold_builtin_FILE (loc);
8727
8728 case BUILT_IN_FUNCTION:
8729 return fold_builtin_FUNCTION ();
8730
8731 case BUILT_IN_LINE:
8732 return fold_builtin_LINE (loc, type);
8733
8734 CASE_FLT_FN (BUILT_IN_INF):
8735 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8736 case BUILT_IN_INFD32:
8737 case BUILT_IN_INFD64:
8738 case BUILT_IN_INFD128:
8739 return fold_builtin_inf (loc, type, true);
8740
8741 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8742 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8743 return fold_builtin_inf (loc, type, false);
8744
8745 case BUILT_IN_CLASSIFY_TYPE:
8746 return fold_builtin_classify_type (NULL_TREE);
8747
8748 default:
8749 break;
8750 }
8751 return NULL_TREE;
8752 }
8753
8754 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8755 This function returns NULL_TREE if no simplification was possible. */
8756
8757 static tree
8758 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8759 {
8760 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8761 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8762
8763 if (TREE_CODE (arg0) == ERROR_MARK)
8764 return NULL_TREE;
8765
8766 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8767 return ret;
8768
8769 switch (fcode)
8770 {
8771 case BUILT_IN_CONSTANT_P:
8772 {
8773 tree val = fold_builtin_constant_p (arg0);
8774
8775 /* Gimplification will pull the CALL_EXPR for the builtin out of
8776 an if condition. When not optimizing, we'll not CSE it back.
8777 To avoid link error types of regressions, return false now. */
8778 if (!val && !optimize)
8779 val = integer_zero_node;
8780
8781 return val;
8782 }
8783
8784 case BUILT_IN_CLASSIFY_TYPE:
8785 return fold_builtin_classify_type (arg0);
8786
8787 case BUILT_IN_STRLEN:
8788 return fold_builtin_strlen (loc, type, arg0);
8789
8790 CASE_FLT_FN (BUILT_IN_FABS):
8791 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8792 case BUILT_IN_FABSD32:
8793 case BUILT_IN_FABSD64:
8794 case BUILT_IN_FABSD128:
8795 return fold_builtin_fabs (loc, arg0, type);
8796
8797 case BUILT_IN_ABS:
8798 case BUILT_IN_LABS:
8799 case BUILT_IN_LLABS:
8800 case BUILT_IN_IMAXABS:
8801 return fold_builtin_abs (loc, arg0, type);
8802
8803 CASE_FLT_FN (BUILT_IN_CONJ):
8804 if (validate_arg (arg0, COMPLEX_TYPE)
8805 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8806 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8807 break;
8808
8809 CASE_FLT_FN (BUILT_IN_CREAL):
8810 if (validate_arg (arg0, COMPLEX_TYPE)
8811 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8812 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8813 break;
8814
8815 CASE_FLT_FN (BUILT_IN_CIMAG):
8816 if (validate_arg (arg0, COMPLEX_TYPE)
8817 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8818 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8819 break;
8820
8821 CASE_FLT_FN (BUILT_IN_CARG):
8822 return fold_builtin_carg (loc, arg0, type);
8823
8824 case BUILT_IN_ISASCII:
8825 return fold_builtin_isascii (loc, arg0);
8826
8827 case BUILT_IN_TOASCII:
8828 return fold_builtin_toascii (loc, arg0);
8829
8830 case BUILT_IN_ISDIGIT:
8831 return fold_builtin_isdigit (loc, arg0);
8832
8833 CASE_FLT_FN (BUILT_IN_FINITE):
8834 case BUILT_IN_FINITED32:
8835 case BUILT_IN_FINITED64:
8836 case BUILT_IN_FINITED128:
8837 case BUILT_IN_ISFINITE:
8838 {
8839 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8840 if (ret)
8841 return ret;
8842 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8843 }
8844
8845 CASE_FLT_FN (BUILT_IN_ISINF):
8846 case BUILT_IN_ISINFD32:
8847 case BUILT_IN_ISINFD64:
8848 case BUILT_IN_ISINFD128:
8849 {
8850 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8851 if (ret)
8852 return ret;
8853 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8854 }
8855
8856 case BUILT_IN_ISNORMAL:
8857 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8858
8859 case BUILT_IN_ISINF_SIGN:
8860 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8861
8862 CASE_FLT_FN (BUILT_IN_ISNAN):
8863 case BUILT_IN_ISNAND32:
8864 case BUILT_IN_ISNAND64:
8865 case BUILT_IN_ISNAND128:
8866 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8867
8868 case BUILT_IN_FREE:
8869 if (integer_zerop (arg0))
8870 return build_empty_stmt (loc);
8871 break;
8872
8873 default:
8874 break;
8875 }
8876
8877 return NULL_TREE;
8878
8879 }
8880
8881 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8882 This function returns NULL_TREE if no simplification was possible. */
8883
8884 static tree
8885 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8886 {
8887 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8888 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8889
8890 if (TREE_CODE (arg0) == ERROR_MARK
8891 || TREE_CODE (arg1) == ERROR_MARK)
8892 return NULL_TREE;
8893
8894 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8895 return ret;
8896
8897 switch (fcode)
8898 {
8899 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8900 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8901 if (validate_arg (arg0, REAL_TYPE)
8902 && validate_arg (arg1, POINTER_TYPE))
8903 return do_mpfr_lgamma_r (arg0, arg1, type);
8904 break;
8905
8906 CASE_FLT_FN (BUILT_IN_FREXP):
8907 return fold_builtin_frexp (loc, arg0, arg1, type);
8908
8909 CASE_FLT_FN (BUILT_IN_MODF):
8910 return fold_builtin_modf (loc, arg0, arg1, type);
8911
8912 case BUILT_IN_STRSPN:
8913 return fold_builtin_strspn (loc, arg0, arg1);
8914
8915 case BUILT_IN_STRCSPN:
8916 return fold_builtin_strcspn (loc, arg0, arg1);
8917
8918 case BUILT_IN_STRPBRK:
8919 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8920
8921 case BUILT_IN_EXPECT:
8922 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8923
8924 case BUILT_IN_ISGREATER:
8925 return fold_builtin_unordered_cmp (loc, fndecl,
8926 arg0, arg1, UNLE_EXPR, LE_EXPR);
8927 case BUILT_IN_ISGREATEREQUAL:
8928 return fold_builtin_unordered_cmp (loc, fndecl,
8929 arg0, arg1, UNLT_EXPR, LT_EXPR);
8930 case BUILT_IN_ISLESS:
8931 return fold_builtin_unordered_cmp (loc, fndecl,
8932 arg0, arg1, UNGE_EXPR, GE_EXPR);
8933 case BUILT_IN_ISLESSEQUAL:
8934 return fold_builtin_unordered_cmp (loc, fndecl,
8935 arg0, arg1, UNGT_EXPR, GT_EXPR);
8936 case BUILT_IN_ISLESSGREATER:
8937 return fold_builtin_unordered_cmp (loc, fndecl,
8938 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8939 case BUILT_IN_ISUNORDERED:
8940 return fold_builtin_unordered_cmp (loc, fndecl,
8941 arg0, arg1, UNORDERED_EXPR,
8942 NOP_EXPR);
8943
8944 /* We do the folding for va_start in the expander. */
8945 case BUILT_IN_VA_START:
8946 break;
8947
8948 case BUILT_IN_OBJECT_SIZE:
8949 return fold_builtin_object_size (arg0, arg1);
8950
8951 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8952 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8953
8954 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8955 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8956
8957 default:
8958 break;
8959 }
8960 return NULL_TREE;
8961 }
8962
8963 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8964 and ARG2.
8965 This function returns NULL_TREE if no simplification was possible. */
8966
8967 static tree
8968 fold_builtin_3 (location_t loc, tree fndecl,
8969 tree arg0, tree arg1, tree arg2)
8970 {
8971 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8972 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8973
8974 if (TREE_CODE (arg0) == ERROR_MARK
8975 || TREE_CODE (arg1) == ERROR_MARK
8976 || TREE_CODE (arg2) == ERROR_MARK)
8977 return NULL_TREE;
8978
8979 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8980 arg0, arg1, arg2))
8981 return ret;
8982
8983 switch (fcode)
8984 {
8985
8986 CASE_FLT_FN (BUILT_IN_SINCOS):
8987 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8988
8989 CASE_FLT_FN (BUILT_IN_FMA):
8990 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8991
8992 CASE_FLT_FN (BUILT_IN_REMQUO):
8993 if (validate_arg (arg0, REAL_TYPE)
8994 && validate_arg (arg1, REAL_TYPE)
8995 && validate_arg (arg2, POINTER_TYPE))
8996 return do_mpfr_remquo (arg0, arg1, arg2);
8997 break;
8998
8999 case BUILT_IN_MEMCMP:
9000 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
9001
9002 case BUILT_IN_EXPECT:
9003 return fold_builtin_expect (loc, arg0, arg1, arg2);
9004
9005 case BUILT_IN_ADD_OVERFLOW:
9006 case BUILT_IN_SUB_OVERFLOW:
9007 case BUILT_IN_MUL_OVERFLOW:
9008 case BUILT_IN_ADD_OVERFLOW_P:
9009 case BUILT_IN_SUB_OVERFLOW_P:
9010 case BUILT_IN_MUL_OVERFLOW_P:
9011 case BUILT_IN_SADD_OVERFLOW:
9012 case BUILT_IN_SADDL_OVERFLOW:
9013 case BUILT_IN_SADDLL_OVERFLOW:
9014 case BUILT_IN_SSUB_OVERFLOW:
9015 case BUILT_IN_SSUBL_OVERFLOW:
9016 case BUILT_IN_SSUBLL_OVERFLOW:
9017 case BUILT_IN_SMUL_OVERFLOW:
9018 case BUILT_IN_SMULL_OVERFLOW:
9019 case BUILT_IN_SMULLL_OVERFLOW:
9020 case BUILT_IN_UADD_OVERFLOW:
9021 case BUILT_IN_UADDL_OVERFLOW:
9022 case BUILT_IN_UADDLL_OVERFLOW:
9023 case BUILT_IN_USUB_OVERFLOW:
9024 case BUILT_IN_USUBL_OVERFLOW:
9025 case BUILT_IN_USUBLL_OVERFLOW:
9026 case BUILT_IN_UMUL_OVERFLOW:
9027 case BUILT_IN_UMULL_OVERFLOW:
9028 case BUILT_IN_UMULLL_OVERFLOW:
9029 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9030
9031 default:
9032 break;
9033 }
9034 return NULL_TREE;
9035 }
9036
9037 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9038 arguments. IGNORE is true if the result of the
9039 function call is ignored. This function returns NULL_TREE if no
9040 simplification was possible. */
9041
9042 tree
9043 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9044 {
9045 tree ret = NULL_TREE;
9046
9047 switch (nargs)
9048 {
9049 case 0:
9050 ret = fold_builtin_0 (loc, fndecl);
9051 break;
9052 case 1:
9053 ret = fold_builtin_1 (loc, fndecl, args[0]);
9054 break;
9055 case 2:
9056 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9057 break;
9058 case 3:
9059 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9060 break;
9061 default:
9062 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9063 break;
9064 }
9065 if (ret)
9066 {
9067 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9068 SET_EXPR_LOCATION (ret, loc);
9069 TREE_NO_WARNING (ret) = 1;
9070 return ret;
9071 }
9072 return NULL_TREE;
9073 }
9074
9075 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9076 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9077 of arguments in ARGS to be omitted. OLDNARGS is the number of
9078 elements in ARGS. */
9079
9080 static tree
9081 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9082 int skip, tree fndecl, int n, va_list newargs)
9083 {
9084 int nargs = oldnargs - skip + n;
9085 tree *buffer;
9086
9087 if (n > 0)
9088 {
9089 int i, j;
9090
9091 buffer = XALLOCAVEC (tree, nargs);
9092 for (i = 0; i < n; i++)
9093 buffer[i] = va_arg (newargs, tree);
9094 for (j = skip; j < oldnargs; j++, i++)
9095 buffer[i] = args[j];
9096 }
9097 else
9098 buffer = args + skip;
9099
9100 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9101 }
9102
9103 /* Return true if FNDECL shouldn't be folded right now.
9104 If a built-in function has an inline attribute always_inline
9105 wrapper, defer folding it after always_inline functions have
9106 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9107 might not be performed. */
9108
9109 bool
9110 avoid_folding_inline_builtin (tree fndecl)
9111 {
9112 return (DECL_DECLARED_INLINE_P (fndecl)
9113 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9114 && cfun
9115 && !cfun->always_inline_functions_inlined
9116 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9117 }
9118
9119 /* A wrapper function for builtin folding that prevents warnings for
9120 "statement without effect" and the like, caused by removing the
9121 call node earlier than the warning is generated. */
9122
9123 tree
9124 fold_call_expr (location_t loc, tree exp, bool ignore)
9125 {
9126 tree ret = NULL_TREE;
9127 tree fndecl = get_callee_fndecl (exp);
9128 if (fndecl
9129 && TREE_CODE (fndecl) == FUNCTION_DECL
9130 && DECL_BUILT_IN (fndecl)
9131 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9132 yet. Defer folding until we see all the arguments
9133 (after inlining). */
9134 && !CALL_EXPR_VA_ARG_PACK (exp))
9135 {
9136 int nargs = call_expr_nargs (exp);
9137
9138 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9139 instead last argument is __builtin_va_arg_pack (). Defer folding
9140 even in that case, until arguments are finalized. */
9141 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9142 {
9143 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9144 if (fndecl2
9145 && TREE_CODE (fndecl2) == FUNCTION_DECL
9146 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9147 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9148 return NULL_TREE;
9149 }
9150
9151 if (avoid_folding_inline_builtin (fndecl))
9152 return NULL_TREE;
9153
9154 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9155 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9156 CALL_EXPR_ARGP (exp), ignore);
9157 else
9158 {
9159 tree *args = CALL_EXPR_ARGP (exp);
9160 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9161 if (ret)
9162 return ret;
9163 }
9164 }
9165 return NULL_TREE;
9166 }
9167
9168 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9169 N arguments are passed in the array ARGARRAY. Return a folded
9170 expression or NULL_TREE if no simplification was possible. */
9171
9172 tree
9173 fold_builtin_call_array (location_t loc, tree,
9174 tree fn,
9175 int n,
9176 tree *argarray)
9177 {
9178 if (TREE_CODE (fn) != ADDR_EXPR)
9179 return NULL_TREE;
9180
9181 tree fndecl = TREE_OPERAND (fn, 0);
9182 if (TREE_CODE (fndecl) == FUNCTION_DECL
9183 && DECL_BUILT_IN (fndecl))
9184 {
9185 /* If last argument is __builtin_va_arg_pack (), arguments to this
9186 function are not finalized yet. Defer folding until they are. */
9187 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9188 {
9189 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9190 if (fndecl2
9191 && TREE_CODE (fndecl2) == FUNCTION_DECL
9192 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9193 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9194 return NULL_TREE;
9195 }
9196 if (avoid_folding_inline_builtin (fndecl))
9197 return NULL_TREE;
9198 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9199 return targetm.fold_builtin (fndecl, n, argarray, false);
9200 else
9201 return fold_builtin_n (loc, fndecl, argarray, n, false);
9202 }
9203
9204 return NULL_TREE;
9205 }
9206
9207 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9208 along with N new arguments specified as the "..." parameters. SKIP
9209 is the number of arguments in EXP to be omitted. This function is used
9210 to do varargs-to-varargs transformations. */
9211
9212 static tree
9213 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9214 {
9215 va_list ap;
9216 tree t;
9217
9218 va_start (ap, n);
9219 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9220 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9221 va_end (ap);
9222
9223 return t;
9224 }
9225
9226 /* Validate a single argument ARG against a tree code CODE representing
9227 a type. Return true when argument is valid. */
9228
9229 static bool
9230 validate_arg (const_tree arg, enum tree_code code)
9231 {
9232 if (!arg)
9233 return false;
9234 else if (code == POINTER_TYPE)
9235 return POINTER_TYPE_P (TREE_TYPE (arg));
9236 else if (code == INTEGER_TYPE)
9237 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9238 return code == TREE_CODE (TREE_TYPE (arg));
9239 }
9240
9241 /* This function validates the types of a function call argument list
9242 against a specified list of tree_codes. If the last specifier is a 0,
9243 that represents an ellipses, otherwise the last specifier must be a
9244 VOID_TYPE.
9245
9246 This is the GIMPLE version of validate_arglist. Eventually we want to
9247 completely convert builtins.c to work from GIMPLEs and the tree based
9248 validate_arglist will then be removed. */
9249
9250 bool
9251 validate_gimple_arglist (const gcall *call, ...)
9252 {
9253 enum tree_code code;
9254 bool res = 0;
9255 va_list ap;
9256 const_tree arg;
9257 size_t i;
9258
9259 va_start (ap, call);
9260 i = 0;
9261
9262 do
9263 {
9264 code = (enum tree_code) va_arg (ap, int);
9265 switch (code)
9266 {
9267 case 0:
9268 /* This signifies an ellipses, any further arguments are all ok. */
9269 res = true;
9270 goto end;
9271 case VOID_TYPE:
9272 /* This signifies an endlink, if no arguments remain, return
9273 true, otherwise return false. */
9274 res = (i == gimple_call_num_args (call));
9275 goto end;
9276 default:
9277 /* If no parameters remain or the parameter's code does not
9278 match the specified code, return false. Otherwise continue
9279 checking any remaining arguments. */
9280 arg = gimple_call_arg (call, i++);
9281 if (!validate_arg (arg, code))
9282 goto end;
9283 break;
9284 }
9285 }
9286 while (1);
9287
9288 /* We need gotos here since we can only have one VA_CLOSE in a
9289 function. */
9290 end: ;
9291 va_end (ap);
9292
9293 return res;
9294 }
9295
9296 /* Default target-specific builtin expander that does nothing. */
9297
9298 rtx
9299 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9300 rtx target ATTRIBUTE_UNUSED,
9301 rtx subtarget ATTRIBUTE_UNUSED,
9302 machine_mode mode ATTRIBUTE_UNUSED,
9303 int ignore ATTRIBUTE_UNUSED)
9304 {
9305 return NULL_RTX;
9306 }
9307
9308 /* Returns true is EXP represents data that would potentially reside
9309 in a readonly section. */
9310
9311 bool
9312 readonly_data_expr (tree exp)
9313 {
9314 STRIP_NOPS (exp);
9315
9316 if (TREE_CODE (exp) != ADDR_EXPR)
9317 return false;
9318
9319 exp = get_base_address (TREE_OPERAND (exp, 0));
9320 if (!exp)
9321 return false;
9322
9323 /* Make sure we call decl_readonly_section only for trees it
9324 can handle (since it returns true for everything it doesn't
9325 understand). */
9326 if (TREE_CODE (exp) == STRING_CST
9327 || TREE_CODE (exp) == CONSTRUCTOR
9328 || (VAR_P (exp) && TREE_STATIC (exp)))
9329 return decl_readonly_section (exp, 0);
9330 else
9331 return false;
9332 }
9333
9334 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9335 to the call, and TYPE is its return type.
9336
9337 Return NULL_TREE if no simplification was possible, otherwise return the
9338 simplified form of the call as a tree.
9339
9340 The simplified form may be a constant or other expression which
9341 computes the same value, but in a more efficient manner (including
9342 calls to other builtin functions).
9343
9344 The call may contain arguments which need to be evaluated, but
9345 which are not useful to determine the result of the call. In
9346 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9347 COMPOUND_EXPR will be an argument which must be evaluated.
9348 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9349 COMPOUND_EXPR in the chain will contain the tree for the simplified
9350 form of the builtin function call. */
9351
9352 static tree
9353 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9354 {
9355 if (!validate_arg (s1, POINTER_TYPE)
9356 || !validate_arg (s2, POINTER_TYPE))
9357 return NULL_TREE;
9358 else
9359 {
9360 tree fn;
9361 const char *p1, *p2;
9362
9363 p2 = c_getstr (s2);
9364 if (p2 == NULL)
9365 return NULL_TREE;
9366
9367 p1 = c_getstr (s1);
9368 if (p1 != NULL)
9369 {
9370 const char *r = strpbrk (p1, p2);
9371 tree tem;
9372
9373 if (r == NULL)
9374 return build_int_cst (TREE_TYPE (s1), 0);
9375
9376 /* Return an offset into the constant string argument. */
9377 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9378 return fold_convert_loc (loc, type, tem);
9379 }
9380
9381 if (p2[0] == '\0')
9382 /* strpbrk(x, "") == NULL.
9383 Evaluate and ignore s1 in case it had side-effects. */
9384 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9385
9386 if (p2[1] != '\0')
9387 return NULL_TREE; /* Really call strpbrk. */
9388
9389 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9390 if (!fn)
9391 return NULL_TREE;
9392
9393 /* New argument list transforming strpbrk(s1, s2) to
9394 strchr(s1, s2[0]). */
9395 return build_call_expr_loc (loc, fn, 2, s1,
9396 build_int_cst (integer_type_node, p2[0]));
9397 }
9398 }
9399
9400 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9401 to the call.
9402
9403 Return NULL_TREE if no simplification was possible, otherwise return the
9404 simplified form of the call as a tree.
9405
9406 The simplified form may be a constant or other expression which
9407 computes the same value, but in a more efficient manner (including
9408 calls to other builtin functions).
9409
9410 The call may contain arguments which need to be evaluated, but
9411 which are not useful to determine the result of the call. In
9412 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9413 COMPOUND_EXPR will be an argument which must be evaluated.
9414 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9415 COMPOUND_EXPR in the chain will contain the tree for the simplified
9416 form of the builtin function call. */
9417
9418 static tree
9419 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9420 {
9421 if (!validate_arg (s1, POINTER_TYPE)
9422 || !validate_arg (s2, POINTER_TYPE))
9423 return NULL_TREE;
9424 else
9425 {
9426 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9427
9428 /* If either argument is "", return NULL_TREE. */
9429 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9430 /* Evaluate and ignore both arguments in case either one has
9431 side-effects. */
9432 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9433 s1, s2);
9434 return NULL_TREE;
9435 }
9436 }
9437
9438 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9439 to the call.
9440
9441 Return NULL_TREE if no simplification was possible, otherwise return the
9442 simplified form of the call as a tree.
9443
9444 The simplified form may be a constant or other expression which
9445 computes the same value, but in a more efficient manner (including
9446 calls to other builtin functions).
9447
9448 The call may contain arguments which need to be evaluated, but
9449 which are not useful to determine the result of the call. In
9450 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9451 COMPOUND_EXPR will be an argument which must be evaluated.
9452 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9453 COMPOUND_EXPR in the chain will contain the tree for the simplified
9454 form of the builtin function call. */
9455
9456 static tree
9457 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9458 {
9459 if (!validate_arg (s1, POINTER_TYPE)
9460 || !validate_arg (s2, POINTER_TYPE))
9461 return NULL_TREE;
9462 else
9463 {
9464 /* If the first argument is "", return NULL_TREE. */
9465 const char *p1 = c_getstr (s1);
9466 if (p1 && *p1 == '\0')
9467 {
9468 /* Evaluate and ignore argument s2 in case it has
9469 side-effects. */
9470 return omit_one_operand_loc (loc, size_type_node,
9471 size_zero_node, s2);
9472 }
9473
9474 /* If the second argument is "", return __builtin_strlen(s1). */
9475 const char *p2 = c_getstr (s2);
9476 if (p2 && *p2 == '\0')
9477 {
9478 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9479
9480 /* If the replacement _DECL isn't initialized, don't do the
9481 transformation. */
9482 if (!fn)
9483 return NULL_TREE;
9484
9485 return build_call_expr_loc (loc, fn, 1, s1);
9486 }
9487 return NULL_TREE;
9488 }
9489 }
9490
9491 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
9492 produced. False otherwise. This is done so that we don't output the error
9493 or warning twice or three times. */
9494
9495 bool
9496 fold_builtin_next_arg (tree exp, bool va_start_p)
9497 {
9498 tree fntype = TREE_TYPE (current_function_decl);
9499 int nargs = call_expr_nargs (exp);
9500 tree arg;
9501 /* There is good chance the current input_location points inside the
9502 definition of the va_start macro (perhaps on the token for
9503 builtin) in a system header, so warnings will not be emitted.
9504 Use the location in real source code. */
9505 source_location current_location =
9506 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9507 NULL);
9508
9509 if (!stdarg_p (fntype))
9510 {
9511 error ("%<va_start%> used in function with fixed args");
9512 return true;
9513 }
9514
9515 if (va_start_p)
9516 {
9517 if (va_start_p && (nargs != 2))
9518 {
9519 error ("wrong number of arguments to function %<va_start%>");
9520 return true;
9521 }
9522 arg = CALL_EXPR_ARG (exp, 1);
9523 }
9524 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9525 when we checked the arguments and if needed issued a warning. */
9526 else
9527 {
9528 if (nargs == 0)
9529 {
9530 /* Evidently an out of date version of <stdarg.h>; can't validate
9531 va_start's second argument, but can still work as intended. */
9532 warning_at (current_location,
9533 OPT_Wvarargs,
9534 "%<__builtin_next_arg%> called without an argument");
9535 return true;
9536 }
9537 else if (nargs > 1)
9538 {
9539 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9540 return true;
9541 }
9542 arg = CALL_EXPR_ARG (exp, 0);
9543 }
9544
9545 if (TREE_CODE (arg) == SSA_NAME)
9546 arg = SSA_NAME_VAR (arg);
9547
9548 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9549 or __builtin_next_arg (0) the first time we see it, after checking
9550 the arguments and if needed issuing a warning. */
9551 if (!integer_zerop (arg))
9552 {
9553 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9554
9555 /* Strip off all nops for the sake of the comparison. This
9556 is not quite the same as STRIP_NOPS. It does more.
9557 We must also strip off INDIRECT_EXPR for C++ reference
9558 parameters. */
9559 while (CONVERT_EXPR_P (arg)
9560 || TREE_CODE (arg) == INDIRECT_REF)
9561 arg = TREE_OPERAND (arg, 0);
9562 if (arg != last_parm)
9563 {
9564 /* FIXME: Sometimes with the tree optimizers we can get the
9565 not the last argument even though the user used the last
9566 argument. We just warn and set the arg to be the last
9567 argument so that we will get wrong-code because of
9568 it. */
9569 warning_at (current_location,
9570 OPT_Wvarargs,
9571 "second parameter of %<va_start%> not last named argument");
9572 }
9573
9574 /* Undefined by C99 7.15.1.4p4 (va_start):
9575 "If the parameter parmN is declared with the register storage
9576 class, with a function or array type, or with a type that is
9577 not compatible with the type that results after application of
9578 the default argument promotions, the behavior is undefined."
9579 */
9580 else if (DECL_REGISTER (arg))
9581 {
9582 warning_at (current_location,
9583 OPT_Wvarargs,
9584 "undefined behavior when second parameter of "
9585 "%<va_start%> is declared with %<register%> storage");
9586 }
9587
9588 /* We want to verify the second parameter just once before the tree
9589 optimizers are run and then avoid keeping it in the tree,
9590 as otherwise we could warn even for correct code like:
9591 void foo (int i, ...)
9592 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9593 if (va_start_p)
9594 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9595 else
9596 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9597 }
9598 return false;
9599 }
9600
9601
9602 /* Expand a call EXP to __builtin_object_size. */
9603
9604 static rtx
9605 expand_builtin_object_size (tree exp)
9606 {
9607 tree ost;
9608 int object_size_type;
9609 tree fndecl = get_callee_fndecl (exp);
9610
9611 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9612 {
9613 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9614 exp, fndecl);
9615 expand_builtin_trap ();
9616 return const0_rtx;
9617 }
9618
9619 ost = CALL_EXPR_ARG (exp, 1);
9620 STRIP_NOPS (ost);
9621
9622 if (TREE_CODE (ost) != INTEGER_CST
9623 || tree_int_cst_sgn (ost) < 0
9624 || compare_tree_int (ost, 3) > 0)
9625 {
9626 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9627 exp, fndecl);
9628 expand_builtin_trap ();
9629 return const0_rtx;
9630 }
9631
9632 object_size_type = tree_to_shwi (ost);
9633
9634 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9635 }
9636
9637 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9638 FCODE is the BUILT_IN_* to use.
9639 Return NULL_RTX if we failed; the caller should emit a normal call,
9640 otherwise try to get the result in TARGET, if convenient (and in
9641 mode MODE if that's convenient). */
9642
9643 static rtx
9644 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9645 enum built_in_function fcode)
9646 {
9647 tree dest, src, len, size;
9648
9649 if (!validate_arglist (exp,
9650 POINTER_TYPE,
9651 fcode == BUILT_IN_MEMSET_CHK
9652 ? INTEGER_TYPE : POINTER_TYPE,
9653 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9654 return NULL_RTX;
9655
9656 dest = CALL_EXPR_ARG (exp, 0);
9657 src = CALL_EXPR_ARG (exp, 1);
9658 len = CALL_EXPR_ARG (exp, 2);
9659 size = CALL_EXPR_ARG (exp, 3);
9660
9661 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9662 exp, len, /*maxlen=*/NULL_TREE,
9663 /*str=*/NULL_TREE, size);
9664
9665 if (!tree_fits_uhwi_p (size))
9666 return NULL_RTX;
9667
9668 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9669 {
9670 /* Avoid transforming the checking call to an ordinary one when
9671 an overflow has been detected or when the call couldn't be
9672 validated because the size is not constant. */
9673 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9674 return NULL_RTX;
9675
9676 tree fn = NULL_TREE;
9677 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9678 mem{cpy,pcpy,move,set} is available. */
9679 switch (fcode)
9680 {
9681 case BUILT_IN_MEMCPY_CHK:
9682 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9683 break;
9684 case BUILT_IN_MEMPCPY_CHK:
9685 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9686 break;
9687 case BUILT_IN_MEMMOVE_CHK:
9688 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9689 break;
9690 case BUILT_IN_MEMSET_CHK:
9691 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9692 break;
9693 default:
9694 break;
9695 }
9696
9697 if (! fn)
9698 return NULL_RTX;
9699
9700 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9701 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9702 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9703 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9704 }
9705 else if (fcode == BUILT_IN_MEMSET_CHK)
9706 return NULL_RTX;
9707 else
9708 {
9709 unsigned int dest_align = get_pointer_alignment (dest);
9710
9711 /* If DEST is not a pointer type, call the normal function. */
9712 if (dest_align == 0)
9713 return NULL_RTX;
9714
9715 /* If SRC and DEST are the same (and not volatile), do nothing. */
9716 if (operand_equal_p (src, dest, 0))
9717 {
9718 tree expr;
9719
9720 if (fcode != BUILT_IN_MEMPCPY_CHK)
9721 {
9722 /* Evaluate and ignore LEN in case it has side-effects. */
9723 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9724 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9725 }
9726
9727 expr = fold_build_pointer_plus (dest, len);
9728 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9729 }
9730
9731 /* __memmove_chk special case. */
9732 if (fcode == BUILT_IN_MEMMOVE_CHK)
9733 {
9734 unsigned int src_align = get_pointer_alignment (src);
9735
9736 if (src_align == 0)
9737 return NULL_RTX;
9738
9739 /* If src is categorized for a readonly section we can use
9740 normal __memcpy_chk. */
9741 if (readonly_data_expr (src))
9742 {
9743 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9744 if (!fn)
9745 return NULL_RTX;
9746 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9747 dest, src, len, size);
9748 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9749 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9750 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9751 }
9752 }
9753 return NULL_RTX;
9754 }
9755 }
9756
9757 /* Emit warning if a buffer overflow is detected at compile time. */
9758
9759 static void
9760 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9761 {
9762 /* The source string. */
9763 tree srcstr = NULL_TREE;
9764 /* The size of the destination object. */
9765 tree objsize = NULL_TREE;
9766 /* The string that is being concatenated with (as in __strcat_chk)
9767 or null if it isn't. */
9768 tree catstr = NULL_TREE;
9769 /* The maximum length of the source sequence in a bounded operation
9770 (such as __strncat_chk) or null if the operation isn't bounded
9771 (such as __strcat_chk). */
9772 tree maxlen = NULL_TREE;
9773
9774 switch (fcode)
9775 {
9776 case BUILT_IN_STRCPY_CHK:
9777 case BUILT_IN_STPCPY_CHK:
9778 srcstr = CALL_EXPR_ARG (exp, 1);
9779 objsize = CALL_EXPR_ARG (exp, 2);
9780 break;
9781
9782 case BUILT_IN_STRCAT_CHK:
9783 /* For __strcat_chk the warning will be emitted only if overflowing
9784 by at least strlen (dest) + 1 bytes. */
9785 catstr = CALL_EXPR_ARG (exp, 0);
9786 srcstr = CALL_EXPR_ARG (exp, 1);
9787 objsize = CALL_EXPR_ARG (exp, 2);
9788 break;
9789
9790 case BUILT_IN_STRNCAT_CHK:
9791 catstr = CALL_EXPR_ARG (exp, 0);
9792 srcstr = CALL_EXPR_ARG (exp, 1);
9793 maxlen = CALL_EXPR_ARG (exp, 2);
9794 objsize = CALL_EXPR_ARG (exp, 3);
9795 break;
9796
9797 case BUILT_IN_STRNCPY_CHK:
9798 case BUILT_IN_STPNCPY_CHK:
9799 srcstr = CALL_EXPR_ARG (exp, 1);
9800 maxlen = CALL_EXPR_ARG (exp, 2);
9801 objsize = CALL_EXPR_ARG (exp, 3);
9802 break;
9803
9804 case BUILT_IN_SNPRINTF_CHK:
9805 case BUILT_IN_VSNPRINTF_CHK:
9806 maxlen = CALL_EXPR_ARG (exp, 1);
9807 objsize = CALL_EXPR_ARG (exp, 3);
9808 break;
9809 default:
9810 gcc_unreachable ();
9811 }
9812
9813 if (catstr && maxlen)
9814 {
9815 /* Check __strncat_chk. There is no way to determine the length
9816 of the string to which the source string is being appended so
9817 just warn when the length of the source string is not known. */
9818 check_strncat_sizes (exp, objsize);
9819 return;
9820 }
9821
9822 check_sizes (OPT_Wstringop_overflow_, exp,
9823 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9824 }
9825
9826 /* Emit warning if a buffer overflow is detected at compile time
9827 in __sprintf_chk/__vsprintf_chk calls. */
9828
9829 static void
9830 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9831 {
9832 tree size, len, fmt;
9833 const char *fmt_str;
9834 int nargs = call_expr_nargs (exp);
9835
9836 /* Verify the required arguments in the original call. */
9837
9838 if (nargs < 4)
9839 return;
9840 size = CALL_EXPR_ARG (exp, 2);
9841 fmt = CALL_EXPR_ARG (exp, 3);
9842
9843 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9844 return;
9845
9846 /* Check whether the format is a literal string constant. */
9847 fmt_str = c_getstr (fmt);
9848 if (fmt_str == NULL)
9849 return;
9850
9851 if (!init_target_chars ())
9852 return;
9853
9854 /* If the format doesn't contain % args or %%, we know its size. */
9855 if (strchr (fmt_str, target_percent) == 0)
9856 len = build_int_cstu (size_type_node, strlen (fmt_str));
9857 /* If the format is "%s" and first ... argument is a string literal,
9858 we know it too. */
9859 else if (fcode == BUILT_IN_SPRINTF_CHK
9860 && strcmp (fmt_str, target_percent_s) == 0)
9861 {
9862 tree arg;
9863
9864 if (nargs < 5)
9865 return;
9866 arg = CALL_EXPR_ARG (exp, 4);
9867 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9868 return;
9869
9870 len = c_strlen (arg, 1);
9871 if (!len || ! tree_fits_uhwi_p (len))
9872 return;
9873 }
9874 else
9875 return;
9876
9877 /* Add one for the terminating nul. */
9878 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9879 check_sizes (OPT_Wstringop_overflow_,
9880 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
9881 }
9882
9883 /* Emit warning if a free is called with address of a variable. */
9884
9885 static void
9886 maybe_emit_free_warning (tree exp)
9887 {
9888 tree arg = CALL_EXPR_ARG (exp, 0);
9889
9890 STRIP_NOPS (arg);
9891 if (TREE_CODE (arg) != ADDR_EXPR)
9892 return;
9893
9894 arg = get_base_address (TREE_OPERAND (arg, 0));
9895 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9896 return;
9897
9898 if (SSA_VAR_P (arg))
9899 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9900 "%Kattempt to free a non-heap object %qD", exp, arg);
9901 else
9902 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9903 "%Kattempt to free a non-heap object", exp);
9904 }
9905
9906 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9907 if possible. */
9908
9909 static tree
9910 fold_builtin_object_size (tree ptr, tree ost)
9911 {
9912 unsigned HOST_WIDE_INT bytes;
9913 int object_size_type;
9914
9915 if (!validate_arg (ptr, POINTER_TYPE)
9916 || !validate_arg (ost, INTEGER_TYPE))
9917 return NULL_TREE;
9918
9919 STRIP_NOPS (ost);
9920
9921 if (TREE_CODE (ost) != INTEGER_CST
9922 || tree_int_cst_sgn (ost) < 0
9923 || compare_tree_int (ost, 3) > 0)
9924 return NULL_TREE;
9925
9926 object_size_type = tree_to_shwi (ost);
9927
9928 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9929 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9930 and (size_t) 0 for types 2 and 3. */
9931 if (TREE_SIDE_EFFECTS (ptr))
9932 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9933
9934 if (TREE_CODE (ptr) == ADDR_EXPR)
9935 {
9936 compute_builtin_object_size (ptr, object_size_type, &bytes);
9937 if (wi::fits_to_tree_p (bytes, size_type_node))
9938 return build_int_cstu (size_type_node, bytes);
9939 }
9940 else if (TREE_CODE (ptr) == SSA_NAME)
9941 {
9942 /* If object size is not known yet, delay folding until
9943 later. Maybe subsequent passes will help determining
9944 it. */
9945 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9946 && wi::fits_to_tree_p (bytes, size_type_node))
9947 return build_int_cstu (size_type_node, bytes);
9948 }
9949
9950 return NULL_TREE;
9951 }
9952
9953 /* Builtins with folding operations that operate on "..." arguments
9954 need special handling; we need to store the arguments in a convenient
9955 data structure before attempting any folding. Fortunately there are
9956 only a few builtins that fall into this category. FNDECL is the
9957 function, EXP is the CALL_EXPR for the call. */
9958
9959 static tree
9960 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9961 {
9962 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9963 tree ret = NULL_TREE;
9964
9965 switch (fcode)
9966 {
9967 case BUILT_IN_FPCLASSIFY:
9968 ret = fold_builtin_fpclassify (loc, args, nargs);
9969 break;
9970
9971 default:
9972 break;
9973 }
9974 if (ret)
9975 {
9976 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9977 SET_EXPR_LOCATION (ret, loc);
9978 TREE_NO_WARNING (ret) = 1;
9979 return ret;
9980 }
9981 return NULL_TREE;
9982 }
9983
9984 /* Initialize format string characters in the target charset. */
9985
9986 bool
9987 init_target_chars (void)
9988 {
9989 static bool init;
9990 if (!init)
9991 {
9992 target_newline = lang_hooks.to_target_charset ('\n');
9993 target_percent = lang_hooks.to_target_charset ('%');
9994 target_c = lang_hooks.to_target_charset ('c');
9995 target_s = lang_hooks.to_target_charset ('s');
9996 if (target_newline == 0 || target_percent == 0 || target_c == 0
9997 || target_s == 0)
9998 return false;
9999
10000 target_percent_c[0] = target_percent;
10001 target_percent_c[1] = target_c;
10002 target_percent_c[2] = '\0';
10003
10004 target_percent_s[0] = target_percent;
10005 target_percent_s[1] = target_s;
10006 target_percent_s[2] = '\0';
10007
10008 target_percent_s_newline[0] = target_percent;
10009 target_percent_s_newline[1] = target_s;
10010 target_percent_s_newline[2] = target_newline;
10011 target_percent_s_newline[3] = '\0';
10012
10013 init = true;
10014 }
10015 return true;
10016 }
10017
10018 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10019 and no overflow/underflow occurred. INEXACT is true if M was not
10020 exactly calculated. TYPE is the tree type for the result. This
10021 function assumes that you cleared the MPFR flags and then
10022 calculated M to see if anything subsequently set a flag prior to
10023 entering this function. Return NULL_TREE if any checks fail. */
10024
10025 static tree
10026 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10027 {
10028 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10029 overflow/underflow occurred. If -frounding-math, proceed iff the
10030 result of calling FUNC was exact. */
10031 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10032 && (!flag_rounding_math || !inexact))
10033 {
10034 REAL_VALUE_TYPE rr;
10035
10036 real_from_mpfr (&rr, m, type, GMP_RNDN);
10037 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10038 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10039 but the mpft_t is not, then we underflowed in the
10040 conversion. */
10041 if (real_isfinite (&rr)
10042 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10043 {
10044 REAL_VALUE_TYPE rmode;
10045
10046 real_convert (&rmode, TYPE_MODE (type), &rr);
10047 /* Proceed iff the specified mode can hold the value. */
10048 if (real_identical (&rmode, &rr))
10049 return build_real (type, rmode);
10050 }
10051 }
10052 return NULL_TREE;
10053 }
10054
10055 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10056 number and no overflow/underflow occurred. INEXACT is true if M
10057 was not exactly calculated. TYPE is the tree type for the result.
10058 This function assumes that you cleared the MPFR flags and then
10059 calculated M to see if anything subsequently set a flag prior to
10060 entering this function. Return NULL_TREE if any checks fail, if
10061 FORCE_CONVERT is true, then bypass the checks. */
10062
10063 static tree
10064 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10065 {
10066 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10067 overflow/underflow occurred. If -frounding-math, proceed iff the
10068 result of calling FUNC was exact. */
10069 if (force_convert
10070 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10071 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10072 && (!flag_rounding_math || !inexact)))
10073 {
10074 REAL_VALUE_TYPE re, im;
10075
10076 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10077 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10078 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10079 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10080 but the mpft_t is not, then we underflowed in the
10081 conversion. */
10082 if (force_convert
10083 || (real_isfinite (&re) && real_isfinite (&im)
10084 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10085 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10086 {
10087 REAL_VALUE_TYPE re_mode, im_mode;
10088
10089 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10090 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10091 /* Proceed iff the specified mode can hold the value. */
10092 if (force_convert
10093 || (real_identical (&re_mode, &re)
10094 && real_identical (&im_mode, &im)))
10095 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10096 build_real (TREE_TYPE (type), im_mode));
10097 }
10098 }
10099 return NULL_TREE;
10100 }
10101
10102 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10103 the pointer *(ARG_QUO) and return the result. The type is taken
10104 from the type of ARG0 and is used for setting the precision of the
10105 calculation and results. */
10106
10107 static tree
10108 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10109 {
10110 tree const type = TREE_TYPE (arg0);
10111 tree result = NULL_TREE;
10112
10113 STRIP_NOPS (arg0);
10114 STRIP_NOPS (arg1);
10115
10116 /* To proceed, MPFR must exactly represent the target floating point
10117 format, which only happens when the target base equals two. */
10118 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10119 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10120 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10121 {
10122 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10123 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10124
10125 if (real_isfinite (ra0) && real_isfinite (ra1))
10126 {
10127 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10128 const int prec = fmt->p;
10129 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10130 tree result_rem;
10131 long integer_quo;
10132 mpfr_t m0, m1;
10133
10134 mpfr_inits2 (prec, m0, m1, NULL);
10135 mpfr_from_real (m0, ra0, GMP_RNDN);
10136 mpfr_from_real (m1, ra1, GMP_RNDN);
10137 mpfr_clear_flags ();
10138 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10139 /* Remquo is independent of the rounding mode, so pass
10140 inexact=0 to do_mpfr_ckconv(). */
10141 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10142 mpfr_clears (m0, m1, NULL);
10143 if (result_rem)
10144 {
10145 /* MPFR calculates quo in the host's long so it may
10146 return more bits in quo than the target int can hold
10147 if sizeof(host long) > sizeof(target int). This can
10148 happen even for native compilers in LP64 mode. In
10149 these cases, modulo the quo value with the largest
10150 number that the target int can hold while leaving one
10151 bit for the sign. */
10152 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10153 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10154
10155 /* Dereference the quo pointer argument. */
10156 arg_quo = build_fold_indirect_ref (arg_quo);
10157 /* Proceed iff a valid pointer type was passed in. */
10158 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10159 {
10160 /* Set the value. */
10161 tree result_quo
10162 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10163 build_int_cst (TREE_TYPE (arg_quo),
10164 integer_quo));
10165 TREE_SIDE_EFFECTS (result_quo) = 1;
10166 /* Combine the quo assignment with the rem. */
10167 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10168 result_quo, result_rem));
10169 }
10170 }
10171 }
10172 }
10173 return result;
10174 }
10175
10176 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10177 resulting value as a tree with type TYPE. The mpfr precision is
10178 set to the precision of TYPE. We assume that this mpfr function
10179 returns zero if the result could be calculated exactly within the
10180 requested precision. In addition, the integer pointer represented
10181 by ARG_SG will be dereferenced and set to the appropriate signgam
10182 (-1,1) value. */
10183
10184 static tree
10185 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10186 {
10187 tree result = NULL_TREE;
10188
10189 STRIP_NOPS (arg);
10190
10191 /* To proceed, MPFR must exactly represent the target floating point
10192 format, which only happens when the target base equals two. Also
10193 verify ARG is a constant and that ARG_SG is an int pointer. */
10194 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10195 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10196 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10197 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10198 {
10199 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10200
10201 /* In addition to NaN and Inf, the argument cannot be zero or a
10202 negative integer. */
10203 if (real_isfinite (ra)
10204 && ra->cl != rvc_zero
10205 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10206 {
10207 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10208 const int prec = fmt->p;
10209 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10210 int inexact, sg;
10211 mpfr_t m;
10212 tree result_lg;
10213
10214 mpfr_init2 (m, prec);
10215 mpfr_from_real (m, ra, GMP_RNDN);
10216 mpfr_clear_flags ();
10217 inexact = mpfr_lgamma (m, &sg, m, rnd);
10218 result_lg = do_mpfr_ckconv (m, type, inexact);
10219 mpfr_clear (m);
10220 if (result_lg)
10221 {
10222 tree result_sg;
10223
10224 /* Dereference the arg_sg pointer argument. */
10225 arg_sg = build_fold_indirect_ref (arg_sg);
10226 /* Assign the signgam value into *arg_sg. */
10227 result_sg = fold_build2 (MODIFY_EXPR,
10228 TREE_TYPE (arg_sg), arg_sg,
10229 build_int_cst (TREE_TYPE (arg_sg), sg));
10230 TREE_SIDE_EFFECTS (result_sg) = 1;
10231 /* Combine the signgam assignment with the lgamma result. */
10232 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10233 result_sg, result_lg));
10234 }
10235 }
10236 }
10237
10238 return result;
10239 }
10240
10241 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10242 mpc function FUNC on it and return the resulting value as a tree
10243 with type TYPE. The mpfr precision is set to the precision of
10244 TYPE. We assume that function FUNC returns zero if the result
10245 could be calculated exactly within the requested precision. If
10246 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10247 in the arguments and/or results. */
10248
10249 tree
10250 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10251 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10252 {
10253 tree result = NULL_TREE;
10254
10255 STRIP_NOPS (arg0);
10256 STRIP_NOPS (arg1);
10257
10258 /* To proceed, MPFR must exactly represent the target floating point
10259 format, which only happens when the target base equals two. */
10260 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10261 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10262 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10263 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10264 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10265 {
10266 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10267 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10268 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10269 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10270
10271 if (do_nonfinite
10272 || (real_isfinite (re0) && real_isfinite (im0)
10273 && real_isfinite (re1) && real_isfinite (im1)))
10274 {
10275 const struct real_format *const fmt =
10276 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10277 const int prec = fmt->p;
10278 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10279 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10280 int inexact;
10281 mpc_t m0, m1;
10282
10283 mpc_init2 (m0, prec);
10284 mpc_init2 (m1, prec);
10285 mpfr_from_real (mpc_realref (m0), re0, rnd);
10286 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10287 mpfr_from_real (mpc_realref (m1), re1, rnd);
10288 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10289 mpfr_clear_flags ();
10290 inexact = func (m0, m0, m1, crnd);
10291 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10292 mpc_clear (m0);
10293 mpc_clear (m1);
10294 }
10295 }
10296
10297 return result;
10298 }
10299
10300 /* A wrapper function for builtin folding that prevents warnings for
10301 "statement without effect" and the like, caused by removing the
10302 call node earlier than the warning is generated. */
10303
10304 tree
10305 fold_call_stmt (gcall *stmt, bool ignore)
10306 {
10307 tree ret = NULL_TREE;
10308 tree fndecl = gimple_call_fndecl (stmt);
10309 location_t loc = gimple_location (stmt);
10310 if (fndecl
10311 && TREE_CODE (fndecl) == FUNCTION_DECL
10312 && DECL_BUILT_IN (fndecl)
10313 && !gimple_call_va_arg_pack_p (stmt))
10314 {
10315 int nargs = gimple_call_num_args (stmt);
10316 tree *args = (nargs > 0
10317 ? gimple_call_arg_ptr (stmt, 0)
10318 : &error_mark_node);
10319
10320 if (avoid_folding_inline_builtin (fndecl))
10321 return NULL_TREE;
10322 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10323 {
10324 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10325 }
10326 else
10327 {
10328 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10329 if (ret)
10330 {
10331 /* Propagate location information from original call to
10332 expansion of builtin. Otherwise things like
10333 maybe_emit_chk_warning, that operate on the expansion
10334 of a builtin, will use the wrong location information. */
10335 if (gimple_has_location (stmt))
10336 {
10337 tree realret = ret;
10338 if (TREE_CODE (ret) == NOP_EXPR)
10339 realret = TREE_OPERAND (ret, 0);
10340 if (CAN_HAVE_LOCATION_P (realret)
10341 && !EXPR_HAS_LOCATION (realret))
10342 SET_EXPR_LOCATION (realret, loc);
10343 return realret;
10344 }
10345 return ret;
10346 }
10347 }
10348 }
10349 return NULL_TREE;
10350 }
10351
10352 /* Look up the function in builtin_decl that corresponds to DECL
10353 and set ASMSPEC as its user assembler name. DECL must be a
10354 function decl that declares a builtin. */
10355
10356 void
10357 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10358 {
10359 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10360 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10361 && asmspec != 0);
10362
10363 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10364 set_user_assembler_name (builtin, asmspec);
10365
10366 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10367 && INT_TYPE_SIZE < BITS_PER_WORD)
10368 {
10369 set_user_assembler_libfunc ("ffs", asmspec);
10370 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10371 "ffs");
10372 }
10373 }
10374
10375 /* Return true if DECL is a builtin that expands to a constant or similarly
10376 simple code. */
10377 bool
10378 is_simple_builtin (tree decl)
10379 {
10380 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10381 switch (DECL_FUNCTION_CODE (decl))
10382 {
10383 /* Builtins that expand to constants. */
10384 case BUILT_IN_CONSTANT_P:
10385 case BUILT_IN_EXPECT:
10386 case BUILT_IN_OBJECT_SIZE:
10387 case BUILT_IN_UNREACHABLE:
10388 /* Simple register moves or loads from stack. */
10389 case BUILT_IN_ASSUME_ALIGNED:
10390 case BUILT_IN_RETURN_ADDRESS:
10391 case BUILT_IN_EXTRACT_RETURN_ADDR:
10392 case BUILT_IN_FROB_RETURN_ADDR:
10393 case BUILT_IN_RETURN:
10394 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10395 case BUILT_IN_FRAME_ADDRESS:
10396 case BUILT_IN_VA_END:
10397 case BUILT_IN_STACK_SAVE:
10398 case BUILT_IN_STACK_RESTORE:
10399 /* Exception state returns or moves registers around. */
10400 case BUILT_IN_EH_FILTER:
10401 case BUILT_IN_EH_POINTER:
10402 case BUILT_IN_EH_COPY_VALUES:
10403 return true;
10404
10405 default:
10406 return false;
10407 }
10408
10409 return false;
10410 }
10411
10412 /* Return true if DECL is a builtin that is not expensive, i.e., they are
10413 most probably expanded inline into reasonably simple code. This is a
10414 superset of is_simple_builtin. */
10415 bool
10416 is_inexpensive_builtin (tree decl)
10417 {
10418 if (!decl)
10419 return false;
10420 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10421 return true;
10422 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10423 switch (DECL_FUNCTION_CODE (decl))
10424 {
10425 case BUILT_IN_ABS:
10426 case BUILT_IN_ALLOCA:
10427 case BUILT_IN_ALLOCA_WITH_ALIGN:
10428 case BUILT_IN_BSWAP16:
10429 case BUILT_IN_BSWAP32:
10430 case BUILT_IN_BSWAP64:
10431 case BUILT_IN_CLZ:
10432 case BUILT_IN_CLZIMAX:
10433 case BUILT_IN_CLZL:
10434 case BUILT_IN_CLZLL:
10435 case BUILT_IN_CTZ:
10436 case BUILT_IN_CTZIMAX:
10437 case BUILT_IN_CTZL:
10438 case BUILT_IN_CTZLL:
10439 case BUILT_IN_FFS:
10440 case BUILT_IN_FFSIMAX:
10441 case BUILT_IN_FFSL:
10442 case BUILT_IN_FFSLL:
10443 case BUILT_IN_IMAXABS:
10444 case BUILT_IN_FINITE:
10445 case BUILT_IN_FINITEF:
10446 case BUILT_IN_FINITEL:
10447 case BUILT_IN_FINITED32:
10448 case BUILT_IN_FINITED64:
10449 case BUILT_IN_FINITED128:
10450 case BUILT_IN_FPCLASSIFY:
10451 case BUILT_IN_ISFINITE:
10452 case BUILT_IN_ISINF_SIGN:
10453 case BUILT_IN_ISINF:
10454 case BUILT_IN_ISINFF:
10455 case BUILT_IN_ISINFL:
10456 case BUILT_IN_ISINFD32:
10457 case BUILT_IN_ISINFD64:
10458 case BUILT_IN_ISINFD128:
10459 case BUILT_IN_ISNAN:
10460 case BUILT_IN_ISNANF:
10461 case BUILT_IN_ISNANL:
10462 case BUILT_IN_ISNAND32:
10463 case BUILT_IN_ISNAND64:
10464 case BUILT_IN_ISNAND128:
10465 case BUILT_IN_ISNORMAL:
10466 case BUILT_IN_ISGREATER:
10467 case BUILT_IN_ISGREATEREQUAL:
10468 case BUILT_IN_ISLESS:
10469 case BUILT_IN_ISLESSEQUAL:
10470 case BUILT_IN_ISLESSGREATER:
10471 case BUILT_IN_ISUNORDERED:
10472 case BUILT_IN_VA_ARG_PACK:
10473 case BUILT_IN_VA_ARG_PACK_LEN:
10474 case BUILT_IN_VA_COPY:
10475 case BUILT_IN_TRAP:
10476 case BUILT_IN_SAVEREGS:
10477 case BUILT_IN_POPCOUNTL:
10478 case BUILT_IN_POPCOUNTLL:
10479 case BUILT_IN_POPCOUNTIMAX:
10480 case BUILT_IN_POPCOUNT:
10481 case BUILT_IN_PARITYL:
10482 case BUILT_IN_PARITYLL:
10483 case BUILT_IN_PARITYIMAX:
10484 case BUILT_IN_PARITY:
10485 case BUILT_IN_LABS:
10486 case BUILT_IN_LLABS:
10487 case BUILT_IN_PREFETCH:
10488 case BUILT_IN_ACC_ON_DEVICE:
10489 return true;
10490
10491 default:
10492 return is_simple_builtin (decl);
10493 }
10494
10495 return false;
10496 }
10497
10498 /* Return true if T is a constant and the value cast to a target char
10499 can be represented by a host char.
10500 Store the casted char constant in *P if so. */
10501
10502 bool
10503 target_char_cst_p (tree t, char *p)
10504 {
10505 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10506 return false;
10507
10508 *p = (char)tree_to_uhwi (t);
10509 return true;
10510 }