1 /* Expand builtin functions.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "stringpool.h"
64 #include "attribs.h"
65 #include "asan.h"
66 #include "cilk.h"
67 #include "tree-chkp.h"
68 #include "rtl-chkp.h"
69 #include "internal-fn.h"
70 #include "case-cfn-macros.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73
74 struct target_builtins default_target_builtins;
75 #if SWITCHABLE_TARGET
76 struct target_builtins *this_target_builtins = &default_target_builtins;
77 #endif
78
79 /* Define the names of the builtin function types and codes. */
80 const char *const built_in_class_names[BUILT_IN_LAST]
81 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
82
83 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
84 const char * built_in_names[(int) END_BUILTINS] =
85 {
86 #include "builtins.def"
87 };
88
89 /* Set up an array of builtin_info_type, making sure each element's decl is
90 initialized to NULL_TREE. */
91 builtin_info_type builtin_info[(int)END_BUILTINS];
92
93 /* Non-zero if __builtin_constant_p should be folded right away. */
94 bool force_folding_builtin_constant_p;
95
96 static rtx c_readstr (const char *, machine_mode);
97 static int target_char_cast (tree, char *);
98 static rtx get_memory_rtx (tree, tree);
99 static int apply_args_size (void);
100 static int apply_result_size (void);
101 static rtx result_vector (int, rtx);
102 static void expand_builtin_prefetch (tree);
103 static rtx expand_builtin_apply_args (void);
104 static rtx expand_builtin_apply_args_1 (void);
105 static rtx expand_builtin_apply (rtx, rtx, rtx);
106 static void expand_builtin_return (rtx);
107 static enum type_class type_to_class (tree);
108 static rtx expand_builtin_classify_type (tree);
109 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
111 static rtx expand_builtin_interclass_mathfn (tree, rtx);
112 static rtx expand_builtin_sincos (tree);
113 static rtx expand_builtin_cexpi (tree, rtx);
114 static rtx expand_builtin_int_roundingfn (tree, rtx);
115 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
116 static rtx expand_builtin_next_arg (void);
117 static rtx expand_builtin_va_start (tree);
118 static rtx expand_builtin_va_end (tree);
119 static rtx expand_builtin_va_copy (tree);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
123 static rtx expand_builtin_memchr (tree, rtx);
124 static rtx expand_builtin_memcpy (tree, rtx);
125 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
126 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
127 rtx target, tree exp, int endp);
128 static rtx expand_builtin_memmove (tree, rtx);
129 static rtx expand_builtin_mempcpy (tree, rtx);
130 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
131 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
132 static rtx expand_builtin_strcat (tree, rtx);
133 static rtx expand_builtin_strcpy (tree, rtx);
134 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
135 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
136 static rtx expand_builtin_stpncpy (tree, rtx);
137 static rtx expand_builtin_strncat (tree, rtx);
138 static rtx expand_builtin_strncpy (tree, rtx);
139 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
140 static rtx expand_builtin_memset (tree, rtx, machine_mode);
141 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_alloca (tree);
146 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static tree stabilize_va_list_loc (location_t, tree, int);
149 static rtx expand_builtin_expect (tree, rtx);
150 static tree fold_builtin_constant_p (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (location_t, tree, tree);
153 static tree fold_builtin_inf (location_t, tree, int);
154 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
155 static bool validate_arg (const_tree, enum tree_code code);
156 static rtx expand_builtin_fabs (tree, rtx, rtx);
157 static rtx expand_builtin_signbit (tree, rtx);
158 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
159 static tree fold_builtin_isascii (location_t, tree);
160 static tree fold_builtin_toascii (location_t, tree);
161 static tree fold_builtin_isdigit (location_t, tree);
162 static tree fold_builtin_fabs (location_t, tree, tree);
163 static tree fold_builtin_abs (location_t, tree, tree);
164 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
165 enum tree_code);
166 static tree fold_builtin_0 (location_t, tree);
167 static tree fold_builtin_1 (location_t, tree, tree);
168 static tree fold_builtin_2 (location_t, tree, tree, tree);
169 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_varargs (location_t, tree, tree*, int);
171
172 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
173 static tree fold_builtin_strspn (location_t, tree, tree);
174 static tree fold_builtin_strcspn (location_t, tree, tree);
175
176 static rtx expand_builtin_object_size (tree);
177 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 enum built_in_function);
179 static void maybe_emit_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_free_warning (tree);
182 static tree fold_builtin_object_size (tree, tree);
183
184 unsigned HOST_WIDE_INT target_newline;
185 unsigned HOST_WIDE_INT target_percent;
186 static unsigned HOST_WIDE_INT target_c;
187 static unsigned HOST_WIDE_INT target_s;
188 char target_percent_c[3];
189 char target_percent_s[3];
190 char target_percent_s_newline[4];
191 static tree do_mpfr_remquo (tree, tree, tree);
192 static tree do_mpfr_lgamma_r (tree, tree, tree);
193 static void expand_builtin_sync_synchronize (void);
194
195 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or names a Cilk Plus runtime entry point. */
196
197 static bool
198 is_builtin_name (const char *name)
199 {
200 if (strncmp (name, "__builtin_", 10) == 0)
201 return true;
202 if (strncmp (name, "__sync_", 7) == 0)
203 return true;
204 if (strncmp (name, "__atomic_", 9) == 0)
205 return true;
206 if (flag_cilkplus
207 && (!strcmp (name, "__cilkrts_detach")
208 || !strcmp (name, "__cilkrts_pop_frame")))
209 return true;
210 return false;
211 }
212
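/* A few concrete cases of the predicate above (a sketch; the Cilk
   names are only accepted when flag_cilkplus is set):

     is_builtin_name ("__builtin_memcpy")        -> true
     is_builtin_name ("__sync_fetch_and_add_4")  -> true
     is_builtin_name ("__atomic_load_8")         -> true
     is_builtin_name ("memcpy")                  -> false  */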
213
214 /* Return true if DECL is a function symbol representing a built-in. */
215
216 bool
217 is_builtin_fn (tree decl)
218 {
219 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
220 }
221
222 /* Return true if NODE should be considered for inline expansion regardless
223 of the optimization level. This means whenever a function is invoked with
224 its "internal" name, which normally contains the prefix "__builtin". */
225
226 bool
227 called_as_built_in (tree node)
228 {
229 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
230 we want the name used to call the function, not the name it
231 will have. */
232 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
233 return is_builtin_name (name);
234 }
235
236 /* Compute values M and N such that M divides (address of EXP - N) and such
237 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
238 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
239 *ALIGNP and any bit-offset to *BITPOSP.
240
241 Note that the address (and thus the alignment) computed here is based
242 on the address to which a symbol resolves, whereas DECL_ALIGN is based
243 on the address at which an object is actually located. These two
244 addresses are not always the same. For example, on ARM targets,
245 the address &foo of a Thumb function foo() has the lowest bit set,
246 whereas foo() itself starts on an even address.
247
248 If ADDR_P is true we are taking the address of the memory reference EXP
249 and thus cannot rely on the access taking place. */
250
251 static bool
252 get_object_alignment_2 (tree exp, unsigned int *alignp,
253 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
254 {
255 HOST_WIDE_INT bitsize, bitpos;
256 tree offset;
257 machine_mode mode;
258 int unsignedp, reversep, volatilep;
259 unsigned int align = BITS_PER_UNIT;
260 bool known_alignment = false;
261
262 /* Get the innermost object and the constant (bitpos) and possibly
263 variable (offset) offset of the access. */
264 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
265 &unsignedp, &reversep, &volatilep);
266
267 /* Extract alignment information from the innermost object and
268 possibly adjust bitpos and offset. */
269 if (TREE_CODE (exp) == FUNCTION_DECL)
270 {
271 /* Function addresses can encode extra information besides their
272 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
273 allows the low bit to be used as a virtual bit, we know
274 that the address itself must be at least 2-byte aligned. */
275 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
276 align = 2 * BITS_PER_UNIT;
277 }
278 else if (TREE_CODE (exp) == LABEL_DECL)
279 ;
280 else if (TREE_CODE (exp) == CONST_DECL)
281 {
282 /* The alignment of a CONST_DECL is determined by its initializer. */
283 exp = DECL_INITIAL (exp);
284 align = TYPE_ALIGN (TREE_TYPE (exp));
285 if (CONSTANT_CLASS_P (exp))
286 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
287
288 known_alignment = true;
289 }
290 else if (DECL_P (exp))
291 {
292 align = DECL_ALIGN (exp);
293 known_alignment = true;
294 }
295 else if (TREE_CODE (exp) == INDIRECT_REF
296 || TREE_CODE (exp) == MEM_REF
297 || TREE_CODE (exp) == TARGET_MEM_REF)
298 {
299 tree addr = TREE_OPERAND (exp, 0);
300 unsigned ptr_align;
301 unsigned HOST_WIDE_INT ptr_bitpos;
302 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
303
304 /* If the address is explicitly aligned, handle that. */
305 if (TREE_CODE (addr) == BIT_AND_EXPR
306 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
307 {
308 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
309 ptr_bitmask *= BITS_PER_UNIT;
310 align = least_bit_hwi (ptr_bitmask);
311 addr = TREE_OPERAND (addr, 0);
312 }
313
314 known_alignment
315 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
316 align = MAX (ptr_align, align);
317
318 /* Re-apply explicit alignment to the bitpos. */
319 ptr_bitpos &= ptr_bitmask;
320
321 /* The alignment of the pointer operand in a TARGET_MEM_REF
322 has to take the variable offset parts into account. */
323 if (TREE_CODE (exp) == TARGET_MEM_REF)
324 {
325 if (TMR_INDEX (exp))
326 {
327 unsigned HOST_WIDE_INT step = 1;
328 if (TMR_STEP (exp))
329 step = TREE_INT_CST_LOW (TMR_STEP (exp));
330 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
331 }
332 if (TMR_INDEX2 (exp))
333 align = BITS_PER_UNIT;
334 known_alignment = false;
335 }
336
337 /* When EXP is an actual memory reference then we can use
338 TYPE_ALIGN of a pointer indirection to derive alignment.
339 Do so only if get_pointer_alignment_1 did not reveal absolute
340 alignment knowledge and if using that alignment would
341 improve the situation. */
342 unsigned int talign;
343 if (!addr_p && !known_alignment
344 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
345 && talign > align)
346 align = talign;
347 else
348 {
349 /* Else adjust bitpos accordingly. */
350 bitpos += ptr_bitpos;
351 if (TREE_CODE (exp) == MEM_REF
352 || TREE_CODE (exp) == TARGET_MEM_REF)
353 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
354 }
355 }
356 else if (TREE_CODE (exp) == STRING_CST)
357 {
358 /* STRING_CST are the only constant objects we allow to be not
359 wrapped inside a CONST_DECL. */
360 align = TYPE_ALIGN (TREE_TYPE (exp));
361 if (CONSTANT_CLASS_P (exp))
362 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
363
364 known_alignment = true;
365 }
366
367 /* If there is a non-constant offset part extract the maximum
368 alignment that can prevail. */
369 if (offset)
370 {
371 unsigned int trailing_zeros = tree_ctz (offset);
372 if (trailing_zeros < HOST_BITS_PER_INT)
373 {
374 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
375 if (inner)
376 align = MIN (align, inner);
377 }
378 }
379
380 *alignp = align;
381 *bitposp = bitpos & (*alignp - 1);
382 return known_alignment;
383 }
384
385 /* For a memory reference expression EXP compute values M and N such that M
386 divides (&EXP - N) and such that N < M. If these numbers can be determined,
387 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
388 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
389
390 bool
391 get_object_alignment_1 (tree exp, unsigned int *alignp,
392 unsigned HOST_WIDE_INT *bitposp)
393 {
394 return get_object_alignment_2 (exp, alignp, bitposp, false);
395 }
396
397 /* Return the alignment in bits of EXP, an object. */
398
399 unsigned int
400 get_object_alignment (tree exp)
401 {
402 unsigned HOST_WIDE_INT bitpos = 0;
403 unsigned int align;
404
405 get_object_alignment_1 (exp, &align, &bitpos);
406
407 /* align and bitpos now specify known low bits of the pointer.
408 ptr & (align - 1) == bitpos. */
409
410 if (bitpos != 0)
411 align = least_bit_hwi (bitpos);
412 return align;
413 }
414
415 /* For a pointer valued expression EXP compute values M and N such that M
416 divides (EXP - N) and such that N < M. If these numbers can be determined,
417 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
418 the results are just a conservative approximation.
419
420 If EXP is not a pointer, false is returned too. */
421
422 bool
423 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
424 unsigned HOST_WIDE_INT *bitposp)
425 {
426 STRIP_NOPS (exp);
427
428 if (TREE_CODE (exp) == ADDR_EXPR)
429 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
430 alignp, bitposp, true);
431 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
432 {
433 unsigned int align;
434 unsigned HOST_WIDE_INT bitpos;
435 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
436 &align, &bitpos);
437 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
438 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
439 else
440 {
441 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
442 if (trailing_zeros < HOST_BITS_PER_INT)
443 {
444 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
445 if (inner)
446 align = MIN (align, inner);
447 }
448 }
449 *alignp = align;
450 *bitposp = bitpos & (align - 1);
451 return res;
452 }
453 else if (TREE_CODE (exp) == SSA_NAME
454 && POINTER_TYPE_P (TREE_TYPE (exp)))
455 {
456 unsigned int ptr_align, ptr_misalign;
457 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
458
459 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
460 {
461 *bitposp = ptr_misalign * BITS_PER_UNIT;
462 *alignp = ptr_align * BITS_PER_UNIT;
463 /* Make sure to return a sensible alignment when the multiplication
464 by BITS_PER_UNIT overflowed. */
465 if (*alignp == 0)
466 *alignp = 1u << (HOST_BITS_PER_INT - 1);
467 /* We cannot really tell whether this result is an approximation. */
468 return false;
469 }
470 else
471 {
472 *bitposp = 0;
473 *alignp = BITS_PER_UNIT;
474 return false;
475 }
476 }
477 else if (TREE_CODE (exp) == INTEGER_CST)
478 {
479 *alignp = BIGGEST_ALIGNMENT;
480 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
481 & (BIGGEST_ALIGNMENT - 1));
482 return true;
483 }
484
485 *bitposp = 0;
486 *alignp = BITS_PER_UNIT;
487 return false;
488 }
489
490 /* Return the alignment in bits of EXP, a pointer valued expression.
491 The alignment returned is, by default, the alignment of the thing that
492 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
493
494 Otherwise, look at the expression to see if we can do better, i.e., if the
495 expression is actually pointing at an object whose alignment is tighter. */
496
497 unsigned int
498 get_pointer_alignment (tree exp)
499 {
500 unsigned HOST_WIDE_INT bitpos = 0;
501 unsigned int align;
502
503 get_pointer_alignment_1 (exp, &align, &bitpos);
504
505 /* align and bitpos now specify known low bits of the pointer.
506 ptr & (align - 1) == bitpos. */
507
508 if (bitpos != 0)
509 align = least_bit_hwi (bitpos);
510
511 return align;
512 }
513
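/* A small worked example of the ALIGN/BITPOS contract used by the two
   functions above, assuming BITS_PER_UNIT == 8: if all that is known
   about a pointer P is that P == 16 * k + 4 for some k, then
   get_pointer_alignment_1 reports *ALIGNP == 128 and *BITPOSP == 32
   (both in bits), and get_pointer_alignment reduces that to
   least_bit_hwi (32) == 32, i.e. a guaranteed 4-byte alignment.  */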
514 /* Return the number of non-zero elements in the sequence
515 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
516 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
517
518 static unsigned
519 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
520 {
521 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
522
523 unsigned n;
524
525 if (eltsize == 1)
526 {
527 /* Optimize the common case of plain char. */
528 for (n = 0; n < maxelts; n++)
529 {
530 const char *elt = (const char*) ptr + n;
531 if (!*elt)
532 break;
533 }
534 }
535 else
536 {
537 for (n = 0; n < maxelts; n++)
538 {
539 const char *elt = (const char*) ptr + n * eltsize;
540 if (!memcmp (elt, "\0\0\0\0", eltsize))
541 break;
542 }
543 }
544 return n;
545 }
546
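/* For example, string_length ("hi\0there", 1, 8) returns 2, because the
   scan stops at the first zero element.  With ELTSIZE == 2 or 4 the
   memcmp above compares a whole element at a time, so a single zero
   byte inside an otherwise non-zero wide character does not end the
   count.  */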
547 /* Compute the length of a null-terminated character string or wide
548 character string handling character sizes of 1, 2, and 4 bytes.
549 TREE_STRING_LENGTH is not the right way because it evaluates to
550 the size of the character array in bytes (as opposed to characters)
551 and because it can contain a zero byte in the middle.
552
553 ONLY_VALUE should be nonzero if the result is not going to be emitted
554 into the instruction stream and zero if it is going to be expanded.
555 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
556 is returned, otherwise NULL, since
557 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
558 evaluate the side-effects.
559
560 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
561 accesses. Note that this implies the result is not going to be emitted
562 into the instruction stream.
563
564 The value returned is of type `ssizetype'.
565
566 Unfortunately, string_constant can't access the values of const char
567 arrays with initializers, so neither can we do so here. */
568
569 tree
570 c_strlen (tree src, int only_value)
571 {
572 STRIP_NOPS (src);
573 if (TREE_CODE (src) == COND_EXPR
574 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
575 {
576 tree len1, len2;
577
578 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
579 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
580 if (tree_int_cst_equal (len1, len2))
581 return len1;
582 }
583
584 if (TREE_CODE (src) == COMPOUND_EXPR
585 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
586 return c_strlen (TREE_OPERAND (src, 1), only_value);
587
588 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
589
590 /* Offset from the beginning of the string in bytes. */
591 tree byteoff;
592 src = string_constant (src, &byteoff);
593 if (src == 0)
594 return NULL_TREE;
595
596 /* Determine the size of the string element. */
597 unsigned eltsize
598 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
599
600 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
601 length of SRC. */
602 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
603
604 /* PTR can point to the byte representation of any string type, including
605 char* and wchar_t*. */
606 const char *ptr = TREE_STRING_POINTER (src);
607
608 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
609 {
610 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
611 compute the offset to the following null if we don't know where to
612 start searching for it. */
613 if (string_length (ptr, eltsize, maxelts) < maxelts)
614 {
615 /* Return when an embedded null character is found. */
616 return NULL_TREE;
617 }
618
619 /* We don't know the starting offset, but we do know that the string
620 has no internal zero bytes. We can assume that the offset falls
621 within the bounds of the string; otherwise, the programmer deserves
622 what he gets. Subtract the offset from the length of the string,
623 and return that. This would perhaps not be valid if we were dealing
624 with named arrays in addition to literal string constants. */
625
626 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
627 }
628
629 /* Offset from the beginning of the string in elements. */
630 HOST_WIDE_INT eltoff;
631
632 /* We have a known offset into the string. Start searching there for
633 a null character if we can represent it as a single HOST_WIDE_INT. */
634 if (byteoff == 0)
635 eltoff = 0;
636 else if (! tree_fits_shwi_p (byteoff))
637 eltoff = -1;
638 else
639 eltoff = tree_to_shwi (byteoff) / eltsize;
640
641 /* If the offset is known to be out of bounds, warn, and call strlen at
642 runtime. */
643 if (eltoff < 0 || eltoff > maxelts)
644 {
645 /* Suppress multiple warnings for propagated constant strings. */
646 if (only_value != 2
647 && !TREE_NO_WARNING (src))
648 {
649 warning_at (loc, 0, "offset %qwi outside bounds of constant string",
650 eltoff);
651 TREE_NO_WARNING (src) = 1;
652 }
653 return NULL_TREE;
654 }
655
656 /* Use strlen to search for the first zero byte. Since any strings
657 constructed with build_string will have nulls appended, we win even
658 if we get handed something like (char[4])"abcd".
659
660 Since ELTOFF is our starting index into the string, no further
661 calculation is needed. */
662 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
663 maxelts - eltoff);
664
665 return ssize_int (len);
666 }
667
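/* Examples of the folding implemented above, written at the source
   level (a sketch; the actual inputs are GENERIC trees):

     strlen ("foobar")        -> folded to 6
     strlen ("foobar" + 2)    -> folded to 4 (known constant offset)
     strlen ("foo\0bar" + i)  -> not folded: embedded nul plus unknown
                                 offset, so no length can be proved
     strlen ("foobar" + 10)   -> warned about and not folded: the
                                 offset is outside the constant string.  */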
668 /* Return a constant integer corresponding to target reading
669 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
670
671 static rtx
672 c_readstr (const char *str, machine_mode mode)
673 {
674 HOST_WIDE_INT ch;
675 unsigned int i, j;
676 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
677
678 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
679 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
680 / HOST_BITS_PER_WIDE_INT;
681
682 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
683 for (i = 0; i < len; i++)
684 tmp[i] = 0;
685
686 ch = 1;
687 for (i = 0; i < GET_MODE_SIZE (mode); i++)
688 {
689 j = i;
690 if (WORDS_BIG_ENDIAN)
691 j = GET_MODE_SIZE (mode) - i - 1;
692 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
693 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
694 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
695 j *= BITS_PER_UNIT;
696
697 if (ch)
698 ch = (unsigned char) str[i];
699 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
700 }
701
702 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
703 return immed_wide_int_const (c, mode);
704 }
705
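/* A worked example of the packing done above, assuming 8-bit units and
   a 32-bit SImode: on a little-endian target c_readstr ("abcd", SImode)
   produces the constant 0x64636261, i.e. 'a' ends up in the least
   significant byte, while a big-endian target yields 0x61626364.  */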
706 /* Cast a target constant CST to target CHAR and if that value fits into
707 host char type, return zero and put that value into variable pointed to by
708 P. */
709
710 static int
711 target_char_cast (tree cst, char *p)
712 {
713 unsigned HOST_WIDE_INT val, hostval;
714
715 if (TREE_CODE (cst) != INTEGER_CST
716 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
717 return 1;
718
719 /* Do not care if it fits or not right here. */
720 val = TREE_INT_CST_LOW (cst);
721
722 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
723 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
724
725 hostval = val;
726 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
727 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
728
729 if (val != hostval)
730 return 1;
731
732 *p = hostval;
733 return 0;
734 }
735
736 /* Similar to save_expr, but assumes that arbitrary code is not executed
737 in between the multiple evaluations. In particular, we assume that a
738 non-addressable local variable will not be modified. */
739
740 static tree
741 builtin_save_expr (tree exp)
742 {
743 if (TREE_CODE (exp) == SSA_NAME
744 || (TREE_ADDRESSABLE (exp) == 0
745 && (TREE_CODE (exp) == PARM_DECL
746 || (VAR_P (exp) && !TREE_STATIC (exp)))))
747 return exp;
748
749 return save_expr (exp);
750 }
751
752 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
753 times to get the address of either a higher stack frame, or a return
754 address located within it (depending on FNDECL_CODE). */
755
756 static rtx
757 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
758 {
759 int i;
760 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
761 if (tem == NULL_RTX)
762 {
763 /* For a zero count with __builtin_return_address, we don't care what
764 frame address we return, because target-specific definitions will
765 override us. Therefore frame pointer elimination is OK, and using
766 the soft frame pointer is OK.
767
768 For a nonzero count, or a zero count with __builtin_frame_address,
769 we require a stable offset from the current frame pointer to the
770 previous one, so we must use the hard frame pointer, and
771 we must disable frame pointer elimination. */
772 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
773 tem = frame_pointer_rtx;
774 else
775 {
776 tem = hard_frame_pointer_rtx;
777
778 /* Tell reload not to eliminate the frame pointer. */
779 crtl->accesses_prior_frames = 1;
780 }
781 }
782
783 if (count > 0)
784 SETUP_FRAME_ADDRESSES ();
785
786 /* On the SPARC, the return address is not in the frame, it is in a
787 register. There is no way to access it off of the current frame
788 pointer, but it can be accessed off the previous frame pointer by
789 reading the value from the register window save area. */
790 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
791 count--;
792
793 /* Scan back COUNT frames to the specified frame. */
794 for (i = 0; i < count; i++)
795 {
796 /* Assume the dynamic chain pointer is in the word that the
797 frame address points to, unless otherwise specified. */
798 tem = DYNAMIC_CHAIN_ADDRESS (tem);
799 tem = memory_address (Pmode, tem);
800 tem = gen_frame_mem (Pmode, tem);
801 tem = copy_to_reg (tem);
802 }
803
804 /* For __builtin_frame_address, return what we've got. But, on
805 the SPARC for example, we may have to add a bias. */
806 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
807 return FRAME_ADDR_RTX (tem);
808
809 /* For __builtin_return_address, get the return address from that frame. */
810 #ifdef RETURN_ADDR_RTX
811 tem = RETURN_ADDR_RTX (count, tem);
812 #else
813 tem = memory_address (Pmode,
814 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
815 tem = gen_frame_mem (Pmode, tem);
816 #endif
817 return tem;
818 }
819
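/* User-level code that reaches the expander above (illustrative):

     void *
     who_called_me (void)
     {
       return __builtin_return_address (0);   // COUNT == 0
     }

   A nonzero COUNT walks the dynamic chain exactly as the loop above
   does, and is only reliable when every intervening frame keeps a
   frame pointer.  */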
820 /* Alias set used for setjmp buffer. */
821 static alias_set_type setjmp_alias_set = -1;
822
823 /* Construct the leading half of a __builtin_setjmp call. Control will
824 return to RECEIVER_LABEL. This is also called directly by the SJLJ
825 exception handling code. */
826
827 void
828 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
829 {
830 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
831 rtx stack_save;
832 rtx mem;
833
834 if (setjmp_alias_set == -1)
835 setjmp_alias_set = new_alias_set ();
836
837 buf_addr = convert_memory_address (Pmode, buf_addr);
838
839 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
840
841 /* We store the frame pointer and the address of receiver_label in
842 the buffer and use the rest of it for the stack save area, which
843 is machine-dependent. */
844
845 mem = gen_rtx_MEM (Pmode, buf_addr);
846 set_mem_alias_set (mem, setjmp_alias_set);
847 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
848
849 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
850 GET_MODE_SIZE (Pmode))),
851 set_mem_alias_set (mem, setjmp_alias_set);
852
853 emit_move_insn (validize_mem (mem),
854 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
855
856 stack_save = gen_rtx_MEM (sa_mode,
857 plus_constant (Pmode, buf_addr,
858 2 * GET_MODE_SIZE (Pmode)));
859 set_mem_alias_set (stack_save, setjmp_alias_set);
860 emit_stack_save (SAVE_NONLOCAL, &stack_save);
861
862 /* If there is further processing to do, do it. */
863 if (targetm.have_builtin_setjmp_setup ())
864 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
865
866 /* We have a nonlocal label. */
867 cfun->has_nonlocal_label = 1;
868 }
869
870 /* Construct the trailing part of a __builtin_setjmp call. This is
871 also called directly by the SJLJ exception handling code.
872 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
873
874 void
875 expand_builtin_setjmp_receiver (rtx receiver_label)
876 {
877 rtx chain;
878
879 /* Mark the FP as used when we get here, so we have to make sure it's
880 marked as used by this function. */
881 emit_use (hard_frame_pointer_rtx);
882
883 /* Mark the static chain as clobbered here so life information
884 doesn't get messed up for it. */
885 chain = targetm.calls.static_chain (current_function_decl, true);
886 if (chain && REG_P (chain))
887 emit_clobber (chain);
888
889 /* Now put in the code to restore the frame pointer, and argument
890 pointer, if needed. */
891 if (! targetm.have_nonlocal_goto ())
892 {
893 /* First adjust our frame pointer to its actual value. It was
894 previously set to the start of the virtual area corresponding to
895 the stacked variables when we branched here and now needs to be
896 adjusted to the actual hardware fp value.
897
898 Assignments to virtual registers are converted by
899 instantiate_virtual_regs into the corresponding assignment
900 to the underlying register (fp in this case) that makes
901 the original assignment true.
902 So the following insn will actually be decrementing fp by
903 STARTING_FRAME_OFFSET. */
904 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
905
906 /* Restoring the frame pointer also modifies the hard frame pointer.
907 Mark it used (so that the previous assignment remains live once
908 the frame pointer is eliminated) and clobbered (to represent the
909 implicit update from the assignment). */
910 emit_use (hard_frame_pointer_rtx);
911 emit_clobber (hard_frame_pointer_rtx);
912 }
913
914 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
915 {
916 /* If the argument pointer can be eliminated in favor of the
917 frame pointer, we don't need to restore it. We assume here
918 that if such an elimination is present, it can always be used.
919 This is the case on all known machines; if we don't make this
920 assumption, we do unnecessary saving on many machines. */
921 size_t i;
922 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
923
924 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
925 if (elim_regs[i].from == ARG_POINTER_REGNUM
926 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
927 break;
928
929 if (i == ARRAY_SIZE (elim_regs))
930 {
931 /* Now restore our arg pointer from the address at which it
932 was saved in our stack frame. */
933 emit_move_insn (crtl->args.internal_arg_pointer,
934 copy_to_reg (get_arg_pointer_save_area ()));
935 }
936 }
937
938 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
939 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
940 else if (targetm.have_nonlocal_goto_receiver ())
941 emit_insn (targetm.gen_nonlocal_goto_receiver ());
942 else
943 { /* Nothing */ }
944
945 /* We must not allow the code we just generated to be reordered by
946 scheduling. Specifically, the update of the frame pointer must
947 happen immediately, not later. */
948 emit_insn (gen_blockage ());
949 }
950
951 /* __builtin_longjmp is passed a pointer to an array of five words (not
952 all will be used on all machines). It operates similarly to the C
953 library function of the same name, but is more efficient. Much of
954 the code below is copied from the handling of non-local gotos. */
955
956 static void
957 expand_builtin_longjmp (rtx buf_addr, rtx value)
958 {
959 rtx fp, lab, stack;
960 rtx_insn *insn, *last;
961 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
962
963 /* DRAP is needed for stack realignment if longjmp is expanded in the current
964 function. */
965 if (SUPPORTS_STACK_ALIGNMENT)
966 crtl->need_drap = true;
967
968 if (setjmp_alias_set == -1)
969 setjmp_alias_set = new_alias_set ();
970
971 buf_addr = convert_memory_address (Pmode, buf_addr);
972
973 buf_addr = force_reg (Pmode, buf_addr);
974
975 /* We require that the user must pass a second argument of 1, because
976 that is what builtin_setjmp will return. */
977 gcc_assert (value == const1_rtx);
978
979 last = get_last_insn ();
980 if (targetm.have_builtin_longjmp ())
981 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
982 else
983 {
984 fp = gen_rtx_MEM (Pmode, buf_addr);
985 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
986 GET_MODE_SIZE (Pmode)));
987
988 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
989 2 * GET_MODE_SIZE (Pmode)));
990 set_mem_alias_set (fp, setjmp_alias_set);
991 set_mem_alias_set (lab, setjmp_alias_set);
992 set_mem_alias_set (stack, setjmp_alias_set);
993
994 /* Pick up FP, label, and SP from the block and jump. This code is
995 from expand_goto in stmt.c; see there for detailed comments. */
996 if (targetm.have_nonlocal_goto ())
997 /* We have to pass a value to the nonlocal_goto pattern that will
998 get copied into the static_chain pointer, but it does not matter
999 what that value is, because builtin_setjmp does not use it. */
1000 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1001 else
1002 {
1003 lab = copy_to_reg (lab);
1004
1005 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1006 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1007
1008 emit_move_insn (hard_frame_pointer_rtx, fp);
1009 emit_stack_restore (SAVE_NONLOCAL, stack);
1010
1011 emit_use (hard_frame_pointer_rtx);
1012 emit_use (stack_pointer_rtx);
1013 emit_indirect_jump (lab);
1014 }
1015 }
1016
1017 /* Search backwards and mark the jump insn as a non-local goto.
1018 Note that this precludes the use of __builtin_longjmp to a
1019 __builtin_setjmp target in the same function. However, we've
1020 already cautioned the user that these functions are for
1021 internal exception handling use only. */
1022 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1023 {
1024 gcc_assert (insn != last);
1025
1026 if (JUMP_P (insn))
1027 {
1028 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1029 break;
1030 }
1031 else if (CALL_P (insn))
1032 break;
1033 }
1034 }
1035
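/* The five-word buffer is shared by the setjmp setup and longjmp
   expanders above: word 0 holds the saved frame pointer, word 1 the
   receiver label, and the remaining words the machine-dependent stack
   save area.  A sketch of the intended pairing (internal, EH-only use;
   the jump may not target a setup in the same function, and the second
   longjmp argument must be 1):

     intptr_t buf[5];

     int f (void)
     {
       if (__builtin_setjmp (buf))
         return 1;          // resumed here by the longjmp
       g ();                // eventually does __builtin_longjmp (buf, 1)
       return 0;
     }  */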
1036 static inline bool
1037 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1038 {
1039 return (iter->i < iter->n);
1040 }
1041
1042 /* This function validates the types of a function call argument list
1043 against a specified list of tree_codes. If the last specifier is a 0,
1044 that represents an ellipsis, otherwise the last specifier must be a
1045 VOID_TYPE. */
1046
1047 static bool
1048 validate_arglist (const_tree callexpr, ...)
1049 {
1050 enum tree_code code;
1051 bool res = 0;
1052 va_list ap;
1053 const_call_expr_arg_iterator iter;
1054 const_tree arg;
1055
1056 va_start (ap, callexpr);
1057 init_const_call_expr_arg_iterator (callexpr, &iter);
1058
1059 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1060 tree fn = CALL_EXPR_FN (callexpr);
1061 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1062
1063 for (unsigned argno = 1; ; ++argno)
1064 {
1065 code = (enum tree_code) va_arg (ap, int);
1066
1067 switch (code)
1068 {
1069 case 0:
1070 /* This signifies an ellipsis; any further arguments are all ok. */
1071 res = true;
1072 goto end;
1073 case VOID_TYPE:
1074 /* This signifies an endlink, if no arguments remain, return
1075 true, otherwise return false. */
1076 res = !more_const_call_expr_args_p (&iter);
1077 goto end;
1078 case POINTER_TYPE:
1079 /* The actual argument must be nonnull when either the whole
1080 called function has been declared nonnull, or when the formal
1081 argument corresponding to the actual argument has been. */
1082 if (argmap
1083 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1084 {
1085 arg = next_const_call_expr_arg (&iter);
1086 if (!validate_arg (arg, code) || integer_zerop (arg))
1087 goto end;
1088 break;
1089 }
1090 /* FALLTHRU */
1091 default:
1092 /* If no parameters remain or the parameter's code does not
1093 match the specified code, return false. Otherwise continue
1094 checking any remaining arguments. */
1095 arg = next_const_call_expr_arg (&iter);
1096 if (!validate_arg (arg, code))
1097 goto end;
1098 break;
1099 }
1100 }
1101
1102 /* We need gotos here since we can only have one VA_CLOSE in a
1103 function. */
1104 end: ;
1105 va_end (ap);
1106
1107 BITMAP_FREE (argmap);
1108
1109 return res;
1110 }
1111
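/* Typical uses of the checker above pass the expected parameter codes
   followed by VOID_TYPE, e.g. (as seen later in this file):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   for a (void *, void *) builtin such as __builtin_nonlocal_goto, or

     validate_arglist (exp, POINTER_TYPE, 0)

   for __builtin_prefetch, where the trailing 0 stands for an
   ellipsis.  */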
1112 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1113 and the address of the save area. */
1114
1115 static rtx
1116 expand_builtin_nonlocal_goto (tree exp)
1117 {
1118 tree t_label, t_save_area;
1119 rtx r_label, r_save_area, r_fp, r_sp;
1120 rtx_insn *insn;
1121
1122 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1123 return NULL_RTX;
1124
1125 t_label = CALL_EXPR_ARG (exp, 0);
1126 t_save_area = CALL_EXPR_ARG (exp, 1);
1127
1128 r_label = expand_normal (t_label);
1129 r_label = convert_memory_address (Pmode, r_label);
1130 r_save_area = expand_normal (t_save_area);
1131 r_save_area = convert_memory_address (Pmode, r_save_area);
1132 /* Copy the address of the save location to a register just in case it was
1133 based on the frame pointer. */
1134 r_save_area = copy_to_reg (r_save_area);
1135 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1136 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1137 plus_constant (Pmode, r_save_area,
1138 GET_MODE_SIZE (Pmode)));
1139
1140 crtl->has_nonlocal_goto = 1;
1141
1142 /* ??? We no longer need to pass the static chain value, afaik. */
1143 if (targetm.have_nonlocal_goto ())
1144 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1145 else
1146 {
1147 r_label = copy_to_reg (r_label);
1148
1149 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1150 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1151
1152 /* Restore frame pointer for containing function. */
1153 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1154 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1155
1156 /* USE of hard_frame_pointer_rtx added for consistency;
1157 not clear if really needed. */
1158 emit_use (hard_frame_pointer_rtx);
1159 emit_use (stack_pointer_rtx);
1160
1161 /* If the architecture is using a GP register, we must
1162 conservatively assume that the target function makes use of it.
1163 The prologue of functions with nonlocal gotos must therefore
1164 initialize the GP register to the appropriate value, and we
1165 must then make sure that this value is live at the point
1166 of the jump. (Note that this doesn't necessarily apply
1167 to targets with a nonlocal_goto pattern; they are free
1168 to implement it in their own way. Note also that this is
1169 a no-op if the GP register is a global invariant.) */
1170 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1171 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1172 emit_use (pic_offset_table_rtx);
1173
1174 emit_indirect_jump (r_label);
1175 }
1176
1177 /* Search backwards to the jump insn and mark it as a
1178 non-local goto. */
1179 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1180 {
1181 if (JUMP_P (insn))
1182 {
1183 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1184 break;
1185 }
1186 else if (CALL_P (insn))
1187 break;
1188 }
1189
1190 return const0_rtx;
1191 }
1192
1193 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1194 (not all will be used on all machines) that was passed to __builtin_setjmp.
1195 It updates the stack pointer in that block to the current value. This is
1196 also called directly by the SJLJ exception handling code. */
1197
1198 void
1199 expand_builtin_update_setjmp_buf (rtx buf_addr)
1200 {
1201 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1202 rtx stack_save
1203 = gen_rtx_MEM (sa_mode,
1204 memory_address
1205 (sa_mode,
1206 plus_constant (Pmode, buf_addr,
1207 2 * GET_MODE_SIZE (Pmode))));
1208
1209 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1210 }
1211
1212 /* Expand a call to __builtin_prefetch. For a target that does not support
1213 data prefetch, evaluate the memory address argument in case it has side
1214 effects. */
1215
1216 static void
1217 expand_builtin_prefetch (tree exp)
1218 {
1219 tree arg0, arg1, arg2;
1220 int nargs;
1221 rtx op0, op1, op2;
1222
1223 if (!validate_arglist (exp, POINTER_TYPE, 0))
1224 return;
1225
1226 arg0 = CALL_EXPR_ARG (exp, 0);
1227
1228 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1229 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1230 locality). */
1231 nargs = call_expr_nargs (exp);
1232 if (nargs > 1)
1233 arg1 = CALL_EXPR_ARG (exp, 1);
1234 else
1235 arg1 = integer_zero_node;
1236 if (nargs > 2)
1237 arg2 = CALL_EXPR_ARG (exp, 2);
1238 else
1239 arg2 = integer_three_node;
1240
1241 /* Argument 0 is an address. */
1242 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1243
1244 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1245 if (TREE_CODE (arg1) != INTEGER_CST)
1246 {
1247 error ("second argument to %<__builtin_prefetch%> must be a constant");
1248 arg1 = integer_zero_node;
1249 }
1250 op1 = expand_normal (arg1);
1251 /* Argument 1 must be either zero or one. */
1252 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1253 {
1254 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1255 " using zero");
1256 op1 = const0_rtx;
1257 }
1258
1259 /* Argument 2 (locality) must be a compile-time constant int. */
1260 if (TREE_CODE (arg2) != INTEGER_CST)
1261 {
1262 error ("third argument to %<__builtin_prefetch%> must be a constant");
1263 arg2 = integer_zero_node;
1264 }
1265 op2 = expand_normal (arg2);
1266 /* Argument 2 must be 0, 1, 2, or 3. */
1267 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1268 {
1269 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1270 op2 = const0_rtx;
1271 }
1272
1273 if (targetm.have_prefetch ())
1274 {
1275 struct expand_operand ops[3];
1276
1277 create_address_operand (&ops[0], op0);
1278 create_integer_operand (&ops[1], INTVAL (op1));
1279 create_integer_operand (&ops[2], INTVAL (op2));
1280 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1281 return;
1282 }
1283
1284 /* Don't do anything with direct references to volatile memory, but
1285 generate code to handle other side effects. */
1286 if (!MEM_P (op0) && side_effects_p (op0))
1287 emit_insn (op0);
1288 }
1289
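/* User-level code that reaches the expander above (illustrative):

     char buf[4096];
     __builtin_prefetch (&buf[256]);         // read, locality defaults to 3
     __builtin_prefetch (&buf[512], 1, 0);   // write, no temporal locality

   Non-constant or out-of-range rw/locality arguments are diagnosed and
   replaced by zero, as done above.  */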
1290 /* Get a MEM rtx for expression EXP which is the address of an operand
1291 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1292 the maximum length of the block of memory that might be accessed or
1293 NULL if unknown. */
1294
1295 static rtx
1296 get_memory_rtx (tree exp, tree len)
1297 {
1298 tree orig_exp = exp;
1299 rtx addr, mem;
1300
1301 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1302 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1303 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1304 exp = TREE_OPERAND (exp, 0);
1305
1306 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1307 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1308
1309 /* Get an expression we can use to find the attributes to assign to MEM.
1310 First remove any nops. */
1311 while (CONVERT_EXPR_P (exp)
1312 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1313 exp = TREE_OPERAND (exp, 0);
1314
1315 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1316 (as builtin stringops may alias with anything). */
1317 exp = fold_build2 (MEM_REF,
1318 build_array_type (char_type_node,
1319 build_range_type (sizetype,
1320 size_one_node, len)),
1321 exp, build_int_cst (ptr_type_node, 0));
1322
1323 /* If the MEM_REF has no acceptable address, try to get the base object
1324 from the original address we got, and build an all-aliasing
1325 unknown-sized access to that one. */
1326 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1327 set_mem_attributes (mem, exp, 0);
1328 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1329 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1330 0))))
1331 {
1332 exp = build_fold_addr_expr (exp);
1333 exp = fold_build2 (MEM_REF,
1334 build_array_type (char_type_node,
1335 build_range_type (sizetype,
1336 size_zero_node,
1337 NULL)),
1338 exp, build_int_cst (ptr_type_node, 0));
1339 set_mem_attributes (mem, exp, 0);
1340 }
1341 set_mem_alias_set (mem, 0);
1342 return mem;
1343 }
1344 \f
1345 /* Built-in functions to perform an untyped call and return. */
1346
1347 #define apply_args_mode \
1348 (this_target_builtins->x_apply_args_mode)
1349 #define apply_result_mode \
1350 (this_target_builtins->x_apply_result_mode)
1351
1352 /* Return the size required for the block returned by __builtin_apply_args,
1353 and initialize apply_args_mode. */
1354
1355 static int
1356 apply_args_size (void)
1357 {
1358 static int size = -1;
1359 int align;
1360 unsigned int regno;
1361 machine_mode mode;
1362
1363 /* The values computed by this function never change. */
1364 if (size < 0)
1365 {
1366 /* The first value is the incoming arg-pointer. */
1367 size = GET_MODE_SIZE (Pmode);
1368
1369 /* The second value is the structure value address unless this is
1370 passed as an "invisible" first argument. */
1371 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1372 size += GET_MODE_SIZE (Pmode);
1373
1374 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1375 if (FUNCTION_ARG_REGNO_P (regno))
1376 {
1377 mode = targetm.calls.get_raw_arg_mode (regno);
1378
1379 gcc_assert (mode != VOIDmode);
1380
1381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1382 if (size % align != 0)
1383 size = CEIL (size, align) * align;
1384 size += GET_MODE_SIZE (mode);
1385 apply_args_mode[regno] = mode;
1386 }
1387 else
1388 {
1389 apply_args_mode[regno] = VOIDmode;
1390 }
1391 }
1392 return size;
1393 }
1394
1395 /* Return the size required for the block returned by __builtin_apply,
1396 and initialize apply_result_mode. */
1397
1398 static int
1399 apply_result_size (void)
1400 {
1401 static int size = -1;
1402 int align, regno;
1403 machine_mode mode;
1404
1405 /* The values computed by this function never change. */
1406 if (size < 0)
1407 {
1408 size = 0;
1409
1410 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1411 if (targetm.calls.function_value_regno_p (regno))
1412 {
1413 mode = targetm.calls.get_raw_result_mode (regno);
1414
1415 gcc_assert (mode != VOIDmode);
1416
1417 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1418 if (size % align != 0)
1419 size = CEIL (size, align) * align;
1420 size += GET_MODE_SIZE (mode);
1421 apply_result_mode[regno] = mode;
1422 }
1423 else
1424 apply_result_mode[regno] = VOIDmode;
1425
1426 /* Allow targets that use untyped_call and untyped_return to override
1427 the size so that machine-specific information can be stored here. */
1428 #ifdef APPLY_RESULT_SIZE
1429 size = APPLY_RESULT_SIZE;
1430 #endif
1431 }
1432 return size;
1433 }
1434
1435 /* Create a vector describing the result block RESULT. If SAVEP is true,
1436 the result block is used to save the values; otherwise it is used to
1437 restore the values. */
1438
1439 static rtx
1440 result_vector (int savep, rtx result)
1441 {
1442 int regno, size, align, nelts;
1443 machine_mode mode;
1444 rtx reg, mem;
1445 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1446
1447 size = nelts = 0;
1448 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1449 if ((mode = apply_result_mode[regno]) != VOIDmode)
1450 {
1451 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1452 if (size % align != 0)
1453 size = CEIL (size, align) * align;
1454 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1455 mem = adjust_address (result, mode, size);
1456 savevec[nelts++] = (savep
1457 ? gen_rtx_SET (mem, reg)
1458 : gen_rtx_SET (reg, mem));
1459 size += GET_MODE_SIZE (mode);
1460 }
1461 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1462 }
1463
1464 /* Save the state required to perform an untyped call with the same
1465 arguments as were passed to the current function. */
1466
1467 static rtx
1468 expand_builtin_apply_args_1 (void)
1469 {
1470 rtx registers, tem;
1471 int size, align, regno;
1472 machine_mode mode;
1473 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1474
1475 /* Create a block where the arg-pointer, structure value address,
1476 and argument registers can be saved. */
1477 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1478
1479 /* Walk past the arg-pointer and structure value address. */
1480 size = GET_MODE_SIZE (Pmode);
1481 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1482 size += GET_MODE_SIZE (Pmode);
1483
1484 /* Save each register used in calling a function to the block. */
1485 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1486 if ((mode = apply_args_mode[regno]) != VOIDmode)
1487 {
1488 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1489 if (size % align != 0)
1490 size = CEIL (size, align) * align;
1491
1492 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1493
1494 emit_move_insn (adjust_address (registers, mode, size), tem);
1495 size += GET_MODE_SIZE (mode);
1496 }
1497
1498 /* Save the arg pointer to the block. */
1499 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1500 /* We need the pointer as the caller actually passed the arguments to us, not
1501 as we might have pretended they were passed. Make sure it's a valid
1502 operand, as emit_move_insn isn't expected to handle a PLUS. */
1503 if (STACK_GROWS_DOWNWARD)
1504 tem
1505 = force_operand (plus_constant (Pmode, tem,
1506 crtl->args.pretend_args_size),
1507 NULL_RTX);
1508 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1509
1510 size = GET_MODE_SIZE (Pmode);
1511
1512 /* Save the structure value address unless this is passed as an
1513 "invisible" first argument. */
1514 if (struct_incoming_value)
1515 {
1516 emit_move_insn (adjust_address (registers, Pmode, size),
1517 copy_to_reg (struct_incoming_value));
1518 size += GET_MODE_SIZE (Pmode);
1519 }
1520
1521 /* Return the address of the block. */
1522 return copy_addr_to_reg (XEXP (registers, 0));
1523 }
1524
1525 /* __builtin_apply_args returns block of memory allocated on
1526 the stack into which is stored the arg pointer, structure
1527 value address, static chain, and all the registers that might
1528 possibly be used in performing a function call. The code is
1529 moved to the start of the function so the incoming values are
1530 saved. */
1531
1532 static rtx
1533 expand_builtin_apply_args (void)
1534 {
1535 /* Don't do __builtin_apply_args more than once in a function.
1536 Save the result of the first call and reuse it. */
1537 if (apply_args_value != 0)
1538 return apply_args_value;
1539 {
1540 /* When this function is called, it means that registers must be
1541 saved on entry to this function. So we migrate the
1542 call to the first insn of this function. */
1543 rtx temp;
1544
1545 start_sequence ();
1546 temp = expand_builtin_apply_args_1 ();
1547 rtx_insn *seq = get_insns ();
1548 end_sequence ();
1549
1550 apply_args_value = temp;
1551
1552 /* Put the insns after the NOTE that starts the function.
1553 If this is inside a start_sequence, make the outer-level insn
1554 chain current, so the code is placed at the start of the
1555 function. If internal_arg_pointer is a non-virtual pseudo,
1556 it needs to be placed after the function that initializes
1557 that pseudo. */
1558 push_topmost_sequence ();
1559 if (REG_P (crtl->args.internal_arg_pointer)
1560 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1561 emit_insn_before (seq, parm_birth_insn);
1562 else
1563 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1564 pop_topmost_sequence ();
1565 return temp;
1566 }
1567 }
1568
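/* The block produced by __builtin_apply_args above is normally consumed
   by __builtin_apply and __builtin_return, handled below (illustrative,
   user-level sketch; target_fn stands for any function with a
   compatible signature and 64 is a caller-chosen upper bound on the
   size of the pushed argument block):

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);  */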
1569 /* Perform an untyped call and save the state required to perform an
1570 untyped return of whatever value was returned by the given function. */
1571
1572 static rtx
1573 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1574 {
1575 int size, align, regno;
1576 machine_mode mode;
1577 rtx incoming_args, result, reg, dest, src;
1578 rtx_call_insn *call_insn;
1579 rtx old_stack_level = 0;
1580 rtx call_fusage = 0;
1581 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1582
1583 arguments = convert_memory_address (Pmode, arguments);
1584
1585 /* Create a block where the return registers can be saved. */
1586 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1587
1588 /* Fetch the arg pointer from the ARGUMENTS block. */
1589 incoming_args = gen_reg_rtx (Pmode);
1590 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1591 if (!STACK_GROWS_DOWNWARD)
1592 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1593 incoming_args, 0, OPTAB_LIB_WIDEN);
1594
1595 /* Push a new argument block and copy the arguments. Do not allow
1596 the (potential) memcpy call below to interfere with our stack
1597 manipulations. */
1598 do_pending_stack_adjust ();
1599 NO_DEFER_POP;
1600
1601 /* Save the stack with nonlocal if available. */
1602 if (targetm.have_save_stack_nonlocal ())
1603 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1604 else
1605 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1606
1607 /* Allocate a block of memory onto the stack and copy the memory
1608 arguments to the outgoing arguments address. We can pass TRUE
1609 as the 4th argument because we just saved the stack pointer
1610 and will restore it right after the call. */
1611 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1612
1613 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1614 may have already set current_function_calls_alloca to true.
1615 current_function_calls_alloca won't be set if argsize is zero,
1616 so we have to guarantee need_drap is true here. */
1617 if (SUPPORTS_STACK_ALIGNMENT)
1618 crtl->need_drap = true;
1619
1620 dest = virtual_outgoing_args_rtx;
1621 if (!STACK_GROWS_DOWNWARD)
1622 {
1623 if (CONST_INT_P (argsize))
1624 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1625 else
1626 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1627 }
1628 dest = gen_rtx_MEM (BLKmode, dest);
1629 set_mem_align (dest, PARM_BOUNDARY);
1630 src = gen_rtx_MEM (BLKmode, incoming_args);
1631 set_mem_align (src, PARM_BOUNDARY);
1632 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1633
1634 /* Refer to the argument block. */
1635 apply_args_size ();
1636 arguments = gen_rtx_MEM (BLKmode, arguments);
1637 set_mem_align (arguments, PARM_BOUNDARY);
1638
1639 /* Walk past the arg-pointer and structure value address. */
1640 size = GET_MODE_SIZE (Pmode);
1641 if (struct_value)
1642 size += GET_MODE_SIZE (Pmode);
1643
1644 /* Restore each of the registers previously saved. Make USE insns
1645 for each of these registers for use in making the call. */
1646 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1647 if ((mode = apply_args_mode[regno]) != VOIDmode)
1648 {
1649 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1650 if (size % align != 0)
1651 size = CEIL (size, align) * align;
1652 reg = gen_rtx_REG (mode, regno);
1653 emit_move_insn (reg, adjust_address (arguments, mode, size));
1654 use_reg (&call_fusage, reg);
1655 size += GET_MODE_SIZE (mode);
1656 }
1657
1658 /* Restore the structure value address unless this is passed as an
1659 "invisible" first argument. */
1660 size = GET_MODE_SIZE (Pmode);
1661 if (struct_value)
1662 {
1663 rtx value = gen_reg_rtx (Pmode);
1664 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1665 emit_move_insn (struct_value, value);
1666 if (REG_P (struct_value))
1667 use_reg (&call_fusage, struct_value);
1668 size += GET_MODE_SIZE (Pmode);
1669 }
1670
1671 /* All arguments and registers used for the call are set up by now! */
1672 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1673
1674 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1675 and we don't want to load it into a register as an optimization,
1676 because prepare_call_address already did it if it should be done. */
1677 if (GET_CODE (function) != SYMBOL_REF)
1678 function = memory_address (FUNCTION_MODE, function);
1679
1680 /* Generate the actual call instruction and save the return value. */
1681 if (targetm.have_untyped_call ())
1682 {
1683 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1684 emit_call_insn (targetm.gen_untyped_call (mem, result,
1685 result_vector (1, result)));
1686 }
1687 else if (targetm.have_call_value ())
1688 {
1689 rtx valreg = 0;
1690
1691 /* Locate the unique return register. It is not possible to
1692 express a call that sets more than one return register using
1693 call_value; use untyped_call for that. In fact, untyped_call
1694 only needs to save the return registers in the given block. */
1695 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1696 if ((mode = apply_result_mode[regno]) != VOIDmode)
1697 {
1698 gcc_assert (!valreg); /* have_untyped_call required. */
1699
1700 valreg = gen_rtx_REG (mode, regno);
1701 }
1702
1703 emit_insn (targetm.gen_call_value (valreg,
1704 gen_rtx_MEM (FUNCTION_MODE, function),
1705 const0_rtx, NULL_RTX, const0_rtx));
1706
1707 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1708 }
1709 else
1710 gcc_unreachable ();
1711
1712 /* Find the CALL insn we just emitted, and attach the register usage
1713 information. */
1714 call_insn = last_call_insn ();
1715 add_function_usage_to (call_insn, call_fusage);
1716
1717 /* Restore the stack. */
1718 if (targetm.have_save_stack_nonlocal ())
1719 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1720 else
1721 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1722 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1723
1724 OK_DEFER_POP;
1725
1726 /* Return the address of the result block. */
1727 result = copy_addr_to_reg (XEXP (result, 0));
1728 return convert_memory_address (ptr_mode, result);
1729 }
1730
1731 /* Perform an untyped return. */
1732
1733 static void
1734 expand_builtin_return (rtx result)
1735 {
1736 int size, align, regno;
1737 machine_mode mode;
1738 rtx reg;
1739 rtx_insn *call_fusage = 0;
1740
1741 result = convert_memory_address (Pmode, result);
1742
1743 apply_result_size ();
1744 result = gen_rtx_MEM (BLKmode, result);
1745
1746 if (targetm.have_untyped_return ())
1747 {
1748 rtx vector = result_vector (0, result);
1749 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1750 emit_barrier ();
1751 return;
1752 }
1753
1754 /* Restore the return value and note that each value is used. */
1755 size = 0;
1756 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1757 if ((mode = apply_result_mode[regno]) != VOIDmode)
1758 {
1759 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1760 if (size % align != 0)
1761 size = CEIL (size, align) * align;
1762 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1763 emit_move_insn (reg, adjust_address (result, mode, size));
1764
1765 push_to_sequence (call_fusage);
1766 emit_use (reg);
1767 call_fusage = get_insns ();
1768 end_sequence ();
1769 size += GET_MODE_SIZE (mode);
1770 }
1771
1772 /* Put the USE insns before the return. */
1773 emit_insn (call_fusage);
1774
1775 /* Return whatever values were restored by jumping directly to the end
1776 of the function. */
1777 expand_naked_return ();
1778 }
1779
1780 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1781
1782 static enum type_class
1783 type_to_class (tree type)
1784 {
1785 switch (TREE_CODE (type))
1786 {
1787 case VOID_TYPE: return void_type_class;
1788 case INTEGER_TYPE: return integer_type_class;
1789 case ENUMERAL_TYPE: return enumeral_type_class;
1790 case BOOLEAN_TYPE: return boolean_type_class;
1791 case POINTER_TYPE: return pointer_type_class;
1792 case REFERENCE_TYPE: return reference_type_class;
1793 case OFFSET_TYPE: return offset_type_class;
1794 case REAL_TYPE: return real_type_class;
1795 case COMPLEX_TYPE: return complex_type_class;
1796 case FUNCTION_TYPE: return function_type_class;
1797 case METHOD_TYPE: return method_type_class;
1798 case RECORD_TYPE: return record_type_class;
1799 case UNION_TYPE:
1800 case QUAL_UNION_TYPE: return union_type_class;
1801 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1802 ? string_type_class : array_type_class);
1803 case LANG_TYPE: return lang_type_class;
1804 default: return no_type_class;
1805 }
1806 }
1807
1808 /* Expand a call EXP to __builtin_classify_type. */
1809
1810 static rtx
1811 expand_builtin_classify_type (tree exp)
1812 {
1813 if (call_expr_nargs (exp))
1814 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1815 return GEN_INT (no_type_class);
1816 }
1817
1818 /* This helper macro, meant to be used in mathfn_built_in below,
1819 determines which among a set of three builtin math functions is
1820 appropriate for a given type mode. The `F' and `L' cases are
1821 automatically generated from the `double' case. */
1822 #define CASE_MATHFN(MATHFN) \
1823 CASE_CFN_##MATHFN: \
1824 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1825 fcodel = BUILT_IN_##MATHFN##L ; break;
1826 /* Similar to above, but appends _R after any F/L suffix. */
1827 #define CASE_MATHFN_REENT(MATHFN) \
1828 case CFN_BUILT_IN_##MATHFN##_R: \
1829 case CFN_BUILT_IN_##MATHFN##F_R: \
1830 case CFN_BUILT_IN_##MATHFN##L_R: \
1831 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1832 fcodel = BUILT_IN_##MATHFN##L_R ; break;
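/* For instance, CASE_MATHFN (SQRT) expands to roughly:

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */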
1833
1834 /* Return a function equivalent to FN but operating on floating-point
1835 values of type TYPE, or END_BUILTINS if no such function exists.
1836 This is purely an operation on function codes; it does not guarantee
1837 that the target actually has an implementation of the function. */
1838
1839 static built_in_function
1840 mathfn_built_in_2 (tree type, combined_fn fn)
1841 {
1842 built_in_function fcode, fcodef, fcodel;
1843
1844 switch (fn)
1845 {
1846 CASE_MATHFN (ACOS)
1847 CASE_MATHFN (ACOSH)
1848 CASE_MATHFN (ASIN)
1849 CASE_MATHFN (ASINH)
1850 CASE_MATHFN (ATAN)
1851 CASE_MATHFN (ATAN2)
1852 CASE_MATHFN (ATANH)
1853 CASE_MATHFN (CBRT)
1854 CASE_MATHFN (CEIL)
1855 CASE_MATHFN (CEXPI)
1856 CASE_MATHFN (COPYSIGN)
1857 CASE_MATHFN (COS)
1858 CASE_MATHFN (COSH)
1859 CASE_MATHFN (DREM)
1860 CASE_MATHFN (ERF)
1861 CASE_MATHFN (ERFC)
1862 CASE_MATHFN (EXP)
1863 CASE_MATHFN (EXP10)
1864 CASE_MATHFN (EXP2)
1865 CASE_MATHFN (EXPM1)
1866 CASE_MATHFN (FABS)
1867 CASE_MATHFN (FDIM)
1868 CASE_MATHFN (FLOOR)
1869 CASE_MATHFN (FMA)
1870 CASE_MATHFN (FMAX)
1871 CASE_MATHFN (FMIN)
1872 CASE_MATHFN (FMOD)
1873 CASE_MATHFN (FREXP)
1874 CASE_MATHFN (GAMMA)
1875 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1876 CASE_MATHFN (HUGE_VAL)
1877 CASE_MATHFN (HYPOT)
1878 CASE_MATHFN (ILOGB)
1879 CASE_MATHFN (ICEIL)
1880 CASE_MATHFN (IFLOOR)
1881 CASE_MATHFN (INF)
1882 CASE_MATHFN (IRINT)
1883 CASE_MATHFN (IROUND)
1884 CASE_MATHFN (ISINF)
1885 CASE_MATHFN (J0)
1886 CASE_MATHFN (J1)
1887 CASE_MATHFN (JN)
1888 CASE_MATHFN (LCEIL)
1889 CASE_MATHFN (LDEXP)
1890 CASE_MATHFN (LFLOOR)
1891 CASE_MATHFN (LGAMMA)
1892 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1893 CASE_MATHFN (LLCEIL)
1894 CASE_MATHFN (LLFLOOR)
1895 CASE_MATHFN (LLRINT)
1896 CASE_MATHFN (LLROUND)
1897 CASE_MATHFN (LOG)
1898 CASE_MATHFN (LOG10)
1899 CASE_MATHFN (LOG1P)
1900 CASE_MATHFN (LOG2)
1901 CASE_MATHFN (LOGB)
1902 CASE_MATHFN (LRINT)
1903 CASE_MATHFN (LROUND)
1904 CASE_MATHFN (MODF)
1905 CASE_MATHFN (NAN)
1906 CASE_MATHFN (NANS)
1907 CASE_MATHFN (NEARBYINT)
1908 CASE_MATHFN (NEXTAFTER)
1909 CASE_MATHFN (NEXTTOWARD)
1910 CASE_MATHFN (POW)
1911 CASE_MATHFN (POWI)
1912 CASE_MATHFN (POW10)
1913 CASE_MATHFN (REMAINDER)
1914 CASE_MATHFN (REMQUO)
1915 CASE_MATHFN (RINT)
1916 CASE_MATHFN (ROUND)
1917 CASE_MATHFN (SCALB)
1918 CASE_MATHFN (SCALBLN)
1919 CASE_MATHFN (SCALBN)
1920 CASE_MATHFN (SIGNBIT)
1921 CASE_MATHFN (SIGNIFICAND)
1922 CASE_MATHFN (SIN)
1923 CASE_MATHFN (SINCOS)
1924 CASE_MATHFN (SINH)
1925 CASE_MATHFN (SQRT)
1926 CASE_MATHFN (TAN)
1927 CASE_MATHFN (TANH)
1928 CASE_MATHFN (TGAMMA)
1929 CASE_MATHFN (TRUNC)
1930 CASE_MATHFN (Y0)
1931 CASE_MATHFN (Y1)
1932 CASE_MATHFN (YN)
1933
1934 default:
1935 return END_BUILTINS;
1936 }
1937
1938 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1939 return fcode;
1940 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1941 return fcodef;
1942 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1943 return fcodel;
1944 else
1945 return END_BUILTINS;
1946 }
1947
1948 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1949 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1950 otherwise use the explicit declaration. If we can't do the conversion,
1951 return null. */
1952
1953 static tree
1954 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1955 {
1956 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1957 if (fcode2 == END_BUILTINS)
1958 return NULL_TREE;
1959
1960 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1961 return NULL_TREE;
1962
1963 return builtin_decl_explicit (fcode2);
1964 }
1965
1966 /* Like mathfn_built_in_1, but always use the implicit array. */
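/* For example, mathfn_built_in (long_double_type_node, CFN_BUILT_IN_SIN)
   is expected to return the implicit declaration of sinl, or NULL_TREE if
   no implicit declaration is available.  */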
1967
1968 tree
1969 mathfn_built_in (tree type, combined_fn fn)
1970 {
1971 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1972 }
1973
1974 /* Like mathfn_built_in_1, but take a built_in_function and
1975 always use the implicit array. */
1976
1977 tree
1978 mathfn_built_in (tree type, enum built_in_function fn)
1979 {
1980 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1981 }
1982
1983 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1984 return its code, otherwise return IFN_LAST. Note that this function
1985 only tests whether the function is defined in internal-fn.def, not whether
1986 it is actually available on the target. */
1987
1988 internal_fn
1989 associated_internal_fn (tree fndecl)
1990 {
1991 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1992 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1993 switch (DECL_FUNCTION_CODE (fndecl))
1994 {
1995 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1996 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1997 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1998 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1999 #include "internal-fn.def"
2000
2001 CASE_FLT_FN (BUILT_IN_POW10):
2002 return IFN_EXP10;
2003
2004 CASE_FLT_FN (BUILT_IN_DREM):
2005 return IFN_REMAINDER;
2006
2007 CASE_FLT_FN (BUILT_IN_SCALBN):
2008 CASE_FLT_FN (BUILT_IN_SCALBLN):
2009 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2010 return IFN_LDEXP;
2011 return IFN_LAST;
2012
2013 default:
2014 return IFN_LAST;
2015 }
2016 }
2017
2018 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2019 on the current target by a call to an internal function, return the
2020 code of that internal function, otherwise return IFN_LAST. The caller
2021 is responsible for ensuring that any side-effects of the built-in
2022 call are dealt with correctly. E.g. if CALL sets errno, the caller
2023 must decide that the errno result isn't needed or make it available
2024 in some other way. */
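/* For example, a GIMPLE call to __builtin_sqrtf may be reported as
   replaceable by IFN_SQRT when the target provides a sqrt optab for
   SFmode; otherwise IFN_LAST is returned and the library call is kept.  */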
2025
2026 internal_fn
2027 replacement_internal_fn (gcall *call)
2028 {
2029 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2030 {
2031 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2032 if (ifn != IFN_LAST)
2033 {
2034 tree_pair types = direct_internal_fn_types (ifn, call);
2035 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2036 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2037 return ifn;
2038 }
2039 }
2040 return IFN_LAST;
2041 }
2042
2043 /* Expand a call to the builtin ternary math functions (fma).
2044 Return NULL_RTX if a normal call should be emitted rather than expanding the
2045 function in-line. EXP is the expression that is a call to the builtin
2046 function; if convenient, the result should be placed in TARGET.
2047 SUBTARGET may be used as the target for computing one of EXP's
2048 operands. */
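/* In practice this currently handles __builtin_fma and friends: when the
   target provides an fma optab for the result mode, the call becomes a
   single fused multiply-add insn, otherwise a library call is emitted.  */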
2049
2050 static rtx
2051 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2052 {
2053 optab builtin_optab;
2054 rtx op0, op1, op2, result;
2055 rtx_insn *insns;
2056 tree fndecl = get_callee_fndecl (exp);
2057 tree arg0, arg1, arg2;
2058 machine_mode mode;
2059
2060 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2061 return NULL_RTX;
2062
2063 arg0 = CALL_EXPR_ARG (exp, 0);
2064 arg1 = CALL_EXPR_ARG (exp, 1);
2065 arg2 = CALL_EXPR_ARG (exp, 2);
2066
2067 switch (DECL_FUNCTION_CODE (fndecl))
2068 {
2069 CASE_FLT_FN (BUILT_IN_FMA):
2070 builtin_optab = fma_optab; break;
2071 default:
2072 gcc_unreachable ();
2073 }
2074
2075 /* Make a suitable register to place result in. */
2076 mode = TYPE_MODE (TREE_TYPE (exp));
2077
2078 /* Before working hard, check whether the instruction is available. */
2079 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2080 return NULL_RTX;
2081
2082 result = gen_reg_rtx (mode);
2083
2084 /* Always stabilize the argument list. */
2085 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2086 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2087 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2088
2089 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2090 op1 = expand_normal (arg1);
2091 op2 = expand_normal (arg2);
2092
2093 start_sequence ();
2094
2095 /* Compute into RESULT.
2096 Set RESULT to wherever the result comes back. */
2097 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2098 result, 0);
2099
2100 /* If we were unable to expand via the builtin, stop the sequence
2101 (without outputting the insns) and call the library function
2102 with the stabilized argument list. */
2103 if (result == 0)
2104 {
2105 end_sequence ();
2106 return expand_call (exp, target, target == const0_rtx);
2107 }
2108
2109 /* Output the entire sequence. */
2110 insns = get_insns ();
2111 end_sequence ();
2112 emit_insn (insns);
2113
2114 return result;
2115 }
2116
2117 /* Expand a call to the builtin sin and cos math functions.
2118 Return NULL_RTX if a normal call should be emitted rather than expanding the
2119 function in-line. EXP is the expression that is a call to the builtin
2120 function; if convenient, the result should be placed in TARGET.
2121 SUBTARGET may be used as the target for computing one of EXP's
2122 operands. */
2123
2124 static rtx
2125 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2126 {
2127 optab builtin_optab;
2128 rtx op0;
2129 rtx_insn *insns;
2130 tree fndecl = get_callee_fndecl (exp);
2131 machine_mode mode;
2132 tree arg;
2133
2134 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2135 return NULL_RTX;
2136
2137 arg = CALL_EXPR_ARG (exp, 0);
2138
2139 switch (DECL_FUNCTION_CODE (fndecl))
2140 {
2141 CASE_FLT_FN (BUILT_IN_SIN):
2142 CASE_FLT_FN (BUILT_IN_COS):
2143 builtin_optab = sincos_optab; break;
2144 default:
2145 gcc_unreachable ();
2146 }
2147
2148 /* Make a suitable register to place result in. */
2149 mode = TYPE_MODE (TREE_TYPE (exp));
2150
2151 /* Check if sincos insn is available, otherwise fall back
2152 to sin or cos insn. */
2153 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2154 switch (DECL_FUNCTION_CODE (fndecl))
2155 {
2156 CASE_FLT_FN (BUILT_IN_SIN):
2157 builtin_optab = sin_optab; break;
2158 CASE_FLT_FN (BUILT_IN_COS):
2159 builtin_optab = cos_optab; break;
2160 default:
2161 gcc_unreachable ();
2162 }
2163
2164 /* Before working hard, check whether the instruction is available. */
2165 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2166 {
2167 rtx result = gen_reg_rtx (mode);
2168
2169 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2170 need to expand the argument again. This way, we will not perform
2171 side-effects more than once. */
2172 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2173
2174 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2175
2176 start_sequence ();
2177
2178 /* Compute into RESULT.
2179 Set RESULT to wherever the result comes back. */
2180 if (builtin_optab == sincos_optab)
2181 {
2182 int ok;
2183
2184 switch (DECL_FUNCTION_CODE (fndecl))
2185 {
2186 CASE_FLT_FN (BUILT_IN_SIN):
2187 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2188 break;
2189 CASE_FLT_FN (BUILT_IN_COS):
2190 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2191 break;
2192 default:
2193 gcc_unreachable ();
2194 }
2195 gcc_assert (ok);
2196 }
2197 else
2198 result = expand_unop (mode, builtin_optab, op0, result, 0);
2199
2200 if (result != 0)
2201 {
2202 /* Output the entire sequence. */
2203 insns = get_insns ();
2204 end_sequence ();
2205 emit_insn (insns);
2206 return result;
2207 }
2208
2209 /* If we were unable to expand via the builtin, stop the sequence
2210 (without outputting the insns) and call the library function
2211 with the stabilized argument list. */
2212 end_sequence ();
2213 }
2214
2215 return expand_call (exp, target, target == const0_rtx);
2216 }
2217
2218 /* Given an interclass math builtin decl FNDECL and its argument ARG
2219 return an RTL instruction code that implements the functionality.
2220 If that isn't possible or available return CODE_FOR_nothing. */
2221
2222 static enum insn_code
2223 interclass_mathfn_icode (tree arg, tree fndecl)
2224 {
2225 bool errno_set = false;
2226 optab builtin_optab = unknown_optab;
2227 machine_mode mode;
2228
2229 switch (DECL_FUNCTION_CODE (fndecl))
2230 {
2231 CASE_FLT_FN (BUILT_IN_ILOGB):
2232 errno_set = true; builtin_optab = ilogb_optab; break;
2233 CASE_FLT_FN (BUILT_IN_ISINF):
2234 builtin_optab = isinf_optab; break;
2235 case BUILT_IN_ISNORMAL:
2236 case BUILT_IN_ISFINITE:
2237 CASE_FLT_FN (BUILT_IN_FINITE):
2238 case BUILT_IN_FINITED32:
2239 case BUILT_IN_FINITED64:
2240 case BUILT_IN_FINITED128:
2241 case BUILT_IN_ISINFD32:
2242 case BUILT_IN_ISINFD64:
2243 case BUILT_IN_ISINFD128:
2244 /* These builtins have no optabs (yet). */
2245 break;
2246 default:
2247 gcc_unreachable ();
2248 }
2249
2250 /* There's no easy way to detect the case we need to set EDOM. */
2251 if (flag_errno_math && errno_set)
2252 return CODE_FOR_nothing;
2253
2254 /* Optab mode depends on the mode of the input argument. */
2255 mode = TYPE_MODE (TREE_TYPE (arg));
2256
2257 if (builtin_optab)
2258 return optab_handler (builtin_optab, mode);
2259 return CODE_FOR_nothing;
2260 }
2261
2262 /* Expand a call to one of the builtin math functions that operate on
2263 a floating-point argument and produce an integer result (ilogb, isinf,
2264 isnan, etc.).
2265 Return 0 if a normal call should be emitted rather than expanding the
2266 function in-line. EXP is the expression that is a call to the builtin
2267 function; if convenient, the result should be placed in TARGET. */
2268
2269 static rtx
2270 expand_builtin_interclass_mathfn (tree exp, rtx target)
2271 {
2272 enum insn_code icode = CODE_FOR_nothing;
2273 rtx op0;
2274 tree fndecl = get_callee_fndecl (exp);
2275 machine_mode mode;
2276 tree arg;
2277
2278 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2279 return NULL_RTX;
2280
2281 arg = CALL_EXPR_ARG (exp, 0);
2282 icode = interclass_mathfn_icode (arg, fndecl);
2283 mode = TYPE_MODE (TREE_TYPE (arg));
2284
2285 if (icode != CODE_FOR_nothing)
2286 {
2287 struct expand_operand ops[1];
2288 rtx_insn *last = get_last_insn ();
2289 tree orig_arg = arg;
2290
2291 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2292 need to expand the argument again. This way, we will not perform
2293 side-effects more than once. */
2294 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2295
2296 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2297
2298 if (mode != GET_MODE (op0))
2299 op0 = convert_to_mode (mode, op0, 0);
2300
2301 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2302 if (maybe_legitimize_operands (icode, 0, 1, ops)
2303 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2304 return ops[0].value;
2305
2306 delete_insns_since (last);
2307 CALL_EXPR_ARG (exp, 0) = orig_arg;
2308 }
2309
2310 return NULL_RTX;
2311 }
2312
2313 /* Expand a call to the builtin sincos math function.
2314 Return NULL_RTX if a normal call should be emitted rather than expanding the
2315 function in-line. EXP is the expression that is a call to the builtin
2316 function. */
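/* For example, a call such as sincos (x, &s, &c) is expanded into a
   single sincos insn computing both results when sincos_optab has a
   handler for the argument's mode; otherwise the library call is kept.  */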
2317
2318 static rtx
2319 expand_builtin_sincos (tree exp)
2320 {
2321 rtx op0, op1, op2, target1, target2;
2322 machine_mode mode;
2323 tree arg, sinp, cosp;
2324 int result;
2325 location_t loc = EXPR_LOCATION (exp);
2326 tree alias_type, alias_off;
2327
2328 if (!validate_arglist (exp, REAL_TYPE,
2329 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2330 return NULL_RTX;
2331
2332 arg = CALL_EXPR_ARG (exp, 0);
2333 sinp = CALL_EXPR_ARG (exp, 1);
2334 cosp = CALL_EXPR_ARG (exp, 2);
2335
2336 /* Make a suitable register to place result in. */
2337 mode = TYPE_MODE (TREE_TYPE (arg));
2338
2339 /* Check if sincos insn is available, otherwise emit the call. */
2340 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2341 return NULL_RTX;
2342
2343 target1 = gen_reg_rtx (mode);
2344 target2 = gen_reg_rtx (mode);
2345
2346 op0 = expand_normal (arg);
2347 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2348 alias_off = build_int_cst (alias_type, 0);
2349 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2350 sinp, alias_off));
2351 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2352 cosp, alias_off));
2353
2354 /* Compute into target1 and target2.
2355 Set TARGET to wherever the result comes back. */
2356 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2357 gcc_assert (result);
2358
2359 /* Move target1 and target2 to the memory locations indicated
2360 by op1 and op2. */
2361 emit_move_insn (op1, target1);
2362 emit_move_insn (op2, target2);
2363
2364 return const0_rtx;
2365 }
2366
2367 /* Expand a call to the internal cexpi builtin to the sincos math function.
2368 EXP is the expression that is a call to the builtin function; if convenient,
2369 the result should be placed in TARGET. */
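/* __builtin_cexpi (x) computes cos (x) + i * sin (x).  The expansion
   below tries, in order, the sincos optab, a libcall to sincos if the
   C library provides it, and finally a libcall to cexp.  */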
2370
2371 static rtx
2372 expand_builtin_cexpi (tree exp, rtx target)
2373 {
2374 tree fndecl = get_callee_fndecl (exp);
2375 tree arg, type;
2376 machine_mode mode;
2377 rtx op0, op1, op2;
2378 location_t loc = EXPR_LOCATION (exp);
2379
2380 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2381 return NULL_RTX;
2382
2383 arg = CALL_EXPR_ARG (exp, 0);
2384 type = TREE_TYPE (arg);
2385 mode = TYPE_MODE (TREE_TYPE (arg));
2386
2387 /* Try expanding via a sincos optab, fall back to emitting a libcall
2388 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2389 is only generated from sincos or cexp, or when either of them is available. */
2390 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2391 {
2392 op1 = gen_reg_rtx (mode);
2393 op2 = gen_reg_rtx (mode);
2394
2395 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2396
2397 /* Compute into op1 and op2. */
2398 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2399 }
2400 else if (targetm.libc_has_function (function_sincos))
2401 {
2402 tree call, fn = NULL_TREE;
2403 tree top1, top2;
2404 rtx op1a, op2a;
2405
2406 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2407 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2408 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2409 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2410 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2411 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2412 else
2413 gcc_unreachable ();
2414
2415 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2416 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2417 op1a = copy_addr_to_reg (XEXP (op1, 0));
2418 op2a = copy_addr_to_reg (XEXP (op2, 0));
2419 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2420 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2421
2422 /* Make sure not to fold the sincos call again. */
2423 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2424 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2425 call, 3, arg, top1, top2));
2426 }
2427 else
2428 {
2429 tree call, fn = NULL_TREE, narg;
2430 tree ctype = build_complex_type (type);
2431
2432 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2433 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2434 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2435 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2436 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2437 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2438 else
2439 gcc_unreachable ();
2440
2441 /* If we don't have a decl for cexp create one. This is the
2442 friendliest fallback if the user calls __builtin_cexpi
2443 without full target C99 function support. */
2444 if (fn == NULL_TREE)
2445 {
2446 tree fntype;
2447 const char *name = NULL;
2448
2449 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2450 name = "cexpf";
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2452 name = "cexp";
2453 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2454 name = "cexpl";
2455
2456 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2457 fn = build_fn_decl (name, fntype);
2458 }
2459
2460 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2461 build_real (type, dconst0), arg);
2462
2463 /* Make sure not to fold the cexp call again. */
2464 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2465 return expand_expr (build_call_nary (ctype, call, 1, narg),
2466 target, VOIDmode, EXPAND_NORMAL);
2467 }
2468
2469 /* Now build the proper return type. */
2470 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2471 make_tree (TREE_TYPE (arg), op2),
2472 make_tree (TREE_TYPE (arg), op1)),
2473 target, VOIDmode, EXPAND_NORMAL);
2474 }
2475
2476 /* Conveniently construct a function call expression. FNDECL names the
2477 function to be called, N is the number of arguments, and the "..."
2478 parameters are the argument expressions. Unlike build_call_expr
2479 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2480
2481 static tree
2482 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2483 {
2484 va_list ap;
2485 tree fntype = TREE_TYPE (fndecl);
2486 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2487
2488 va_start (ap, n);
2489 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2490 va_end (ap);
2491 SET_EXPR_LOCATION (fn, loc);
2492 return fn;
2493 }
2494
2495 /* Expand a call to one of the builtin rounding functions gcc defines
2496 as an extension (lfloor and lceil). As these are gcc extensions we
2497 do not need to worry about setting errno to EDOM.
2498 If expanding via optab fails, lower expression to (int)(floor(x)).
2499 EXP is the expression that is a call to the builtin function;
2500 if convenient, the result should be placed in TARGET. */
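/* For example, __builtin_lfloor (x) uses the lfloor optab when the target
   provides one; otherwise it is lowered to roughly (long) floor (x),
   possibly calling a bare "floor" libfunc on targets without full C99
   support.  */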
2501
2502 static rtx
2503 expand_builtin_int_roundingfn (tree exp, rtx target)
2504 {
2505 convert_optab builtin_optab;
2506 rtx op0, tmp;
2507 rtx_insn *insns;
2508 tree fndecl = get_callee_fndecl (exp);
2509 enum built_in_function fallback_fn;
2510 tree fallback_fndecl;
2511 machine_mode mode;
2512 tree arg;
2513
2514 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2515 gcc_unreachable ();
2516
2517 arg = CALL_EXPR_ARG (exp, 0);
2518
2519 switch (DECL_FUNCTION_CODE (fndecl))
2520 {
2521 CASE_FLT_FN (BUILT_IN_ICEIL):
2522 CASE_FLT_FN (BUILT_IN_LCEIL):
2523 CASE_FLT_FN (BUILT_IN_LLCEIL):
2524 builtin_optab = lceil_optab;
2525 fallback_fn = BUILT_IN_CEIL;
2526 break;
2527
2528 CASE_FLT_FN (BUILT_IN_IFLOOR):
2529 CASE_FLT_FN (BUILT_IN_LFLOOR):
2530 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2531 builtin_optab = lfloor_optab;
2532 fallback_fn = BUILT_IN_FLOOR;
2533 break;
2534
2535 default:
2536 gcc_unreachable ();
2537 }
2538
2539 /* Make a suitable register to place result in. */
2540 mode = TYPE_MODE (TREE_TYPE (exp));
2541
2542 target = gen_reg_rtx (mode);
2543
2544 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2545 need to expand the argument again. This way, we will not perform
2546 side-effects more than once. */
2547 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2548
2549 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2550
2551 start_sequence ();
2552
2553 /* Compute into TARGET. */
2554 if (expand_sfix_optab (target, op0, builtin_optab))
2555 {
2556 /* Output the entire sequence. */
2557 insns = get_insns ();
2558 end_sequence ();
2559 emit_insn (insns);
2560 return target;
2561 }
2562
2563 /* If we were unable to expand via the builtin, stop the sequence
2564 (without outputting the insns). */
2565 end_sequence ();
2566
2567 /* Fall back to floating point rounding optab. */
2568 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2569
2570 /* For non-C99 targets we may end up without a fallback fndecl here
2571 if the user called __builtin_lfloor directly. In this case emit
2572 a call to the floor/ceil variants nevertheless. This should result
2573 in the best user experience for targets without full C99 support. */
2574 if (fallback_fndecl == NULL_TREE)
2575 {
2576 tree fntype;
2577 const char *name = NULL;
2578
2579 switch (DECL_FUNCTION_CODE (fndecl))
2580 {
2581 case BUILT_IN_ICEIL:
2582 case BUILT_IN_LCEIL:
2583 case BUILT_IN_LLCEIL:
2584 name = "ceil";
2585 break;
2586 case BUILT_IN_ICEILF:
2587 case BUILT_IN_LCEILF:
2588 case BUILT_IN_LLCEILF:
2589 name = "ceilf";
2590 break;
2591 case BUILT_IN_ICEILL:
2592 case BUILT_IN_LCEILL:
2593 case BUILT_IN_LLCEILL:
2594 name = "ceill";
2595 break;
2596 case BUILT_IN_IFLOOR:
2597 case BUILT_IN_LFLOOR:
2598 case BUILT_IN_LLFLOOR:
2599 name = "floor";
2600 break;
2601 case BUILT_IN_IFLOORF:
2602 case BUILT_IN_LFLOORF:
2603 case BUILT_IN_LLFLOORF:
2604 name = "floorf";
2605 break;
2606 case BUILT_IN_IFLOORL:
2607 case BUILT_IN_LFLOORL:
2608 case BUILT_IN_LLFLOORL:
2609 name = "floorl";
2610 break;
2611 default:
2612 gcc_unreachable ();
2613 }
2614
2615 fntype = build_function_type_list (TREE_TYPE (arg),
2616 TREE_TYPE (arg), NULL_TREE);
2617 fallback_fndecl = build_fn_decl (name, fntype);
2618 }
2619
2620 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2621
2622 tmp = expand_normal (exp);
2623 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2624
2625 /* Truncate the result of floating point optab to integer
2626 via expand_fix (). */
2627 target = gen_reg_rtx (mode);
2628 expand_fix (target, tmp, 0);
2629
2630 return target;
2631 }
2632
2633 /* Expand a call to one of the builtin math functions doing integer
2634 conversion (lrint).
2635 Return 0 if a normal call should be emitted rather than expanding the
2636 function in-line. EXP is the expression that is a call to the builtin
2637 function; if convenient, the result should be placed in TARGET. */
2638
2639 static rtx
2640 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2641 {
2642 convert_optab builtin_optab;
2643 rtx op0;
2644 rtx_insn *insns;
2645 tree fndecl = get_callee_fndecl (exp);
2646 tree arg;
2647 machine_mode mode;
2648 enum built_in_function fallback_fn = BUILT_IN_NONE;
2649
2650 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2651 gcc_unreachable ();
2652
2653 arg = CALL_EXPR_ARG (exp, 0);
2654
2655 switch (DECL_FUNCTION_CODE (fndecl))
2656 {
2657 CASE_FLT_FN (BUILT_IN_IRINT):
2658 fallback_fn = BUILT_IN_LRINT;
2659 gcc_fallthrough ();
2660 CASE_FLT_FN (BUILT_IN_LRINT):
2661 CASE_FLT_FN (BUILT_IN_LLRINT):
2662 builtin_optab = lrint_optab;
2663 break;
2664
2665 CASE_FLT_FN (BUILT_IN_IROUND):
2666 fallback_fn = BUILT_IN_LROUND;
2667 gcc_fallthrough ();
2668 CASE_FLT_FN (BUILT_IN_LROUND):
2669 CASE_FLT_FN (BUILT_IN_LLROUND):
2670 builtin_optab = lround_optab;
2671 break;
2672
2673 default:
2674 gcc_unreachable ();
2675 }
2676
2677 /* There's no easy way to detect the case we need to set EDOM. */
2678 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2679 return NULL_RTX;
2680
2681 /* Make a suitable register to place result in. */
2682 mode = TYPE_MODE (TREE_TYPE (exp));
2683
2684 /* There's no easy way to detect the case we need to set EDOM. */
2685 if (!flag_errno_math)
2686 {
2687 rtx result = gen_reg_rtx (mode);
2688
2689 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2690 need to expand the argument again. This way, we will not perform
2691 side-effects more than once. */
2692 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2693
2694 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2695
2696 start_sequence ();
2697
2698 if (expand_sfix_optab (result, op0, builtin_optab))
2699 {
2700 /* Output the entire sequence. */
2701 insns = get_insns ();
2702 end_sequence ();
2703 emit_insn (insns);
2704 return result;
2705 }
2706
2707 /* If we were unable to expand via the builtin, stop the sequence
2708 (without outputting the insns) and call the library function
2709 with the stabilized argument list. */
2710 end_sequence ();
2711 }
2712
2713 if (fallback_fn != BUILT_IN_NONE)
2714 {
2715 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2716 targets, (int) round (x) should never be transformed into
2717 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2718 a call to lround in the hope that the target provides at least some
2719 C99 functions. This should result in the best user experience for
2720 targets without full C99 support. */
2721 tree fallback_fndecl = mathfn_built_in_1
2722 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2723
2724 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2725 fallback_fndecl, 1, arg);
2726
2727 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2728 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2729 return convert_to_mode (mode, target, 0);
2730 }
2731
2732 return expand_call (exp, target, target == const0_rtx);
2733 }
2734
2735 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2736 a normal call should be emitted rather than expanding the function
2737 in-line. EXP is the expression that is a call to the builtin
2738 function; if convenient, the result should be placed in TARGET. */
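/* No insn pattern is tried here: __builtin_powi is always lowered to the
   libgcc libcall registered for powi_optab (e.g. __powidf2 for double,
   assuming the usual libgcc naming).  */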
2739
2740 static rtx
2741 expand_builtin_powi (tree exp, rtx target)
2742 {
2743 tree arg0, arg1;
2744 rtx op0, op1;
2745 machine_mode mode;
2746 machine_mode mode2;
2747
2748 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2749 return NULL_RTX;
2750
2751 arg0 = CALL_EXPR_ARG (exp, 0);
2752 arg1 = CALL_EXPR_ARG (exp, 1);
2753 mode = TYPE_MODE (TREE_TYPE (exp));
2754
2755 /* Emit a libcall to libgcc. */
2756
2757 /* Mode of the 2nd argument must match that of an int. */
2758 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2759
2760 if (target == NULL_RTX)
2761 target = gen_reg_rtx (mode);
2762
2763 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2764 if (GET_MODE (op0) != mode)
2765 op0 = convert_to_mode (mode, op0, 0);
2766 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2767 if (GET_MODE (op1) != mode2)
2768 op1 = convert_to_mode (mode2, op1, 0);
2769
2770 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2771 target, LCT_CONST, mode, 2,
2772 op0, mode, op1, mode2);
2773
2774 return target;
2775 }
2776
2777 /* Expand expression EXP which is a call to the strlen builtin. Return
2778 NULL_RTX if we failed and the caller should emit a normal call; otherwise
2779 try to get the result in TARGET, if convenient. */
2780
2781 static rtx
2782 expand_builtin_strlen (tree exp, rtx target,
2783 machine_mode target_mode)
2784 {
2785 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2786 return NULL_RTX;
2787 else
2788 {
2789 struct expand_operand ops[4];
2790 rtx pat;
2791 tree len;
2792 tree src = CALL_EXPR_ARG (exp, 0);
2793 rtx src_reg;
2794 rtx_insn *before_strlen;
2795 machine_mode insn_mode;
2796 enum insn_code icode = CODE_FOR_nothing;
2797 unsigned int align;
2798
2799 /* If the length can be computed at compile-time, return it. */
2800 len = c_strlen (src, 0);
2801 if (len)
2802 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2803
2804 /* If the length can be computed at compile-time and is a constant
2805 integer, but there are side-effects in src, evaluate
2806 src for side-effects, then return len.
2807 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2808 can be optimized into: i++; x = 3; */
2809 len = c_strlen (src, 1);
2810 if (len && TREE_CODE (len) == INTEGER_CST)
2811 {
2812 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2813 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2814 }
2815
2816 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2817
2818 /* If SRC is not a pointer type, don't do this operation inline. */
2819 if (align == 0)
2820 return NULL_RTX;
2821
2822 /* Bail out if we can't compute strlen in the right mode. */
2823 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2824 {
2825 icode = optab_handler (strlen_optab, insn_mode);
2826 if (icode != CODE_FOR_nothing)
2827 break;
2828 }
2829 if (insn_mode == VOIDmode)
2830 return NULL_RTX;
2831
2832 /* Make a place to hold the source address. We will not expand
2833 the actual source until we are sure that the expansion will
2834 not fail -- there are trees that cannot be expanded twice. */
2835 src_reg = gen_reg_rtx (Pmode);
2836
2837 /* Mark the beginning of the strlen sequence so we can emit the
2838 source operand later. */
2839 before_strlen = get_last_insn ();
2840
2841 create_output_operand (&ops[0], target, insn_mode);
2842 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2843 create_integer_operand (&ops[2], 0);
2844 create_integer_operand (&ops[3], align);
2845 if (!maybe_expand_insn (icode, 4, ops))
2846 return NULL_RTX;
2847
2848 /* Now that we are assured of success, expand the source. */
2849 start_sequence ();
2850 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2851 if (pat != src_reg)
2852 {
2853 #ifdef POINTERS_EXTEND_UNSIGNED
2854 if (GET_MODE (pat) != Pmode)
2855 pat = convert_to_mode (Pmode, pat,
2856 POINTERS_EXTEND_UNSIGNED);
2857 #endif
2858 emit_move_insn (src_reg, pat);
2859 }
2860 pat = get_insns ();
2861 end_sequence ();
2862
2863 if (before_strlen)
2864 emit_insn_after (pat, before_strlen);
2865 else
2866 emit_insn_before (pat, get_insns ());
2867
2868 /* Return the value in the proper mode for this function. */
2869 if (GET_MODE (ops[0].value) == target_mode)
2870 target = ops[0].value;
2871 else if (target != 0)
2872 convert_move (target, ops[0].value, 0);
2873 else
2874 target = convert_to_mode (target_mode, ops[0].value, 0);
2875
2876 return target;
2877 }
2878 }
2879
2880 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2881 bytes from constant string DATA + OFFSET and return it as a target
2882 constant. */
2883
2884 static rtx
2885 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2886 machine_mode mode)
2887 {
2888 const char *str = (const char *) data;
2889
2890 gcc_assert (offset >= 0
2891 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2892 <= strlen (str) + 1));
2893
2894 return c_readstr (str + offset, mode);
2895 }
2896
2897 /* LEN specifies the length of the block for a memcpy/memset operation.
2898 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2899 In some cases we can make a very likely guess about the maximum size,
2900 which we then record in PROBABLE_MAX_SIZE. */
2901
2902 static void
2903 determine_block_size (tree len, rtx len_rtx,
2904 unsigned HOST_WIDE_INT *min_size,
2905 unsigned HOST_WIDE_INT *max_size,
2906 unsigned HOST_WIDE_INT *probable_max_size)
2907 {
2908 if (CONST_INT_P (len_rtx))
2909 {
2910 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2911 return;
2912 }
2913 else
2914 {
2915 wide_int min, max;
2916 enum value_range_type range_type = VR_UNDEFINED;
2917
2918 /* Determine bounds from the type. */
2919 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2920 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2921 else
2922 *min_size = 0;
2923 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2924 *probable_max_size = *max_size
2925 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2926 else
2927 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2928
2929 if (TREE_CODE (len) == SSA_NAME)
2930 range_type = get_range_info (len, &min, &max);
2931 if (range_type == VR_RANGE)
2932 {
2933 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2934 *min_size = min.to_uhwi ();
2935 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2936 *probable_max_size = *max_size = max.to_uhwi ();
2937 }
2938 else if (range_type == VR_ANTI_RANGE)
2939 {
2940 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2941 if (min == 0)
2942 {
2943 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2944 *min_size = max.to_uhwi () + 1;
2945 }
2946 /* Code like
2947
2948 int n;
2949 if (n < 100)
2950 memcpy (a, b, n)
2951
2952 produces an anti-range allowing negative values of N. We can
2953 still use that information and guess that N is not negative.
2954 */
2955 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2956 *probable_max_size = min.to_uhwi () - 1;
2957 }
2958 }
2959 gcc_checking_assert (*max_size <=
2960 (unsigned HOST_WIDE_INT)
2961 GET_MODE_MASK (GET_MODE (len_rtx)));
2962 }
2963
2964 /* Try to verify that the sizes and lengths of the arguments to a string
2965 manipulation function given by EXP are within valid bounds and that
2966 the operation does not lead to buffer overflow. Arguments other than
2967 EXP may be null. When non-null, the arguments have the following
2968 meaning:
2969 SIZE is the user-supplied size argument to the function (such as in
2970 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)). It specifies the exact
2971 number of bytes to write.
2972 MAXLEN is the user-supplied bound on the length of the source sequence
2973 (such as in strncat(d, s, N)). It specifies the upper limit on the number
2974 of bytes to write.
2975 SRC is the source string (such as in strcpy(d, s)) when the expression
2976 EXP is a string function call (as opposed to a memory call like memcpy).
2977 As an exception, SRC can also be an integer denoting the precomputed
2978 size of the source string or object (for functions like memcpy).
2979 OBJSIZE is the size of the destination object specified by the last
2980 argument to the _chk builtins, typically resulting from the expansion
2981 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
2982 OBJSIZE).
2983
2984 When SIZE is null LEN is checked to verify that it doesn't exceed
2985 SIZE_MAX.
2986
2987 If the call is successfully verified as safe from buffer overflow
2988 the function returns true, otherwise false. */
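/* As a hypothetical example, with -Wstringop-overflow enabled a call
   such as

     char d[3];
     memcpy (d, s, 8);

   is diagnosed here because the constant SIZE (8) exceeds the destination
   object size (3) determined via __builtin_object_size.  */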
2989
2990 static bool
2991 check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
2992 {
2993 /* The size of the largest object is half the address space, or
2994 SSIZE_MAX. (This is way too permissive.) */
2995 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
2996
2997 tree slen = NULL_TREE;
2998
2999 tree range[2] = { NULL_TREE, NULL_TREE };
3000
3001 /* Set to true when the exact number of bytes written by a string
3002 function like strcpy is not known and the only thing that is
3003 known is that it must be at least one (for the terminating nul). */
3004 bool at_least_one = false;
3005 if (src)
3006 {
3007 /* SRC is normally a pointer to string but as a special case
3008 it can be an integer denoting the length of a string. */
3009 if (POINTER_TYPE_P (TREE_TYPE (src)))
3010 {
3011 /* Try to determine the range of lengths the source string
3012 refers to. If it can be determined and is less than
3013 the upper bound given by MAXLEN add one to it for
3014 the terminating nul. Otherwise, set it to one for
3015 the same reason, or to MAXLEN as appropriate. */
3016 get_range_strlen (src, range);
3017 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3018 {
3019 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3020 range[0] = range[1] = maxlen;
3021 else
3022 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3023 range[0], size_one_node);
3024
3025 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3026 range[1] = maxlen;
3027 else if (!integer_all_onesp (range[1]))
3028 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3029 range[1], size_one_node);
3030
3031 slen = range[0];
3032 }
3033 else
3034 {
3035 at_least_one = true;
3036 slen = size_one_node;
3037 }
3038 }
3039 else
3040 slen = src;
3041 }
3042
3043 if (!size && !maxlen)
3044 {
3045 /* When the only available piece of data is the object size
3046 there is nothing to do. */
3047 if (!slen)
3048 return true;
3049
3050 /* Otherwise, when the length of the source sequence is known
3051 (as with strlen), set SIZE to it. */
3052 if (!range[0])
3053 size = slen;
3054 }
3055
3056 if (!objsize)
3057 objsize = maxobjsize;
3058
3059 /* The SIZE is exact if it's non-null, constant, and in range of
3060 unsigned HOST_WIDE_INT. */
3061 bool exactsize = size && tree_fits_uhwi_p (size);
3062
3063 if (size)
3064 get_size_range (size, range);
3065
3066 /* First check the number of bytes to be written against the maximum
3067 object size. */
3068 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3069 {
3070 location_t loc = tree_nonartificial_location (exp);
3071 loc = expansion_point_location_if_in_system_header (loc);
3072
3073 if (range[0] == range[1])
3074 warning_at (loc, opt,
3075 "%K%qD specified size %E "
3076 "exceeds maximum object size %E",
3077 exp, get_callee_fndecl (exp), range[0], maxobjsize);
3078 else
3079 warning_at (loc, opt,
3080 "%K%qD specified size between %E and %E "
3081 "exceeds maximum object size %E",
3082 exp, get_callee_fndecl (exp),
3083 range[0], range[1], maxobjsize);
3084 return false;
3085 }
3086
3087 /* Next check the number of bytes to be written against the destination
3088 object size. */
3089 if (range[0] || !exactsize || integer_all_onesp (size))
3090 {
3091 if (range[0]
3092 && ((tree_fits_uhwi_p (objsize)
3093 && tree_int_cst_lt (objsize, range[0]))
3094 || (tree_fits_uhwi_p (size)
3095 && tree_int_cst_lt (size, range[0]))))
3096 {
3097 location_t loc = tree_nonartificial_location (exp);
3098 loc = expansion_point_location_if_in_system_header (loc);
3099
3100 if (size == slen && at_least_one)
3101 {
3102 /* This is a call to strcpy with a destination of 0 size
3103 and a source of unknown length. The call will write
3104 at least one byte past the end of the destination. */
3105 warning_at (loc, opt,
3106 "%K%qD writing %E or more bytes into a region "
3107 "of size %E overflows the destination",
3108 exp, get_callee_fndecl (exp), range[0], objsize);
3109 }
3110 else if (tree_int_cst_equal (range[0], range[1]))
3111 warning_at (loc, opt,
3112 (integer_onep (range[0])
3113 ? G_("%K%qD writing %E byte into a region "
3114 "of size %E overflows the destination")
3115 : G_("%K%qD writing %E bytes into a region "
3116 "of size %E overflows the destination")),
3117 exp, get_callee_fndecl (exp), range[0], objsize);
3118 else if (tree_int_cst_sign_bit (range[1]))
3119 {
3120 /* Avoid printing the upper bound if it's invalid. */
3121 warning_at (loc, opt,
3122 "%K%qD writing %E or more bytes into a region "
3123 "of size %E overflows the destination",
3124 exp, get_callee_fndecl (exp), range[0], objsize);
3125 }
3126 else
3127 warning_at (loc, opt,
3128 "%K%qD writing between %E and %E bytes into "
3129 "a region of size %E overflows the destination",
3130 exp, get_callee_fndecl (exp), range[0], range[1],
3131 objsize);
3132
3133 /* Return error when an overflow has been detected. */
3134 return false;
3135 }
3136 }
3137
3138 /* Check the maximum length of the source sequence against the size
3139 of the destination object if known, or against the maximum size
3140 of an object. */
3141 if (maxlen)
3142 {
3143 get_size_range (maxlen, range);
3144
3145 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3146 {
3147 location_t loc = tree_nonartificial_location (exp);
3148 loc = expansion_point_location_if_in_system_header (loc);
3149
3150 if (tree_int_cst_lt (maxobjsize, range[0]))
3151 {
3152 /* Warn about crazy big sizes first since that's more
3153 likely to be meaningful than saying that the bound
3154 is greater than the object size if both are big. */
3155 if (range[0] == range[1])
3156 warning_at (loc, opt,
3157 "%K%qD specified bound %E "
3158 "exceeds maximum object size %E",
3159 exp, get_callee_fndecl (exp),
3160 range[0], maxobjsize);
3161 else
3162 warning_at (loc, opt,
3163 "%K%qD specified bound between %E and %E "
3164 "exceeds maximum object size %E",
3165 exp, get_callee_fndecl (exp),
3166 range[0], range[1], maxobjsize);
3167
3168 return false;
3169 }
3170
3171 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3172 {
3173 if (tree_int_cst_equal (range[0], range[1]))
3174 warning_at (loc, opt,
3175 "%K%qD specified bound %E "
3176 "exceeds destination size %E",
3177 exp, get_callee_fndecl (exp),
3178 range[0], objsize);
3179 else
3180 warning_at (loc, opt,
3181 "%K%qD specified bound between %E and %E "
3182 "exceeds destination size %E",
3183 exp, get_callee_fndecl (exp),
3184 range[0], range[1], objsize);
3185 return false;
3186 }
3187 }
3188 }
3189
3190 if (slen
3191 && slen == src
3192 && size && range[0]
3193 && tree_int_cst_lt (slen, range[0]))
3194 {
3195 location_t loc = tree_nonartificial_location (exp);
3196
3197 if (tree_int_cst_equal (range[0], range[1]))
3198 warning_at (loc, opt,
3199 (tree_int_cst_equal (range[0], integer_one_node)
3200 ? G_("%K%qD reading %E byte from a region of size %E")
3201 : G_("%K%qD reading %E bytes from a region of size %E")),
3202 exp, get_callee_fndecl (exp), range[0], slen);
3203 else if (tree_int_cst_sign_bit (range[1]))
3204 {
3205 /* Avoid printing the upper bound if it's invalid. */
3206 warning_at (loc, opt,
3207 "%K%qD reading %E or more bytes from a region "
3208 "of size %E",
3209 exp, get_callee_fndecl (exp), range[0], slen);
3210 }
3211 else
3212 warning_at (loc, opt,
3213 "%K%qD reading between %E and %E bytes from a region "
3214 "of size %E",
3215 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3216 return false;
3217 }
3218
3219 return true;
3220 }
3221
3222 /* Helper to compute the size of the object referenced by the DEST
3223 expression, which must be of pointer type, using Object Size type
3224 OSTYPE (only the least significant 2 bits are used). Return
3225 the size of the object if successful or NULL when the size cannot
3226 be determined. */
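/* For instance, given a declaration such as char buf[8], calling this on
   the address of BUF with OSTYPE 0 is expected to yield the constant 8,
   and NULL_TREE when the size cannot be determined.  */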
3227
3228 static inline tree
3229 compute_objsize (tree dest, int ostype)
3230 {
3231 unsigned HOST_WIDE_INT size;
3232 if (compute_builtin_object_size (dest, ostype & 3, &size))
3233 return build_int_cst (sizetype, size);
3234
3235 return NULL_TREE;
3236 }
3237
3238 /* Helper to determine and check the sizes of the source and the destination
3239 of calls to the __builtin_{bzero,memcpy,mempcpy,memset} built-ins. EXP is the
3240 call expression, DEST is the destination argument, SRC is the source
3241 argument or null, and LEN is the number of bytes. Use Object Size type-0
3242 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3243 (no overflow or invalid sizes), false otherwise. */
3244
3245 static bool
3246 check_memop_sizes (tree exp, tree dest, tree src, tree size)
3247 {
3248 if (!warn_stringop_overflow)
3249 return true;
3250
3251 /* For functions like memset and memcpy that operate on raw memory
3252 try to determine the size of the largest source and destination
3253 object using type-0 Object Size regardless of the object size
3254 type specified by the option. */
3255 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3256 tree dstsize = compute_objsize (dest, 0);
3257
3258 return check_sizes (OPT_Wstringop_overflow_, exp,
3259 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3260 }
3261
3262 /* Validate memchr arguments without performing any expansion.
3263 Return NULL_RTX. */
3264
3265 static rtx
3266 expand_builtin_memchr (tree exp, rtx)
3267 {
3268 if (!validate_arglist (exp,
3269 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3270 return NULL_RTX;
3271
3272 tree arg1 = CALL_EXPR_ARG (exp, 0);
3273 tree len = CALL_EXPR_ARG (exp, 2);
3274
3275 /* Diagnose calls where the specified length exceeds the size
3276 of the object. */
3277 if (warn_stringop_overflow)
3278 {
3279 tree size = compute_objsize (arg1, 0);
3280 check_sizes (OPT_Wstringop_overflow_,
3281 exp, len, /*maxlen=*/NULL_TREE,
3282 size, /*objsize=*/NULL_TREE);
3283 }
3284
3285 return NULL_RTX;
3286 }
3287
3288 /* Expand a call EXP to the memcpy builtin.
3289 Return NULL_RTX if we failed and the caller should emit a normal call;
3290 otherwise try to get the result in TARGET, if convenient (and in
3291 mode MODE if that's convenient). */
3292
3293 static rtx
3294 expand_builtin_memcpy (tree exp, rtx target)
3295 {
3296 if (!validate_arglist (exp,
3297 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3298 return NULL_RTX;
3299
3300 tree dest = CALL_EXPR_ARG (exp, 0);
3301 tree src = CALL_EXPR_ARG (exp, 1);
3302 tree len = CALL_EXPR_ARG (exp, 2);
3303
3304 check_memop_sizes (exp, dest, src, len);
3305
3306 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3307 /*endp=*/ 0);
3308 }
3309
3310 /* Check a call EXP to the memmove built-in for validity.
3311 Return NULL_RTX on both success and failure. */
3312
3313 static rtx
3314 expand_builtin_memmove (tree exp, rtx)
3315 {
3316 if (!validate_arglist (exp,
3317 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3318 return NULL_RTX;
3319
3320 tree dest = CALL_EXPR_ARG (exp, 0);
3321 tree src = CALL_EXPR_ARG (exp, 1);
3322 tree len = CALL_EXPR_ARG (exp, 2);
3323
3324 check_memop_sizes (exp, dest, src, len);
3325
3326 return NULL_RTX;
3327 }
3328
3329 /* Expand an instrumented call EXP to the memcpy builtin.
3330 Return NULL_RTX if we failed and the caller should emit a normal call;
3331 otherwise try to get the result in TARGET, if convenient (and in
3332 mode MODE if that's convenient). */
3333
3334 static rtx
3335 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3336 {
3337 if (!validate_arglist (exp,
3338 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3339 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3340 INTEGER_TYPE, VOID_TYPE))
3341 return NULL_RTX;
3342 else
3343 {
3344 tree dest = CALL_EXPR_ARG (exp, 0);
3345 tree src = CALL_EXPR_ARG (exp, 2);
3346 tree len = CALL_EXPR_ARG (exp, 4);
3347 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3348 /*end_p=*/ 0);
3349
3350 /* Return src bounds with the result. */
3351 if (res)
3352 {
3353 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3354 expand_normal (CALL_EXPR_ARG (exp, 1)));
3355 res = chkp_join_splitted_slot (res, bnd);
3356 }
3357 return res;
3358 }
3359 }
3360
3361 /* Expand a call EXP to the mempcpy builtin.
3362 Return NULL_RTX if we failed; the caller should emit a normal call,
3363 otherwise try to get the result in TARGET, if convenient (and in
3364 mode MODE if that's convenient). If ENDP is 0 return the
3365 destination pointer, if ENDP is 1 return the end pointer ala
3366 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3367 stpcpy. */
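
/* For example, with char buf[8], __builtin_mempcpy (buf, "abc", 4)
   copies four bytes and evaluates to buf + 4, whereas memcpy would
   evaluate to buf itself; mempcpy therefore expands with ENDP == 1. */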
3368
3369 static rtx
3370 expand_builtin_mempcpy (tree exp, rtx target)
3371 {
3372 if (!validate_arglist (exp,
3373 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3374 return NULL_RTX;
3375
3376 tree dest = CALL_EXPR_ARG (exp, 0);
3377 tree src = CALL_EXPR_ARG (exp, 1);
3378 tree len = CALL_EXPR_ARG (exp, 2);
3379
3380 /* Avoid expanding mempcpy into memcpy when the call is determined
3381 to overflow the buffer. This also prevents the same overflow
3382 from being diagnosed again when expanding memcpy. */
3383 if (!check_memop_sizes (exp, dest, src, len))
3384 return NULL_RTX;
3385
3386 return expand_builtin_mempcpy_args (dest, src, len,
3387 target, exp, /*endp=*/ 1);
3388 }
3389
3390 /* Expand an instrumented call EXP to the mempcpy builtin.
3391 Return NULL_RTX if we failed; the caller should emit a normal call,
3392 otherwise try to get the result in TARGET, if convenient (and in
3393 mode MODE if that's convenient). */
3394
3395 static rtx
3396 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3397 {
3398 if (!validate_arglist (exp,
3399 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3400 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3401 INTEGER_TYPE, VOID_TYPE))
3402 return NULL_RTX;
3403 else
3404 {
3405 tree dest = CALL_EXPR_ARG (exp, 0);
3406 tree src = CALL_EXPR_ARG (exp, 2);
3407 tree len = CALL_EXPR_ARG (exp, 4);
3408 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3409 exp, 1);
3410
3411 /* Return src bounds with the result. */
3412 if (res)
3413 {
3414 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3415 expand_normal (CALL_EXPR_ARG (exp, 1)));
3416 res = chkp_join_splitted_slot (res, bnd);
3417 }
3418 return res;
3419 }
3420 }
3421
3422 /* Helper function to do the actual work for expand of memory copy family
3423 functions (memcpy, mempcpy, stpcpy). The expansion should copy LEN bytes
3424 of memory from SRC to DEST and assign the result to TARGET if convenient.
3425 If ENDP is 0 return the
3426 destination pointer, if ENDP is 1 return the end pointer ala
3427 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3428 stpcpy. */
3429
3430 static rtx
3431 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3432 rtx target, tree exp, int endp)
3433 {
3434 const char *src_str;
3435 unsigned int src_align = get_pointer_alignment (src);
3436 unsigned int dest_align = get_pointer_alignment (dest);
3437 rtx dest_mem, src_mem, dest_addr, len_rtx;
3438 HOST_WIDE_INT expected_size = -1;
3439 unsigned int expected_align = 0;
3440 unsigned HOST_WIDE_INT min_size;
3441 unsigned HOST_WIDE_INT max_size;
3442 unsigned HOST_WIDE_INT probable_max_size;
3443
3444 /* If DEST is not a pointer type, call the normal function. */
3445 if (dest_align == 0)
3446 return NULL_RTX;
3447
3448 /* If SRC is not a pointer type, don't do this
3449 operation in-line. */
3450 if (src_align == 0)
3451 return NULL_RTX;
3452
3453 if (currently_expanding_gimple_stmt)
3454 stringop_block_profile (currently_expanding_gimple_stmt,
3455 &expected_align, &expected_size);
3456
3457 if (expected_align < dest_align)
3458 expected_align = dest_align;
3459 dest_mem = get_memory_rtx (dest, len);
3460 set_mem_align (dest_mem, dest_align);
3461 len_rtx = expand_normal (len);
3462 determine_block_size (len, len_rtx, &min_size, &max_size,
3463 &probable_max_size);
3464 src_str = c_getstr (src);
3465
3466 /* If SRC is a string constant and block move would be done
3467 by pieces, we can avoid loading the string from memory
3468 and only store the computed constants. */
3469 if (src_str
3470 && CONST_INT_P (len_rtx)
3471 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3472 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3473 CONST_CAST (char *, src_str),
3474 dest_align, false))
3475 {
3476 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3477 builtin_memcpy_read_str,
3478 CONST_CAST (char *, src_str),
3479 dest_align, false, endp);
3480 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3481 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3482 return dest_mem;
3483 }
3484
3485 src_mem = get_memory_rtx (src, len);
3486 set_mem_align (src_mem, src_align);
3487
3488 /* Copy word part most expediently. */
3489 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3490 CALL_EXPR_TAILCALL (exp)
3491 && (endp == 0 || target == const0_rtx)
3492 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3493 expected_align, expected_size,
3494 min_size, max_size, probable_max_size);
3495
3496 if (dest_addr == 0)
3497 {
3498 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3499 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3500 }
3501
3502 if (endp && target != const0_rtx)
3503 {
3504 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3505 /* For stpcpy (ENDP == 2), point to the last byte written. */
3506 if (endp == 2)
3507 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3508 }
3509
3510 return dest_addr;
3511 }
3512
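
/* Helper for expand_builtin_mempcpy and expand_builtin_stpcpy: expand a
   copy of LEN bytes from SRC to DEST with the given ENDP semantics by
   deferring to expand_builtin_memory_copy_args. ORIG_EXP is the original
   call expression and TARGET is the preferred location for the result. */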
3513 static rtx
3514 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3515 rtx target, tree orig_exp, int endp)
3516 {
3517 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3518 endp);
3519 }
3520
3521 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3522 we failed; the caller should emit a normal call, otherwise try to
3523 get the result in TARGET, if convenient. If ENDP is 0 return the
3524 destination pointer, if ENDP is 1 return the end pointer ala
3525 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3526 stpcpy. */
3527
3528 static rtx
3529 expand_movstr (tree dest, tree src, rtx target, int endp)
3530 {
3531 struct expand_operand ops[3];
3532 rtx dest_mem;
3533 rtx src_mem;
3534
3535 if (!targetm.have_movstr ())
3536 return NULL_RTX;
3537
3538 dest_mem = get_memory_rtx (dest, NULL);
3539 src_mem = get_memory_rtx (src, NULL);
3540 if (!endp)
3541 {
3542 target = force_reg (Pmode, XEXP (dest_mem, 0));
3543 dest_mem = replace_equiv_address (dest_mem, target);
3544 }
3545
3546 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3547 create_fixed_operand (&ops[1], dest_mem);
3548 create_fixed_operand (&ops[2], src_mem);
3549 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3550 return NULL_RTX;
3551
3552 if (endp && target != const0_rtx)
3553 {
3554 target = ops[0].value;
3555 /* movstr is supposed to set end to the address of the NUL
3556 terminator. If the caller requested a mempcpy-like return value,
3557 adjust it. */
3558 if (endp == 1)
3559 {
3560 rtx tem = plus_constant (GET_MODE (target),
3561 gen_lowpart (GET_MODE (target), target), 1);
3562 emit_move_insn (target, force_operand (tem, NULL_RTX));
3563 }
3564 }
3565 return target;
3566 }
3567
3568 /* Do some very basic size validation of a call to the strcat builtin
3569 given by EXP. Return NULL_RTX to have the built-in expand to a call
3570 to the library function. */
3571
3572 static rtx
3573 expand_builtin_strcat (tree exp, rtx)
3574 {
3575 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3576 || !warn_stringop_overflow)
3577 return NULL_RTX;
3578
3579 tree dest = CALL_EXPR_ARG (exp, 0);
3580 tree src = CALL_EXPR_ARG (exp, 1);
3581
3582 /* There is no way here to determine the length of the string in
3583 the destination to which the SRC string is being appended so
3584 just diagnose cases when the source string is longer than
3585 the destination object. */
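
/* For example, with char d[4], strcat (d, "abcdef") is diagnosed because
   the source string alone does not fit in the destination. */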
3586
3587 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3588
3589 check_sizes (OPT_Wstringop_overflow_,
3590 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3591
3592 return NULL_RTX;
3593 }
3594
3595 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3596 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3597 try to get the result in TARGET, if convenient (and in mode MODE if that's
3598 convenient). */
3599
3600 static rtx
3601 expand_builtin_strcpy (tree exp, rtx target)
3602 {
3603 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3604 return NULL_RTX;
3605
3606 tree dest = CALL_EXPR_ARG (exp, 0);
3607 tree src = CALL_EXPR_ARG (exp, 1);
3608
3609 if (warn_stringop_overflow)
3610 {
3611 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3612 check_sizes (OPT_Wstringop_overflow_,
3613 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3614 }
3615
3616 return expand_builtin_strcpy_args (dest, src, target);
3617 }
3618
3619 /* Helper function to do the actual work for expand_builtin_strcpy. The
3620 arguments to the builtin_strcpy call DEST and SRC are broken out
3621 so that this can also be called without constructing an actual CALL_EXPR.
3622 The other arguments and return value are the same as for
3623 expand_builtin_strcpy. */
3624
3625 static rtx
3626 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3627 {
3628 return expand_movstr (dest, src, target, /*endp=*/0);
3629 }
3630
3631 /* Expand a call EXP to the stpcpy builtin.
3632 Return NULL_RTX if we failed; the caller should emit a normal call,
3633 otherwise try to get the result in TARGET, if convenient (and in
3634 mode MODE if that's convenient). */
3635
3636 static rtx
3637 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3638 {
3639 tree dst, src;
3640 location_t loc = EXPR_LOCATION (exp);
3641
3642 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3643 return NULL_RTX;
3644
3645 dst = CALL_EXPR_ARG (exp, 0);
3646 src = CALL_EXPR_ARG (exp, 1);
3647
3648 if (warn_stringop_overflow)
3649 {
3650 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3651 check_sizes (OPT_Wstringop_overflow_,
3652 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3653 }
3654
3655 /* If return value is ignored, transform stpcpy into strcpy. */
3656 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3657 {
3658 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3659 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3660 return expand_expr (result, target, mode, EXPAND_NORMAL);
3661 }
3662 else
3663 {
3664 tree len, lenp1;
3665 rtx ret;
3666
3667 /* Ensure we get an actual string whose length can be evaluated at
3668 compile-time, not an expression containing a string. This is
3669 because the latter will potentially produce pessimized code
3670 when used to produce the return value. */
3671 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3672 return expand_movstr (dst, src, target, /*endp=*/2);
3673
3674 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3675 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3676 target, exp, /*endp=*/2);
3677
3678 if (ret)
3679 return ret;
3680
3681 if (TREE_CODE (len) == INTEGER_CST)
3682 {
3683 rtx len_rtx = expand_normal (len);
3684
3685 if (CONST_INT_P (len_rtx))
3686 {
3687 ret = expand_builtin_strcpy_args (dst, src, target);
3688
3689 if (ret)
3690 {
3691 if (! target)
3692 {
3693 if (mode != VOIDmode)
3694 target = gen_reg_rtx (mode);
3695 else
3696 target = gen_reg_rtx (GET_MODE (ret));
3697 }
3698 if (GET_MODE (target) != GET_MODE (ret))
3699 ret = gen_lowpart (GET_MODE (target), ret);
3700
3701 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3702 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3703 gcc_assert (ret);
3704
3705 return target;
3706 }
3707 }
3708 }
3709
3710 return expand_movstr (dst, src, target, /*endp=*/2);
3711 }
3712 }
3713
3714 /* Check a call EXP to the stpncpy built-in for validity.
3715 Return NULL_RTX on both success and failure. */
3716
3717 static rtx
3718 expand_builtin_stpncpy (tree exp, rtx)
3719 {
3720 if (!validate_arglist (exp,
3721 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3722 || !warn_stringop_overflow)
3723 return NULL_RTX;
3724
3725 /* The source and destination of the call. */
3726 tree dest = CALL_EXPR_ARG (exp, 0);
3727 tree src = CALL_EXPR_ARG (exp, 1);
3728
3729 /* The exact number of bytes to write (not the maximum). */
3730 tree len = CALL_EXPR_ARG (exp, 2);
3731
3732 /* The size of the destination object. */
3733 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3734
3735 check_sizes (OPT_Wstringop_overflow_,
3736 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3737
3738 return NULL_RTX;
3739 }
3740
3741 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3742 bytes from constant string DATA + OFFSET and return it as target
3743 constant. */
3744
3745 rtx
3746 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3747 machine_mode mode)
3748 {
3749 const char *str = (const char *) data;
3750
3751 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3752 return const0_rtx;
3753
3754 return c_readstr (str + offset, mode);
3755 }
3756
3757 /* Helper to check the sizes of sequences and the destination of calls
3758 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3759 success (no overflow or invalid sizes), false otherwise. */
3760
3761 static bool
3762 check_strncat_sizes (tree exp, tree objsize)
3763 {
3764 tree dest = CALL_EXPR_ARG (exp, 0);
3765 tree src = CALL_EXPR_ARG (exp, 1);
3766 tree maxlen = CALL_EXPR_ARG (exp, 2);
3767
3768 /* Try to determine the range of lengths that the source expression
3769 refers to. */
3770 tree lenrange[2];
3771 get_range_strlen (src, lenrange);
3772
3773 /* Try to verify that the destination is big enough for the shortest
3774 string. */
3775
3776 if (!objsize && warn_stringop_overflow)
3777 {
3778 /* If it hasn't been provided by __strncat_chk, try to determine
3779 the size of the destination object into which the source is
3780 being copied. */
3781 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3782 }
3783
3784 /* Add one for the terminating nul. */
3785 tree srclen = (lenrange[0]
3786 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3787 size_one_node)
3788 : NULL_TREE);
3789
3790 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3791 nul so the specified upper bound should never be equal to (or greater
3792 than) the size of the destination. */
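
/* For example, with char d[8], strncat (d, s, sizeof d) may write as many
   as nine bytes (eight characters plus the terminating nul) and is
   therefore diagnosed. */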
3793 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3794 && tree_int_cst_equal (objsize, maxlen))
3795 {
3796 location_t loc = tree_nonartificial_location (exp);
3797 loc = expansion_point_location_if_in_system_header (loc);
3798
3799 warning_at (loc, OPT_Wstringop_overflow_,
3800 "%K%qD specified bound %E equals destination size",
3801 exp, get_callee_fndecl (exp), maxlen);
3802
3803 return false;
3804 }
3805
3806 if (!srclen
3807 || (maxlen && tree_fits_uhwi_p (maxlen)
3808 && tree_fits_uhwi_p (srclen)
3809 && tree_int_cst_lt (maxlen, srclen)))
3810 srclen = maxlen;
3811
3812 /* The number of bytes to write is LEN but check_sizes will also
3813 check SRCLEN if LEN's value isn't known. */
3814 return check_sizes (OPT_Wstringop_overflow_,
3815 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3816 }
3817
3818 /* Similar to expand_builtin_strcat, do some very basic size validation
3819 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3820 the built-in expand to a call to the library function. */
3821
3822 static rtx
3823 expand_builtin_strncat (tree exp, rtx)
3824 {
3825 if (!validate_arglist (exp,
3826 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3827 || !warn_stringop_overflow)
3828 return NULL_RTX;
3829
3830 tree dest = CALL_EXPR_ARG (exp, 0);
3831 tree src = CALL_EXPR_ARG (exp, 1);
3832 /* The upper bound on the number of bytes to write. */
3833 tree maxlen = CALL_EXPR_ARG (exp, 2);
3834 /* The length of the source sequence. */
3835 tree slen = c_strlen (src, 1);
3836
3837 /* Try to determine the range of lengths that the source expression
3838 refers to. */
3839 tree lenrange[2];
3840 if (slen)
3841 lenrange[0] = lenrange[1] = slen;
3842 else
3843 get_range_strlen (src, lenrange);
3844
3845 /* Try to verify that the destination is big enough for the shortest
3846 string. First try to determine the size of the destination object
3847 into which the source is being copied. */
3848 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3849
3850 /* Add one for the terminating nul. */
3851 tree srclen = (lenrange[0]
3852 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3853 size_one_node)
3854 : NULL_TREE);
3855
3856 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3857 nul so the specified upper bound should never be equal to (or greater
3858 than) the size of the destination. */
3859 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3860 && tree_int_cst_equal (destsize, maxlen))
3861 {
3862 location_t loc = tree_nonartificial_location (exp);
3863 loc = expansion_point_location_if_in_system_header (loc);
3864
3865 warning_at (loc, OPT_Wstringop_overflow_,
3866 "%K%qD specified bound %E equals destination size",
3867 exp, get_callee_fndecl (exp), maxlen);
3868
3869 return NULL_RTX;
3870 }
3871
3872 if (!srclen
3873 || (maxlen && tree_fits_uhwi_p (maxlen)
3874 && tree_fits_uhwi_p (srclen)
3875 && tree_int_cst_lt (maxlen, srclen)))
3876 srclen = maxlen;
3877
3878 /* The number of bytes to write is LEN but check_sizes will also
3879 check SRCLEN if LEN's value isn't known. */
3880 check_sizes (OPT_Wstringop_overflow_,
3881 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3882
3883 return NULL_RTX;
3884 }
3885
3886 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3887 NULL_RTX if we failed; the caller should emit a normal call. */
3888
3889 static rtx
3890 expand_builtin_strncpy (tree exp, rtx target)
3891 {
3892 location_t loc = EXPR_LOCATION (exp);
3893
3894 if (validate_arglist (exp,
3895 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3896 {
3897 tree dest = CALL_EXPR_ARG (exp, 0);
3898 tree src = CALL_EXPR_ARG (exp, 1);
3899 /* The number of bytes to write (not the maximum). */
3900 tree len = CALL_EXPR_ARG (exp, 2);
3901 /* The length of the source sequence. */
3902 tree slen = c_strlen (src, 1);
3903
3904 if (warn_stringop_overflow)
3905 {
3906 tree destsize = compute_objsize (dest,
3907 warn_stringop_overflow - 1);
3908
3909 /* The number of bytes to write is LEN but check_sizes will also
3910 check SLEN if LEN's value isn't known. */
3911 check_sizes (OPT_Wstringop_overflow_,
3912 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3913 }
3914
3915 /* We must be passed a constant len and src parameter. */
3916 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3917 return NULL_RTX;
3918
3919 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3920
3921 /* We're required to pad with trailing zeros if the requested
3922 len is greater than strlen(SRC)+1. In that case try to
3923 use store_by_pieces; if it fails, punt. */
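/* For example, strncpy (d, "ab", 5) stores 'a', 'b' and three trailing
   zero bytes. */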
3924 if (tree_int_cst_lt (slen, len))
3925 {
3926 unsigned int dest_align = get_pointer_alignment (dest);
3927 const char *p = c_getstr (src);
3928 rtx dest_mem;
3929
3930 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3931 || !can_store_by_pieces (tree_to_uhwi (len),
3932 builtin_strncpy_read_str,
3933 CONST_CAST (char *, p),
3934 dest_align, false))
3935 return NULL_RTX;
3936
3937 dest_mem = get_memory_rtx (dest, len);
3938 store_by_pieces (dest_mem, tree_to_uhwi (len),
3939 builtin_strncpy_read_str,
3940 CONST_CAST (char *, p), dest_align, false, 0);
3941 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3942 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3943 return dest_mem;
3944 }
3945 }
3946 return NULL_RTX;
3947 }
3948
3949 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3950 bytes from constant string DATA + OFFSET and return it as target
3951 constant. */
3952
3953 rtx
3954 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3955 machine_mode mode)
3956 {
3957 const char *c = (const char *) data;
3958 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3959
3960 memset (p, *c, GET_MODE_SIZE (mode));
3961
3962 return c_readstr (p, mode);
3963 }
3964
3965 /* Callback routine for store_by_pieces. Return the RTL of a register
3966 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3967 char value given in the RTL register data. For example, if mode is
3968 4 bytes wide, return the RTL for 0x01010101*data. */
3969
3970 static rtx
3971 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3972 machine_mode mode)
3973 {
3974 rtx target, coeff;
3975 size_t size;
3976 char *p;
3977
3978 size = GET_MODE_SIZE (mode);
3979 if (size == 1)
3980 return (rtx) data;
3981
3982 p = XALLOCAVEC (char, size);
3983 memset (p, 1, size);
3984 coeff = c_readstr (p, mode);
3985
3986 target = convert_to_mode (mode, (rtx) data, 1);
3987 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3988 return force_reg (mode, target);
3989 }
3990
3991 /* Expand expression EXP, which is a call to the memset builtin. Return
3992 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3993 try to get the result in TARGET, if convenient (and in mode MODE if that's
3994 convenient). */
3995
3996 static rtx
3997 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3998 {
3999 if (!validate_arglist (exp,
4000 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4001 return NULL_RTX;
4002
4003 tree dest = CALL_EXPR_ARG (exp, 0);
4004 tree val = CALL_EXPR_ARG (exp, 1);
4005 tree len = CALL_EXPR_ARG (exp, 2);
4006
4007 check_memop_sizes (exp, dest, NULL_TREE, len);
4008
4009 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4010 }
4011
4012 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4013 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4014 try to get the result in TARGET, if convenient (and in mode MODE if that's
4015 convenient). */
4016
4017 static rtx
4018 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4019 {
4020 if (!validate_arglist (exp,
4021 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4022 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4023 return NULL_RTX;
4024 else
4025 {
4026 tree dest = CALL_EXPR_ARG (exp, 0);
4027 tree val = CALL_EXPR_ARG (exp, 2);
4028 tree len = CALL_EXPR_ARG (exp, 3);
4029 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4030
4031 /* Return src bounds with the result. */
4032 if (res)
4033 {
4034 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4035 expand_normal (CALL_EXPR_ARG (exp, 1)));
4036 res = chkp_join_splitted_slot (res, bnd);
4037 }
4038 return res;
4039 }
4040 }
4041
4042 /* Helper function to do the actual work for expand_builtin_memset. The
4043 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4044 so that this can also be called without constructing an actual CALL_EXPR.
4045 The other arguments and return value are the same as for
4046 expand_builtin_memset. */
4047
4048 static rtx
4049 expand_builtin_memset_args (tree dest, tree val, tree len,
4050 rtx target, machine_mode mode, tree orig_exp)
4051 {
4052 tree fndecl, fn;
4053 enum built_in_function fcode;
4054 machine_mode val_mode;
4055 char c;
4056 unsigned int dest_align;
4057 rtx dest_mem, dest_addr, len_rtx;
4058 HOST_WIDE_INT expected_size = -1;
4059 unsigned int expected_align = 0;
4060 unsigned HOST_WIDE_INT min_size;
4061 unsigned HOST_WIDE_INT max_size;
4062 unsigned HOST_WIDE_INT probable_max_size;
4063
4064 dest_align = get_pointer_alignment (dest);
4065
4066 /* If DEST is not a pointer type, don't do this operation in-line. */
4067 if (dest_align == 0)
4068 return NULL_RTX;
4069
4070 if (currently_expanding_gimple_stmt)
4071 stringop_block_profile (currently_expanding_gimple_stmt,
4072 &expected_align, &expected_size);
4073
4074 if (expected_align < dest_align)
4075 expected_align = dest_align;
4076
4077 /* If the LEN parameter is zero, return DEST. */
4078 if (integer_zerop (len))
4079 {
4080 /* Evaluate and ignore VAL in case it has side-effects. */
4081 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4082 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4083 }
4084
4085 /* Stabilize the arguments in case we fail. */
4086 dest = builtin_save_expr (dest);
4087 val = builtin_save_expr (val);
4088 len = builtin_save_expr (len);
4089
4090 len_rtx = expand_normal (len);
4091 determine_block_size (len, len_rtx, &min_size, &max_size,
4092 &probable_max_size);
4093 dest_mem = get_memory_rtx (dest, len);
4094 val_mode = TYPE_MODE (unsigned_char_type_node);
4095
4096 if (TREE_CODE (val) != INTEGER_CST)
4097 {
4098 rtx val_rtx;
4099
4100 val_rtx = expand_normal (val);
4101 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4102
4103 /* Assume that we can memset by pieces if we can store
4104 the coefficients by pieces (in the required modes).
4105 We can't pass builtin_memset_gen_str as that emits RTL. */
4106 c = 1;
4107 if (tree_fits_uhwi_p (len)
4108 && can_store_by_pieces (tree_to_uhwi (len),
4109 builtin_memset_read_str, &c, dest_align,
4110 true))
4111 {
4112 val_rtx = force_reg (val_mode, val_rtx);
4113 store_by_pieces (dest_mem, tree_to_uhwi (len),
4114 builtin_memset_gen_str, val_rtx, dest_align,
4115 true, 0);
4116 }
4117 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4118 dest_align, expected_align,
4119 expected_size, min_size, max_size,
4120 probable_max_size))
4121 goto do_libcall;
4122
4123 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4124 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4125 return dest_mem;
4126 }
4127
4128 if (target_char_cast (val, &c))
4129 goto do_libcall;
4130
4131 if (c)
4132 {
4133 if (tree_fits_uhwi_p (len)
4134 && can_store_by_pieces (tree_to_uhwi (len),
4135 builtin_memset_read_str, &c, dest_align,
4136 true))
4137 store_by_pieces (dest_mem, tree_to_uhwi (len),
4138 builtin_memset_read_str, &c, dest_align, true, 0);
4139 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4140 gen_int_mode (c, val_mode),
4141 dest_align, expected_align,
4142 expected_size, min_size, max_size,
4143 probable_max_size))
4144 goto do_libcall;
4145
4146 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4147 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4148 return dest_mem;
4149 }
4150
4151 set_mem_align (dest_mem, dest_align);
4152 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4153 CALL_EXPR_TAILCALL (orig_exp)
4154 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4155 expected_align, expected_size,
4156 min_size, max_size,
4157 probable_max_size);
4158
4159 if (dest_addr == 0)
4160 {
4161 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4162 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4163 }
4164
4165 return dest_addr;
4166
4167 do_libcall:
4168 fndecl = get_callee_fndecl (orig_exp);
4169 fcode = DECL_FUNCTION_CODE (fndecl);
4170 if (fcode == BUILT_IN_MEMSET
4171 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4172 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4173 dest, val, len);
4174 else if (fcode == BUILT_IN_BZERO)
4175 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4176 dest, len);
4177 else
4178 gcc_unreachable ();
4179 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4180 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4181 return expand_call (fn, target, target == const0_rtx);
4182 }
4183
4184 /* Expand expression EXP, which is a call to the bzero builtin. Return
4185 NULL_RTX if we failed; the caller should emit a normal call. */
4186
4187 static rtx
4188 expand_builtin_bzero (tree exp)
4189 {
4190 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4191 return NULL_RTX;
4192
4193 tree dest = CALL_EXPR_ARG (exp, 0);
4194 tree size = CALL_EXPR_ARG (exp, 1);
4195
4196 check_memop_sizes (exp, dest, NULL_TREE, size);
4197
4198 /* New argument list transforming bzero(ptr x, int y) to
4199 memset(ptr x, int 0, size_t y). This is done this way
4200 so that if it isn't expanded inline, we fall back to
4201 calling bzero instead of memset. */
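
/* For example, __builtin_bzero (p, n) is expanded just like
   __builtin_memset (p, 0, (size_t) n), except that the library fallback,
   if needed, remains a call to bzero. */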
4202
4203 location_t loc = EXPR_LOCATION (exp);
4204
4205 return expand_builtin_memset_args (dest, integer_zero_node,
4206 fold_convert_loc (loc,
4207 size_type_node, size),
4208 const0_rtx, VOIDmode, exp);
4209 }
4210
4211 /* Try to expand cmpstr operation ICODE with the given operands.
4212 Return the result rtx on success, otherwise return null. */
4213
4214 static rtx
4215 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4216 HOST_WIDE_INT align)
4217 {
4218 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4219
4220 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4221 target = NULL_RTX;
4222
4223 struct expand_operand ops[4];
4224 create_output_operand (&ops[0], target, insn_mode);
4225 create_fixed_operand (&ops[1], arg1_rtx);
4226 create_fixed_operand (&ops[2], arg2_rtx);
4227 create_integer_operand (&ops[3], align);
4228 if (maybe_expand_insn (icode, 4, ops))
4229 return ops[0].value;
4230 return NULL_RTX;
4231 }
4232
4233 /* Expand expression EXP, which is a call to the memcmp built-in function.
4234 Return NULL_RTX if we failed and the caller should emit a normal call,
4235 otherwise try to get the result in TARGET, if convenient.
4236 RESULT_EQ is true if we can relax the returned value to be either zero
4237 or nonzero, without caring about the sign. */
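
/* For example, when the caller only tests memcmp (a, b, n) == 0,
   RESULT_EQ lets the block comparison return any nonzero value on a
   mismatch rather than a value with the correct sign. */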
4238
4239 static rtx
4240 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4241 {
4242 if (!validate_arglist (exp,
4243 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4244 return NULL_RTX;
4245
4246 tree arg1 = CALL_EXPR_ARG (exp, 0);
4247 tree arg2 = CALL_EXPR_ARG (exp, 1);
4248 tree len = CALL_EXPR_ARG (exp, 2);
4249
4250 /* Diagnose calls where the specified length exceeds the size of either
4251 object. */
4252 if (warn_stringop_overflow)
4253 {
4254 tree size = compute_objsize (arg1, 0);
4255 if (check_sizes (OPT_Wstringop_overflow_,
4256 exp, len, /*maxlen=*/NULL_TREE,
4257 size, /*objsize=*/NULL_TREE))
4258 {
4259 size = compute_objsize (arg2, 0);
4260 check_sizes (OPT_Wstringop_overflow_,
4261 exp, len, /*maxlen=*/NULL_TREE,
4262 size, /*objsize=*/NULL_TREE);
4263 }
4264 }
4265
4266 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4267 location_t loc = EXPR_LOCATION (exp);
4268
4269 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4270 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4271
4272 /* If we don't have POINTER_TYPE, call the function. */
4273 if (arg1_align == 0 || arg2_align == 0)
4274 return NULL_RTX;
4275
4276 rtx arg1_rtx = get_memory_rtx (arg1, len);
4277 rtx arg2_rtx = get_memory_rtx (arg2, len);
4278 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4279
4280 /* Set MEM_SIZE as appropriate. */
4281 if (CONST_INT_P (len_rtx))
4282 {
4283 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4284 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4285 }
4286
4287 by_pieces_constfn constfn = NULL;
4288
4289 const char *src_str = c_getstr (arg2);
4290 if (result_eq && src_str == NULL)
4291 {
4292 src_str = c_getstr (arg1);
4293 if (src_str != NULL)
4294 std::swap (arg1_rtx, arg2_rtx);
4295 }
4296
4297 /* If SRC is a string constant and block move would be done
4298 by pieces, we can avoid loading the string from memory
4299 and only store the computed constants. */
4300 if (src_str
4301 && CONST_INT_P (len_rtx)
4302 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4303 constfn = builtin_memcpy_read_str;
4304
4305 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4306 TREE_TYPE (len), target,
4307 result_eq, constfn,
4308 CONST_CAST (char *, src_str));
4309
4310 if (result)
4311 {
4312 /* Return the value in the proper mode for this function. */
4313 if (GET_MODE (result) == mode)
4314 return result;
4315
4316 if (target != 0)
4317 {
4318 convert_move (target, result, 0);
4319 return target;
4320 }
4321
4322 return convert_to_mode (mode, result, 0);
4323 }
4324
4325 return NULL_RTX;
4326 }
4327
4328 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4329 if we failed; the caller should emit a normal call, otherwise try to get
4330 the result in TARGET, if convenient. */
4331
4332 static rtx
4333 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4334 {
4335 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4336 return NULL_RTX;
4337
4338 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4339 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4340 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4341 {
4342 rtx arg1_rtx, arg2_rtx;
4343 tree fndecl, fn;
4344 tree arg1 = CALL_EXPR_ARG (exp, 0);
4345 tree arg2 = CALL_EXPR_ARG (exp, 1);
4346 rtx result = NULL_RTX;
4347
4348 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4349 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4350
4351 /* If we don't have POINTER_TYPE, call the function. */
4352 if (arg1_align == 0 || arg2_align == 0)
4353 return NULL_RTX;
4354
4355 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4356 arg1 = builtin_save_expr (arg1);
4357 arg2 = builtin_save_expr (arg2);
4358
4359 arg1_rtx = get_memory_rtx (arg1, NULL);
4360 arg2_rtx = get_memory_rtx (arg2, NULL);
4361
4362 /* Try to call cmpstrsi. */
4363 if (cmpstr_icode != CODE_FOR_nothing)
4364 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4365 MIN (arg1_align, arg2_align));
4366
4367 /* Try to determine at least one length and call cmpstrnsi. */
4368 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4369 {
4370 tree len;
4371 rtx arg3_rtx;
4372
4373 tree len1 = c_strlen (arg1, 1);
4374 tree len2 = c_strlen (arg2, 1);
4375
4376 if (len1)
4377 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4378 if (len2)
4379 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4380
4381 /* If we don't have a constant length for the first, use the length
4382 of the second, if we know it. We don't require a constant for
4383 this case; some cost analysis could be done if both are available
4384 but neither is constant. For now, assume they're equally cheap,
4385 unless one has side effects. If both strings have constant lengths,
4386 use the smaller. */
4387
4388 if (!len1)
4389 len = len2;
4390 else if (!len2)
4391 len = len1;
4392 else if (TREE_SIDE_EFFECTS (len1))
4393 len = len2;
4394 else if (TREE_SIDE_EFFECTS (len2))
4395 len = len1;
4396 else if (TREE_CODE (len1) != INTEGER_CST)
4397 len = len2;
4398 else if (TREE_CODE (len2) != INTEGER_CST)
4399 len = len1;
4400 else if (tree_int_cst_lt (len1, len2))
4401 len = len1;
4402 else
4403 len = len2;
4404
4405 /* If both arguments have side effects, we cannot optimize. */
4406 if (len && !TREE_SIDE_EFFECTS (len))
4407 {
4408 arg3_rtx = expand_normal (len);
4409 result = expand_cmpstrn_or_cmpmem
4410 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4411 arg3_rtx, MIN (arg1_align, arg2_align));
4412 }
4413 }
4414
4415 if (result)
4416 {
4417 /* Return the value in the proper mode for this function. */
4418 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4419 if (GET_MODE (result) == mode)
4420 return result;
4421 if (target == 0)
4422 return convert_to_mode (mode, result, 0);
4423 convert_move (target, result, 0);
4424 return target;
4425 }
4426
4427 /* Expand the library call ourselves using a stabilized argument
4428 list to avoid re-evaluating the function's arguments twice. */
4429 fndecl = get_callee_fndecl (exp);
4430 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4431 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4432 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4433 return expand_call (fn, target, target == const0_rtx);
4434 }
4435 return NULL_RTX;
4436 }
4437
4438 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4439 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4440 the result in TARGET, if convenient. */
4441
4442 static rtx
4443 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4444 ATTRIBUTE_UNUSED machine_mode mode)
4445 {
4446 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4447
4448 if (!validate_arglist (exp,
4449 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4450 return NULL_RTX;
4451
4452 /* If c_strlen can determine an expression for one of the string
4453 lengths, and it doesn't have side effects, then emit cmpstrnsi
4454 using length MIN(strlen(string)+1, arg3). */
4455 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4456 if (cmpstrn_icode != CODE_FOR_nothing)
4457 {
4458 tree len, len1, len2, len3;
4459 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4460 rtx result;
4461 tree fndecl, fn;
4462 tree arg1 = CALL_EXPR_ARG (exp, 0);
4463 tree arg2 = CALL_EXPR_ARG (exp, 1);
4464 tree arg3 = CALL_EXPR_ARG (exp, 2);
4465
4466 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4467 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4468
4469 len1 = c_strlen (arg1, 1);
4470 len2 = c_strlen (arg2, 1);
4471
4472 if (len1)
4473 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4474 if (len2)
4475 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4476
4477 len3 = fold_convert_loc (loc, sizetype, arg3);
4478
4479 /* If we don't have a constant length for the first, use the length
4480 of the second, if we know it. If neither string is constant length,
4481 use the given length argument. We don't require a constant for
4482 this case; some cost analysis could be done if both are available
4483 but neither is constant. For now, assume they're equally cheap,
4484 unless one has side effects. If both strings have constant lengths,
4485 use the smaller. */
4486
4487 if (!len1 && !len2)
4488 len = len3;
4489 else if (!len1)
4490 len = len2;
4491 else if (!len2)
4492 len = len1;
4493 else if (TREE_SIDE_EFFECTS (len1))
4494 len = len2;
4495 else if (TREE_SIDE_EFFECTS (len2))
4496 len = len1;
4497 else if (TREE_CODE (len1) != INTEGER_CST)
4498 len = len2;
4499 else if (TREE_CODE (len2) != INTEGER_CST)
4500 len = len1;
4501 else if (tree_int_cst_lt (len1, len2))
4502 len = len1;
4503 else
4504 len = len2;
4505
4506 /* If we are not using the given length, we must incorporate it here.
4507 The actual new length parameter will be MIN(len,arg3) in this case. */
4508 if (len != len3)
4509 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4510 arg1_rtx = get_memory_rtx (arg1, len);
4511 arg2_rtx = get_memory_rtx (arg2, len);
4512 arg3_rtx = expand_normal (len);
4513 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4514 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4515 MIN (arg1_align, arg2_align));
4516 if (result)
4517 {
4518 /* Return the value in the proper mode for this function. */
4519 mode = TYPE_MODE (TREE_TYPE (exp));
4520 if (GET_MODE (result) == mode)
4521 return result;
4522 if (target == 0)
4523 return convert_to_mode (mode, result, 0);
4524 convert_move (target, result, 0);
4525 return target;
4526 }
4527
4528 /* Expand the library call ourselves using a stabilized argument
4529 list to avoid re-evaluating the function's arguments twice. */
4530 fndecl = get_callee_fndecl (exp);
4531 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4532 arg1, arg2, len);
4533 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4534 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4535 return expand_call (fn, target, target == const0_rtx);
4536 }
4537 return NULL_RTX;
4538 }
4539
4540 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4541 if that's convenient. */
4542
4543 rtx
4544 expand_builtin_saveregs (void)
4545 {
4546 rtx val;
4547 rtx_insn *seq;
4548
4549 /* Don't do __builtin_saveregs more than once in a function.
4550 Save the result of the first call and reuse it. */
4551 if (saveregs_value != 0)
4552 return saveregs_value;
4553
4554 /* When this function is called, it means that registers must be
4555 saved on entry to this function. So we migrate the call to the
4556 first insn of this function. */
4557
4558 start_sequence ();
4559
4560 /* Do whatever the machine needs done in this case. */
4561 val = targetm.calls.expand_builtin_saveregs ();
4562
4563 seq = get_insns ();
4564 end_sequence ();
4565
4566 saveregs_value = val;
4567
4568 /* Put the insns after the NOTE that starts the function. If this
4569 is inside a start_sequence, make the outer-level insn chain current, so
4570 the code is placed at the start of the function. */
4571 push_topmost_sequence ();
4572 emit_insn_after (seq, entry_of_function ());
4573 pop_topmost_sequence ();
4574
4575 return val;
4576 }
4577
4578 /* Expand a call to __builtin_next_arg. */
4579
4580 static rtx
4581 expand_builtin_next_arg (void)
4582 {
4583 /* Checking arguments is already done in fold_builtin_next_arg
4584 that must be called before this function. */
4585 return expand_binop (ptr_mode, add_optab,
4586 crtl->args.internal_arg_pointer,
4587 crtl->args.arg_offset_rtx,
4588 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4589 }
4590
4591 /* Make it easier for the backends by protecting the valist argument
4592 from multiple evaluations. */
4593
4594 static tree
4595 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4596 {
4597 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4598
4599 /* The current way of determining the type of valist is completely
4600 bogus. We should have the information on the va builtin instead. */
4601 if (!vatype)
4602 vatype = targetm.fn_abi_va_list (cfun->decl);
4603
4604 if (TREE_CODE (vatype) == ARRAY_TYPE)
4605 {
4606 if (TREE_SIDE_EFFECTS (valist))
4607 valist = save_expr (valist);
4608
4609 /* For this case, the backends will be expecting a pointer to
4610 vatype, but it's possible we've actually been given an array
4611 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4612 So fix it. */
4613 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4614 {
4615 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4616 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4617 }
4618 }
4619 else
4620 {
4621 tree pt = build_pointer_type (vatype);
4622
4623 if (! needs_lvalue)
4624 {
4625 if (! TREE_SIDE_EFFECTS (valist))
4626 return valist;
4627
4628 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4629 TREE_SIDE_EFFECTS (valist) = 1;
4630 }
4631
4632 if (TREE_SIDE_EFFECTS (valist))
4633 valist = save_expr (valist);
4634 valist = fold_build2_loc (loc, MEM_REF,
4635 vatype, valist, build_int_cst (pt, 0));
4636 }
4637
4638 return valist;
4639 }
4640
4641 /* The "standard" definition of va_list is void*. */
4642
4643 tree
4644 std_build_builtin_va_list (void)
4645 {
4646 return ptr_type_node;
4647 }
4648
4649 /* The "standard" abi va_list is va_list_type_node. */
4650
4651 tree
4652 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4653 {
4654 return va_list_type_node;
4655 }
4656
4657 /* The "standard" type of va_list is va_list_type_node. */
4658
4659 tree
4660 std_canonical_va_list_type (tree type)
4661 {
4662 tree wtype, htype;
4663
4664 wtype = va_list_type_node;
4665 htype = type;
4666
4667 if (TREE_CODE (wtype) == ARRAY_TYPE)
4668 {
4669 /* If va_list is an array type, the argument may have decayed
4670 to a pointer type, e.g. by being passed to another function.
4671 In that case, unwrap both types so that we can compare the
4672 underlying records. */
4673 if (TREE_CODE (htype) == ARRAY_TYPE
4674 || POINTER_TYPE_P (htype))
4675 {
4676 wtype = TREE_TYPE (wtype);
4677 htype = TREE_TYPE (htype);
4678 }
4679 }
4680 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4681 return va_list_type_node;
4682
4683 return NULL_TREE;
4684 }
4685
4686 /* The "standard" implementation of va_start: just assign `nextarg' to
4687 the variable. */
4688
4689 void
4690 std_expand_builtin_va_start (tree valist, rtx nextarg)
4691 {
4692 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4693 convert_move (va_r, nextarg, 0);
4694
4695 /* We do not have any valid bounds for the pointer, so
4696 just store zero bounds for it. */
4697 if (chkp_function_instrumented_p (current_function_decl))
4698 chkp_expand_bounds_reset_for_mem (valist,
4699 make_tree (TREE_TYPE (valist),
4700 nextarg));
4701 }
4702
4703 /* Expand EXP, a call to __builtin_va_start. */
4704
4705 static rtx
4706 expand_builtin_va_start (tree exp)
4707 {
4708 rtx nextarg;
4709 tree valist;
4710 location_t loc = EXPR_LOCATION (exp);
4711
4712 if (call_expr_nargs (exp) < 2)
4713 {
4714 error_at (loc, "too few arguments to function %<va_start%>");
4715 return const0_rtx;
4716 }
4717
4718 if (fold_builtin_next_arg (exp, true))
4719 return const0_rtx;
4720
4721 nextarg = expand_builtin_next_arg ();
4722 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4723
4724 if (targetm.expand_builtin_va_start)
4725 targetm.expand_builtin_va_start (valist, nextarg);
4726 else
4727 std_expand_builtin_va_start (valist, nextarg);
4728
4729 return const0_rtx;
4730 }
4731
4732 /* Expand EXP, a call to __builtin_va_end. */
4733
4734 static rtx
4735 expand_builtin_va_end (tree exp)
4736 {
4737 tree valist = CALL_EXPR_ARG (exp, 0);
4738
4739 /* Evaluate for side effects, if needed. I hate macros that don't
4740 do that. */
4741 if (TREE_SIDE_EFFECTS (valist))
4742 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4743
4744 return const0_rtx;
4745 }
4746
4747 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4748 builtin rather than just as an assignment in stdarg.h because of the
4749 nastiness of array-type va_list types. */
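
/* On targets where va_list is an array of a record type (e.g. x86-64,
   where it is a one-element array of struct __va_list_tag), the
   arguments decay to pointers and a plain assignment would not copy the
   underlying object, so the copy is emitted as a block move below. */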
4750
4751 static rtx
4752 expand_builtin_va_copy (tree exp)
4753 {
4754 tree dst, src, t;
4755 location_t loc = EXPR_LOCATION (exp);
4756
4757 dst = CALL_EXPR_ARG (exp, 0);
4758 src = CALL_EXPR_ARG (exp, 1);
4759
4760 dst = stabilize_va_list_loc (loc, dst, 1);
4761 src = stabilize_va_list_loc (loc, src, 0);
4762
4763 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4764
4765 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4766 {
4767 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4768 TREE_SIDE_EFFECTS (t) = 1;
4769 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4770 }
4771 else
4772 {
4773 rtx dstb, srcb, size;
4774
4775 /* Evaluate to pointers. */
4776 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4777 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4778 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4779 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4780
4781 dstb = convert_memory_address (Pmode, dstb);
4782 srcb = convert_memory_address (Pmode, srcb);
4783
4784 /* "Dereference" to BLKmode memories. */
4785 dstb = gen_rtx_MEM (BLKmode, dstb);
4786 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4787 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4788 srcb = gen_rtx_MEM (BLKmode, srcb);
4789 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4790 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4791
4792 /* Copy. */
4793 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4794 }
4795
4796 return const0_rtx;
4797 }
4798
4799 /* Expand a call to one of the builtin functions __builtin_frame_address or
4800 __builtin_return_address. */
4801
4802 static rtx
4803 expand_builtin_frame_address (tree fndecl, tree exp)
4804 {
4805 /* The argument must be a nonnegative integer constant.
4806 It counts the number of frames to scan up the stack.
4807 The value is either the frame pointer value or the return
4808 address saved in that frame. */
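/* For example, __builtin_return_address (0) yields the return address of
   the current function; any nonzero count walks further up the stack and
   draws the -Wframe-address warning below. */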
4809 if (call_expr_nargs (exp) == 0)
4810 /* Warning about missing arg was already issued. */
4811 return const0_rtx;
4812 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4813 {
4814 error ("invalid argument to %qD", fndecl);
4815 return const0_rtx;
4816 }
4817 else
4818 {
4819 /* Number of frames to scan up the stack. */
4820 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4821
4822 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4823
4824 /* Some ports cannot access arbitrary stack frames. */
4825 if (tem == NULL)
4826 {
4827 warning (0, "unsupported argument to %qD", fndecl);
4828 return const0_rtx;
4829 }
4830
4831 if (count)
4832 {
4833 /* Warn since no effort is made to ensure that any frame
4834 beyond the current one exists or can be safely reached. */
4835 warning (OPT_Wframe_address, "calling %qD with "
4836 "a nonzero argument is unsafe", fndecl);
4837 }
4838
4839 /* For __builtin_frame_address, return what we've got. */
4840 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4841 return tem;
4842
4843 if (!REG_P (tem)
4844 && ! CONSTANT_P (tem))
4845 tem = copy_addr_to_reg (tem);
4846 return tem;
4847 }
4848 }
4849
4850 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4851 failed and the caller should emit a normal call. */
4852
4853 static rtx
4854 expand_builtin_alloca (tree exp)
4855 {
4856 rtx op0;
4857 rtx result;
4858 unsigned int align;
4859 tree fndecl = get_callee_fndecl (exp);
4860 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
4861 == BUILT_IN_ALLOCA_WITH_ALIGN);
4862 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4863 bool valid_arglist
4864 = (alloca_with_align
4865 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4866 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4867
4868 if (!valid_arglist)
4869 return NULL_RTX;
4870
4871 if ((alloca_with_align && !warn_vla_limit)
4872 || (!alloca_with_align && !warn_alloca_limit))
4873 {
4874 /* -Walloca-larger-than and -Wvla-larger-than settings override
4875 the more general -Walloc-size-larger-than, so unless either of
4876 the former options is specified, check the alloca arguments for
4877 overflow. */
4878 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4879 int idx[] = { 0, -1 };
4880 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4881 }
4882
4883 /* Compute the argument. */
4884 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4885
4886 /* Compute the alignment. */
4887 align = (alloca_with_align
4888 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4889 : BIGGEST_ALIGNMENT);
4890
4891 /* Allocate the desired space. If the allocation stems from the declaration
4892 of a variable-sized object, it cannot accumulate. */
4893 result = allocate_dynamic_stack_space (op0, 0, align, alloca_for_var);
4894 result = convert_memory_address (ptr_mode, result);
4895
4896 return result;
4897 }
4898
4899 /* Emit a call to __asan_allocas_unpoison for EXP. Replace the second
4900 argument of the call with virtual_stack_dynamic_rtx because in the asan
4901 pass we emit a dummy value as the second parameter, relying on this
4902 function to perform the change. See the motivation for this in the
4903 comment to the handle_builtin_stack_restore function. */
4904
4905 static rtx
4906 expand_asan_emit_allocas_unpoison (tree exp)
4907 {
4908 tree arg0 = CALL_EXPR_ARG (exp, 0);
4909 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4910 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
4911 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4912 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2, top,
4913 ptr_mode, bot, ptr_mode);
4914 return ret;
4915 }
4916
4917 /* Expand a call to bswap builtin in EXP.
4918 Return NULL_RTX if a normal call should be emitted rather than expanding the
4919 function in-line. If convenient, the result should be placed in TARGET.
4920 SUBTARGET may be used as the target for computing one of EXP's operands. */
4921
4922 static rtx
4923 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4924 rtx subtarget)
4925 {
4926 tree arg;
4927 rtx op0;
4928
4929 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4930 return NULL_RTX;
4931
4932 arg = CALL_EXPR_ARG (exp, 0);
4933 op0 = expand_expr (arg,
4934 subtarget && GET_MODE (subtarget) == target_mode
4935 ? subtarget : NULL_RTX,
4936 target_mode, EXPAND_NORMAL);
4937 if (GET_MODE (op0) != target_mode)
4938 op0 = convert_to_mode (target_mode, op0, 1);
4939
4940 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4941
4942 gcc_assert (target);
4943
4944 return convert_to_mode (target_mode, target, 1);
4945 }
4946
4947 /* Expand a call to a unary builtin in EXP.
4948 Return NULL_RTX if a normal call should be emitted rather than expanding the
4949 function in-line. If convenient, the result should be placed in TARGET.
4950 SUBTARGET may be used as the target for computing one of EXP's operands. */
4951
4952 static rtx
4953 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4954 rtx subtarget, optab op_optab)
4955 {
4956 rtx op0;
4957
4958 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4959 return NULL_RTX;
4960
4961 /* Compute the argument. */
4962 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4963 (subtarget
4964 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4965 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4966 VOIDmode, EXPAND_NORMAL);
4967 /* Compute op, into TARGET if possible.
4968 Set TARGET to wherever the result comes back. */
4969 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4970 op_optab, op0, target, op_optab != clrsb_optab);
4971 gcc_assert (target);
4972
4973 return convert_to_mode (target_mode, target, 0);
4974 }
4975
4976 /* Expand a call to __builtin_expect. We just return our argument
4977 as the builtin_expect semantics should already have been handled by
4978 the tree branch prediction pass. */
4979
4980 static rtx
4981 expand_builtin_expect (tree exp, rtx target)
4982 {
4983 tree arg;
4984
4985 if (call_expr_nargs (exp) < 2)
4986 return const0_rtx;
4987 arg = CALL_EXPR_ARG (exp, 0);
4988
4989 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4990 /* When guessing was done, the hints should be already stripped away. */
4991 gcc_assert (!flag_guess_branch_prob
4992 || optimize == 0 || seen_error ());
4993 return target;
4994 }
4995
4996 /* Expand a call to __builtin_assume_aligned. We just return our first
4997 argument, as the builtin_assume_aligned semantics should already
4998 have been handled by CCP. */
4999
5000 static rtx
5001 expand_builtin_assume_aligned (tree exp, rtx target)
5002 {
5003 if (call_expr_nargs (exp) < 2)
5004 return const0_rtx;
5005 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5006 EXPAND_NORMAL);
5007 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5008 && (call_expr_nargs (exp) < 3
5009 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5010 return target;
5011 }
5012
5013 void
5014 expand_builtin_trap (void)
5015 {
5016 if (targetm.have_trap ())
5017 {
5018 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5019 /* For trap insns when not accumulating outgoing args force
5020 REG_ARGS_SIZE note to prevent crossjumping of calls with
5021 different args sizes. */
5022 if (!ACCUMULATE_OUTGOING_ARGS)
5023 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5024 }
5025 else
5026 {
5027 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5028 tree call_expr = build_call_expr (fn, 0);
5029 expand_call (call_expr, NULL_RTX, false);
5030 }
5031
5032 emit_barrier ();
5033 }
5034
5035 /* Expand a call to __builtin_unreachable. We do nothing except emit
5036 a barrier saying that control flow will not pass here.
5037
5038 It is the responsibility of the program being compiled to ensure
5039 that control flow never reaches __builtin_unreachable. */
5040 static void
5041 expand_builtin_unreachable (void)
5042 {
5043 emit_barrier ();
5044 }
5045
5046 /* Expand EXP, a call to fabs, fabsf or fabsl.
5047 Return NULL_RTX if a normal call should be emitted rather than expanding
5048 the function inline. If convenient, the result should be placed
5049 in TARGET. SUBTARGET may be used as the target for computing
5050 the operand. */
5051
5052 static rtx
5053 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5054 {
5055 machine_mode mode;
5056 tree arg;
5057 rtx op0;
5058
5059 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5060 return NULL_RTX;
5061
5062 arg = CALL_EXPR_ARG (exp, 0);
5063 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5064 mode = TYPE_MODE (TREE_TYPE (arg));
5065 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5066 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5067 }
5068
5069 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5070 Return NULL if a normal call should be emitted rather than expanding the
5071 function inline. If convenient, the result should be placed in TARGET.
5072 SUBTARGET may be used as the target for computing the operand. */
5073
5074 static rtx
5075 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5076 {
5077 rtx op0, op1;
5078 tree arg;
5079
5080 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5081 return NULL_RTX;
5082
5083 arg = CALL_EXPR_ARG (exp, 0);
5084 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5085
5086 arg = CALL_EXPR_ARG (exp, 1);
5087 op1 = expand_normal (arg);
5088
5089 return expand_copysign (op0, op1, target);
5090 }
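/* For example, copysign (3.0, -0.5) yields -3.0: the expansion simply
   transfers the sign bit of the second operand onto the first. */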
5091
5092 /* Expand a call to __builtin___clear_cache. */
5093
5094 static rtx
5095 expand_builtin___clear_cache (tree exp)
5096 {
5097 if (!targetm.code_for_clear_cache)
5098 {
5099 #ifdef CLEAR_INSN_CACHE
5100 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5101 does something. Just do the default expansion to a call to
5102 __clear_cache(). */
5103 return NULL_RTX;
5104 #else
5105 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5106 does nothing. There is no need to call it. Do nothing. */
5107 return const0_rtx;
5108 #endif /* CLEAR_INSN_CACHE */
5109 }
5110
5111 /* We have a "clear_cache" insn, and it will handle everything. */
5112 tree begin, end;
5113 rtx begin_rtx, end_rtx;
5114
5115 /* We must not expand to a library call. If we did, any
5116 fallback library function in libgcc that might contain a call to
5117 __builtin___clear_cache() would recurse infinitely. */
5118 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5119 {
5120 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5121 return const0_rtx;
5122 }
5123
5124 if (targetm.have_clear_cache ())
5125 {
5126 struct expand_operand ops[2];
5127
5128 begin = CALL_EXPR_ARG (exp, 0);
5129 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5130
5131 end = CALL_EXPR_ARG (exp, 1);
5132 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5133
5134 create_address_operand (&ops[0], begin_rtx);
5135 create_address_operand (&ops[1], end_rtx);
5136 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5137 return const0_rtx;
5138 }
5139 return const0_rtx;
5140 }
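/* Illustrative use (hypothetical names): flushing the instruction cache
   after writing freshly generated code, as a JIT might do:

     memcpy (code_buf, insns, len);
     __builtin___clear_cache (code_buf, code_buf + len);  */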
5141
5142 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5143
5144 static rtx
5145 round_trampoline_addr (rtx tramp)
5146 {
5147 rtx temp, addend, mask;
5148
5149 /* If we don't need too much alignment, we'll have been guaranteed
5150 proper alignment by get_trampoline_type. */
5151 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5152 return tramp;
5153
5154 /* Round address up to desired boundary. */
5155 temp = gen_reg_rtx (Pmode);
5156 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5157 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5158
5159 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5160 temp, 0, OPTAB_LIB_WIDEN);
5161 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5162 temp, 0, OPTAB_LIB_WIDEN);
5163
5164 return tramp;
5165 }
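/* As a worked example of the rounding above: with a TRAMPOLINE_ALIGNMENT of
   128 bits, ADDEND is 15 and MASK is -16, so an address of 0x1009 becomes
   (0x1009 + 15) & -16 == 0x1010, the next 16-byte boundary. */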
5166
5167 static rtx
5168 expand_builtin_init_trampoline (tree exp, bool onstack)
5169 {
5170 tree t_tramp, t_func, t_chain;
5171 rtx m_tramp, r_tramp, r_chain, tmp;
5172
5173 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5174 POINTER_TYPE, VOID_TYPE))
5175 return NULL_RTX;
5176
5177 t_tramp = CALL_EXPR_ARG (exp, 0);
5178 t_func = CALL_EXPR_ARG (exp, 1);
5179 t_chain = CALL_EXPR_ARG (exp, 2);
5180
5181 r_tramp = expand_normal (t_tramp);
5182 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5183 MEM_NOTRAP_P (m_tramp) = 1;
5184
5185 /* If ONSTACK, the TRAMP argument should be the address of a field
5186 within the local function's FRAME decl. Either way, let's see if
5187 we can fill in the MEM_ATTRs for this memory. */
5188 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5189 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5190
5191 /* Creator of a heap trampoline is responsible for making sure the
5192 address is aligned to at least STACK_BOUNDARY. Normally malloc
5193 will ensure this anyhow. */
5194 tmp = round_trampoline_addr (r_tramp);
5195 if (tmp != r_tramp)
5196 {
5197 m_tramp = change_address (m_tramp, BLKmode, tmp);
5198 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5199 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5200 }
5201
5202 /* The FUNC argument should be the address of the nested function.
5203 Extract the actual function decl to pass to the hook. */
5204 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5205 t_func = TREE_OPERAND (t_func, 0);
5206 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5207
5208 r_chain = expand_normal (t_chain);
5209
5210 /* Generate insns to initialize the trampoline. */
5211 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5212
5213 if (onstack)
5214 {
5215 trampolines_created = 1;
5216
5217 if (targetm.calls.custom_function_descriptors != 0)
5218 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5219 "trampoline generated for nested function %qD", t_func);
5220 }
5221
5222 return const0_rtx;
5223 }
5224
5225 static rtx
5226 expand_builtin_adjust_trampoline (tree exp)
5227 {
5228 rtx tramp;
5229
5230 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5231 return NULL_RTX;
5232
5233 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5234 tramp = round_trampoline_addr (tramp);
5235 if (targetm.calls.trampoline_adjust_address)
5236 tramp = targetm.calls.trampoline_adjust_address (tramp);
5237
5238 return tramp;
5239 }
5240
5241 /* Expand a call to the builtin descriptor initialization routine.
5242 A descriptor is made up of a couple of pointers to the static
5243 chain and the code entry in this order. */
5244
5245 static rtx
5246 expand_builtin_init_descriptor (tree exp)
5247 {
5248 tree t_descr, t_func, t_chain;
5249 rtx m_descr, r_descr, r_func, r_chain;
5250
5251 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5252 VOID_TYPE))
5253 return NULL_RTX;
5254
5255 t_descr = CALL_EXPR_ARG (exp, 0);
5256 t_func = CALL_EXPR_ARG (exp, 1);
5257 t_chain = CALL_EXPR_ARG (exp, 2);
5258
5259 r_descr = expand_normal (t_descr);
5260 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5261 MEM_NOTRAP_P (m_descr) = 1;
5262
5263 r_func = expand_normal (t_func);
5264 r_chain = expand_normal (t_chain);
5265
5266 /* Generate insns to initialize the descriptor. */
5267 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5268 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5269 POINTER_SIZE / BITS_PER_UNIT), r_func);
5270
5271 return const0_rtx;
5272 }
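/* As laid out above, on a target with 64-bit pointers the descriptor is a
   16-byte object with the static chain pointer at offset 0 and the code
   entry point at offset 8. */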
5273
5274 /* Expand a call to the builtin descriptor adjustment routine. */
5275
5276 static rtx
5277 expand_builtin_adjust_descriptor (tree exp)
5278 {
5279 rtx tramp;
5280
5281 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5282 return NULL_RTX;
5283
5284 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5285
5286 /* Unalign the descriptor to allow runtime identification. */
5287 tramp = plus_constant (ptr_mode, tramp,
5288 targetm.calls.custom_function_descriptors);
5289
5290 return force_operand (tramp, NULL_RTX);
5291 }
5292
5293 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5294 function. The function first checks whether the back end provides
5295 an insn to implement signbit for the respective mode. If not, it
5296 checks whether the floating point format of the value is such that
5297 the sign bit can be extracted. If that is not the case, error out.
5298 EXP is the expression that is a call to the builtin function; if
5299 convenient, the result should be placed in TARGET. */
5300 static rtx
5301 expand_builtin_signbit (tree exp, rtx target)
5302 {
5303 const struct real_format *fmt;
5304 scalar_float_mode fmode;
5305 machine_mode imode, rmode;
5306 tree arg;
5307 int word, bitpos;
5308 enum insn_code icode;
5309 rtx temp;
5310 location_t loc = EXPR_LOCATION (exp);
5311
5312 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5313 return NULL_RTX;
5314
5315 arg = CALL_EXPR_ARG (exp, 0);
5316 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5317 rmode = TYPE_MODE (TREE_TYPE (exp));
5318 fmt = REAL_MODE_FORMAT (fmode);
5319
5320 arg = builtin_save_expr (arg);
5321
5322 /* Expand the argument yielding a RTX expression. */
5323 temp = expand_normal (arg);
5324
5325 /* Check if the back end provides an insn that handles signbit for the
5326 argument's mode. */
5327 icode = optab_handler (signbit_optab, fmode);
5328 if (icode != CODE_FOR_nothing)
5329 {
5330 rtx_insn *last = get_last_insn ();
5331 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5332 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5333 return target;
5334 delete_insns_since (last);
5335 }
5336
5337 /* For floating point formats without a sign bit, implement signbit
5338 as "ARG < 0.0". */
5339 bitpos = fmt->signbit_ro;
5340 if (bitpos < 0)
5341 {
5342 /* But we can't do this if the format supports signed zero. */
5343 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5344
5345 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5346 build_real (TREE_TYPE (arg), dconst0));
5347 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5348 }
5349
5350 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5351 {
5352 imode = int_mode_for_mode (fmode);
5353 gcc_assert (imode != BLKmode);
5354 temp = gen_lowpart (imode, temp);
5355 }
5356 else
5357 {
5358 imode = word_mode;
5359 /* Handle targets with different FP word orders. */
5360 if (FLOAT_WORDS_BIG_ENDIAN)
5361 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5362 else
5363 word = bitpos / BITS_PER_WORD;
5364 temp = operand_subword_force (temp, word, fmode);
5365 bitpos = bitpos % BITS_PER_WORD;
5366 }
5367
5368 /* Force the intermediate word_mode (or narrower) result into a
5369 register. This avoids attempting to create paradoxical SUBREGs
5370 of floating point modes below. */
5371 temp = force_reg (imode, temp);
5372
5373 /* If the bitpos is within the "result mode" lowpart, the operation
5374 can be implemented with a single bitwise AND. Otherwise, we need
5375 a right shift and an AND. */
5376
5377 if (bitpos < GET_MODE_BITSIZE (rmode))
5378 {
5379 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5380
5381 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5382 temp = gen_lowpart (rmode, temp);
5383 temp = expand_binop (rmode, and_optab, temp,
5384 immed_wide_int_const (mask, rmode),
5385 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5386 }
5387 else
5388 {
5389 /* Perform a logical right shift to place the signbit in the least
5390 significant bit, then truncate the result to the desired mode
5391 and mask just this bit. */
5392 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5393 temp = gen_lowpart (rmode, temp);
5394 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5395 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5396 }
5397
5398 return temp;
5399 }
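/* As a worked example of the extraction above: for IEEE double the sign is
   bit 63, so on a typical 64-bit target the argument is viewed as a DImode
   integer, logically shifted right by 63 and masked with 1 to produce the
   result. */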
5400
5401 /* Expand fork or exec calls. TARGET is the desired target of the
5402 call. EXP is the call. FN is the declaration of the actual
5403 function being called. IGNORE is nonzero if the
5404 value is to be ignored. */
5405
5406 static rtx
5407 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5408 {
5409 tree id, decl;
5410 tree call;
5411
5412 /* If we are not profiling, just call the function. */
5413 if (!profile_arc_flag)
5414 return NULL_RTX;
5415
5416 /* Otherwise call the wrapper. This should be equivalent for the rest of
5417 the compiler, so the code does not diverge, and the wrapper may run the
5418 code necessary for keeping the profiling sane. */
5419
5420 switch (DECL_FUNCTION_CODE (fn))
5421 {
5422 case BUILT_IN_FORK:
5423 id = get_identifier ("__gcov_fork");
5424 break;
5425
5426 case BUILT_IN_EXECL:
5427 id = get_identifier ("__gcov_execl");
5428 break;
5429
5430 case BUILT_IN_EXECV:
5431 id = get_identifier ("__gcov_execv");
5432 break;
5433
5434 case BUILT_IN_EXECLP:
5435 id = get_identifier ("__gcov_execlp");
5436 break;
5437
5438 case BUILT_IN_EXECLE:
5439 id = get_identifier ("__gcov_execle");
5440 break;
5441
5442 case BUILT_IN_EXECVP:
5443 id = get_identifier ("__gcov_execvp");
5444 break;
5445
5446 case BUILT_IN_EXECVE:
5447 id = get_identifier ("__gcov_execve");
5448 break;
5449
5450 default:
5451 gcc_unreachable ();
5452 }
5453
5454 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5455 FUNCTION_DECL, id, TREE_TYPE (fn));
5456 DECL_EXTERNAL (decl) = 1;
5457 TREE_PUBLIC (decl) = 1;
5458 DECL_ARTIFICIAL (decl) = 1;
5459 TREE_NOTHROW (decl) = 1;
5460 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5461 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5462 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5463 return expand_call (call, target, ignore);
5464 }
5465
5466
5467 \f
5468 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5469 the pointer in these functions is void*, the tree optimizers may remove
5470 casts. The mode computed in expand_builtin isn't reliable either, due
5471 to __sync_bool_compare_and_swap.
5472
5473 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5474 group of builtins. This gives us log2 of the mode size. */
5475
5476 static inline machine_mode
5477 get_builtin_sync_mode (int fcode_diff)
5478 {
5479 /* The size is not negotiable, so ask not to get BLKmode in return
5480 if the target indicates that a smaller size would be better. */
5481 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5482 }
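/* For example (assuming 8-bit units), __sync_fetch_and_add_4 has a
   FCODE_DIFF of 2 relative to __sync_fetch_and_add_1, so this asks for the
   integer mode of BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on most
   targets. */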
5483
5484 /* Expand the memory expression LOC and return the appropriate memory operand
5485 for the builtin_sync operations. */
5486
5487 static rtx
5488 get_builtin_sync_mem (tree loc, machine_mode mode)
5489 {
5490 rtx addr, mem;
5491
5492 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5493 addr = convert_memory_address (Pmode, addr);
5494
5495 /* Note that we explicitly do not want any alias information for this
5496 memory, so that we kill all other live memories. Otherwise we don't
5497 satisfy the full barrier semantics of the intrinsic. */
5498 mem = validize_mem (gen_rtx_MEM (mode, addr));
5499
5500 /* The alignment needs to be at least that required by the mode. */
5501 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5502 get_pointer_alignment (loc)));
5503 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5504 MEM_VOLATILE_P (mem) = 1;
5505
5506 return mem;
5507 }
5508
5509 /* Make sure an argument is in the right mode.
5510 EXP is the tree argument.
5511 MODE is the mode it should be in. */
5512
5513 static rtx
5514 expand_expr_force_mode (tree exp, machine_mode mode)
5515 {
5516 rtx val;
5517 machine_mode old_mode;
5518
5519 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5520 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5521 of CONST_INTs, where we know the old_mode only from the call argument. */
5522
5523 old_mode = GET_MODE (val);
5524 if (old_mode == VOIDmode)
5525 old_mode = TYPE_MODE (TREE_TYPE (exp));
5526 val = convert_modes (mode, old_mode, val, 1);
5527 return val;
5528 }
5529
5530
5531 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5532 EXP is the CALL_EXPR. CODE is the rtx code
5533 that corresponds to the arithmetic or logical operation from the name;
5534 an exception here is that NOT actually means NAND. TARGET is an optional
5535 place for us to store the results; AFTER is true for the xxx_and_fetch
5536 form, which returns the result of the operation rather than the old value. */
5537
5538 static rtx
5539 expand_builtin_sync_operation (machine_mode mode, tree exp,
5540 enum rtx_code code, bool after,
5541 rtx target)
5542 {
5543 rtx val, mem;
5544 location_t loc = EXPR_LOCATION (exp);
5545
5546 if (code == NOT && warn_sync_nand)
5547 {
5548 tree fndecl = get_callee_fndecl (exp);
5549 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5550
5551 static bool warned_f_a_n, warned_n_a_f;
5552
5553 switch (fcode)
5554 {
5555 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5556 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5557 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5558 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5559 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5560 if (warned_f_a_n)
5561 break;
5562
5563 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5564 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5565 warned_f_a_n = true;
5566 break;
5567
5568 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5569 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5570 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5571 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5572 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5573 if (warned_n_a_f)
5574 break;
5575
5576 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5577 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5578 warned_n_a_f = true;
5579 break;
5580
5581 default:
5582 gcc_unreachable ();
5583 }
5584 }
5585
5586 /* Expand the operands. */
5587 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5588 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5589
5590 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5591 after);
5592 }
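/* Illustrative source-level forms handled here (hypothetical names):
   __sync_fetch_and_add (&counter, 1) returns the value before the addition,
   while __sync_add_and_fetch (&counter, 1) returns the value afterwards. */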
5593
5594 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5595 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5596 true if this is the boolean form. TARGET is a place for us to store the
5597 results; this is NOT optional if IS_BOOL is true. */
5598
5599 static rtx
5600 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5601 bool is_bool, rtx target)
5602 {
5603 rtx old_val, new_val, mem;
5604 rtx *pbool, *poval;
5605
5606 /* Expand the operands. */
5607 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5608 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5609 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5610
5611 pbool = poval = NULL;
5612 if (target != const0_rtx)
5613 {
5614 if (is_bool)
5615 pbool = &target;
5616 else
5617 poval = &target;
5618 }
5619 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5620 false, MEMMODEL_SYNC_SEQ_CST,
5621 MEMMODEL_SYNC_SEQ_CST))
5622 return NULL_RTX;
5623
5624 return target;
5625 }
5626
5627 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5628 general form is actually an atomic exchange, and some targets only
5629 support a reduced form with the second argument being a constant 1.
5630 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5631 the results. */
5632
5633 static rtx
5634 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5635 rtx target)
5636 {
5637 rtx val, mem;
5638
5639 /* Expand the operands. */
5640 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5641 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5642
5643 return expand_sync_lock_test_and_set (target, mem, val);
5644 }
5645
5646 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5647
5648 static void
5649 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5650 {
5651 rtx mem;
5652
5653 /* Expand the operands. */
5654 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5655
5656 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5657 }
5658
5659 /* Given an integer representing an ``enum memmodel'', verify its
5660 correctness and return the memory model enum. */
5661
5662 static enum memmodel
5663 get_memmodel (tree exp)
5664 {
5665 rtx op;
5666 unsigned HOST_WIDE_INT val;
5667 source_location loc
5668 = expansion_point_location_if_in_system_header (input_location);
5669
5670 /* If the parameter is not a constant, it's a run time value so we'll just
5671 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5672 if (TREE_CODE (exp) != INTEGER_CST)
5673 return MEMMODEL_SEQ_CST;
5674
5675 op = expand_normal (exp);
5676
5677 val = INTVAL (op);
5678 if (targetm.memmodel_check)
5679 val = targetm.memmodel_check (val);
5680 else if (val & ~MEMMODEL_MASK)
5681 {
5682 warning_at (loc, OPT_Winvalid_memory_model,
5683 "unknown architecture specifier in memory model to builtin");
5684 return MEMMODEL_SEQ_CST;
5685 }
5686
5687 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5688 if (memmodel_base (val) >= MEMMODEL_LAST)
5689 {
5690 warning_at (loc, OPT_Winvalid_memory_model,
5691 "invalid memory model argument to builtin");
5692 return MEMMODEL_SEQ_CST;
5693 }
5694
5695 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5696 be conservative and promote consume to acquire. */
5697 if (val == MEMMODEL_CONSUME)
5698 val = MEMMODEL_ACQUIRE;
5699
5700 return (enum memmodel) val;
5701 }
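/* For example, a call such as __atomic_load_n (p, __ATOMIC_ACQUIRE) reaches
   this function with the constant __ATOMIC_ACQUIRE as EXP; a non-constant
   model argument is conservatively treated as MEMMODEL_SEQ_CST. */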
5702
5703 /* Expand the __atomic_exchange intrinsic:
5704 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5705 EXP is the CALL_EXPR.
5706 TARGET is an optional place for us to store the results. */
5707
5708 static rtx
5709 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5710 {
5711 rtx val, mem;
5712 enum memmodel model;
5713
5714 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5715
5716 if (!flag_inline_atomics)
5717 return NULL_RTX;
5718
5719 /* Expand the operands. */
5720 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5721 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5722
5723 return expand_atomic_exchange (target, mem, val, model);
5724 }
5725
5726 /* Expand the __atomic_compare_exchange intrinsic:
5727 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5728 TYPE desired, BOOL weak,
5729 enum memmodel success,
5730 enum memmodel failure)
5731 EXP is the CALL_EXPR.
5732 TARGET is an optional place for us to store the results. */
5733
5734 static rtx
5735 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5736 rtx target)
5737 {
5738 rtx expect, desired, mem, oldval;
5739 rtx_code_label *label;
5740 enum memmodel success, failure;
5741 tree weak;
5742 bool is_weak;
5743 source_location loc
5744 = expansion_point_location_if_in_system_header (input_location);
5745
5746 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5747 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5748
5749 if (failure > success)
5750 {
5751 warning_at (loc, OPT_Winvalid_memory_model,
5752 "failure memory model cannot be stronger than success "
5753 "memory model for %<__atomic_compare_exchange%>");
5754 success = MEMMODEL_SEQ_CST;
5755 }
5756
5757 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5758 {
5759 warning_at (loc, OPT_Winvalid_memory_model,
5760 "invalid failure memory model for "
5761 "%<__atomic_compare_exchange%>");
5762 failure = MEMMODEL_SEQ_CST;
5763 success = MEMMODEL_SEQ_CST;
5764 }
5765
5766
5767 if (!flag_inline_atomics)
5768 return NULL_RTX;
5769
5770 /* Expand the operands. */
5771 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5772
5773 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5774 expect = convert_memory_address (Pmode, expect);
5775 expect = gen_rtx_MEM (mode, expect);
5776 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5777
5778 weak = CALL_EXPR_ARG (exp, 3);
5779 is_weak = false;
5780 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5781 is_weak = true;
5782
5783 if (target == const0_rtx)
5784 target = NULL;
5785
5786 /* Lest the rtl backend create a race condition with an improper store
5787 to memory, always create a new pseudo for OLDVAL. */
5788 oldval = NULL;
5789
5790 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5791 is_weak, success, failure))
5792 return NULL_RTX;
5793
5794 /* Conditionally store back to EXPECT, lest we create a race condition
5795 with an improper store to memory. */
5796 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5797 the normal case where EXPECT is totally private, i.e. a register. At
5798 which point the store can be unconditional. */
5799 label = gen_label_rtx ();
5800 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5801 GET_MODE (target), 1, label);
5802 emit_move_insn (expect, oldval);
5803 emit_label (label);
5804
5805 return target;
5806 }
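/* Illustrative source-level use handled above (hypothetical names):

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&lock, &expected, 1, false,
                                            __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);

   On failure EXPECTED is updated with the value seen in memory, which is
   why the conditional store back to EXPECT is emitted above. */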
5807
5808 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5809 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5810 call. The weak parameter must be dropped to match the expected parameter
5811 list and the expected argument changed from value to pointer to memory
5812 slot. */
5813
5814 static void
5815 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5816 {
5817 unsigned int z;
5818 vec<tree, va_gc> *vec;
5819
5820 vec_alloc (vec, 5);
5821 vec->quick_push (gimple_call_arg (call, 0));
5822 tree expected = gimple_call_arg (call, 1);
5823 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5824 TREE_TYPE (expected));
5825 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5826 if (expd != x)
5827 emit_move_insn (x, expd);
5828 tree v = make_tree (TREE_TYPE (expected), x);
5829 vec->quick_push (build1 (ADDR_EXPR,
5830 build_pointer_type (TREE_TYPE (expected)), v));
5831 vec->quick_push (gimple_call_arg (call, 2));
5832 /* Skip the boolean weak parameter. */
5833 for (z = 4; z < 6; z++)
5834 vec->quick_push (gimple_call_arg (call, z));
5835 built_in_function fncode
5836 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5837 + exact_log2 (GET_MODE_SIZE (mode)));
5838 tree fndecl = builtin_decl_explicit (fncode);
5839 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5840 fndecl);
5841 tree exp = build_call_vec (boolean_type_node, fn, vec);
5842 tree lhs = gimple_call_lhs (call);
5843 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5844 if (lhs)
5845 {
5846 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5847 if (GET_MODE (boolret) != mode)
5848 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5849 x = force_reg (mode, x);
5850 write_complex_part (target, boolret, true);
5851 write_complex_part (target, x, false);
5852 }
5853 }
5854
5855 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5856
5857 void
5858 expand_ifn_atomic_compare_exchange (gcall *call)
5859 {
5860 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5861 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5862 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5863 rtx expect, desired, mem, oldval, boolret;
5864 enum memmodel success, failure;
5865 tree lhs;
5866 bool is_weak;
5867 source_location loc
5868 = expansion_point_location_if_in_system_header (gimple_location (call));
5869
5870 success = get_memmodel (gimple_call_arg (call, 4));
5871 failure = get_memmodel (gimple_call_arg (call, 5));
5872
5873 if (failure > success)
5874 {
5875 warning_at (loc, OPT_Winvalid_memory_model,
5876 "failure memory model cannot be stronger than success "
5877 "memory model for %<__atomic_compare_exchange%>");
5878 success = MEMMODEL_SEQ_CST;
5879 }
5880
5881 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5882 {
5883 warning_at (loc, OPT_Winvalid_memory_model,
5884 "invalid failure memory model for "
5885 "%<__atomic_compare_exchange%>");
5886 failure = MEMMODEL_SEQ_CST;
5887 success = MEMMODEL_SEQ_CST;
5888 }
5889
5890 if (!flag_inline_atomics)
5891 {
5892 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5893 return;
5894 }
5895
5896 /* Expand the operands. */
5897 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5898
5899 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5900 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5901
5902 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5903
5904 boolret = NULL;
5905 oldval = NULL;
5906
5907 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5908 is_weak, success, failure))
5909 {
5910 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5911 return;
5912 }
5913
5914 lhs = gimple_call_lhs (call);
5915 if (lhs)
5916 {
5917 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5918 if (GET_MODE (boolret) != mode)
5919 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5920 write_complex_part (target, boolret, true);
5921 write_complex_part (target, oldval, false);
5922 }
5923 }
5924
5925 /* Expand the __atomic_load intrinsic:
5926 TYPE __atomic_load (TYPE *object, enum memmodel)
5927 EXP is the CALL_EXPR.
5928 TARGET is an optional place for us to store the results. */
5929
5930 static rtx
5931 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5932 {
5933 rtx mem;
5934 enum memmodel model;
5935
5936 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5937 if (is_mm_release (model) || is_mm_acq_rel (model))
5938 {
5939 source_location loc
5940 = expansion_point_location_if_in_system_header (input_location);
5941 warning_at (loc, OPT_Winvalid_memory_model,
5942 "invalid memory model for %<__atomic_load%>");
5943 model = MEMMODEL_SEQ_CST;
5944 }
5945
5946 if (!flag_inline_atomics)
5947 return NULL_RTX;
5948
5949 /* Expand the operand. */
5950 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5951
5952 return expand_atomic_load (target, mem, model);
5953 }
5954
5955
5956 /* Expand the __atomic_store intrinsic:
5957 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5958 EXP is the CALL_EXPR.
5959 TARGET is an optional place for us to store the results. */
5960
5961 static rtx
5962 expand_builtin_atomic_store (machine_mode mode, tree exp)
5963 {
5964 rtx mem, val;
5965 enum memmodel model;
5966
5967 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5968 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5969 || is_mm_release (model)))
5970 {
5971 source_location loc
5972 = expansion_point_location_if_in_system_header (input_location);
5973 warning_at (loc, OPT_Winvalid_memory_model,
5974 "invalid memory model for %<__atomic_store%>");
5975 model = MEMMODEL_SEQ_CST;
5976 }
5977
5978 if (!flag_inline_atomics)
5979 return NULL_RTX;
5980
5981 /* Expand the operands. */
5982 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5983 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5984
5985 return expand_atomic_store (mem, val, model, false);
5986 }
5987
5988 /* Expand the __atomic_fetch_XXX intrinsic:
5989 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5990 EXP is the CALL_EXPR.
5991 TARGET is an optional place for us to store the results.
5992 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5993 FETCH_AFTER is true if returning the result of the operation.
5994 FETCH_AFTER is false if returning the value before the operation.
5995 IGNORE is true if the result is not used.
5996 EXT_CALL is the correct builtin for an external call if this cannot be
5997 resolved to an instruction sequence. */
5998
5999 static rtx
6000 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6001 enum rtx_code code, bool fetch_after,
6002 bool ignore, enum built_in_function ext_call)
6003 {
6004 rtx val, mem, ret;
6005 enum memmodel model;
6006 tree fndecl;
6007 tree addr;
6008
6009 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6010
6011 /* Expand the operands. */
6012 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6013 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6014
6015 /* Only try generating instructions if inlining is turned on. */
6016 if (flag_inline_atomics)
6017 {
6018 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6019 if (ret)
6020 return ret;
6021 }
6022
6023 /* Return if a different routine isn't needed for the library call. */
6024 if (ext_call == BUILT_IN_NONE)
6025 return NULL_RTX;
6026
6027 /* Change the call to the specified function. */
6028 fndecl = get_callee_fndecl (exp);
6029 addr = CALL_EXPR_FN (exp);
6030 STRIP_NOPS (addr);
6031
6032 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6033 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6034
6035 /* If we will emit code after the call, the call cannot be a tail call.
6036 If it is emitted as a tail call, a barrier is emitted after it, and
6037 then all trailing code is removed. */
6038 if (!ignore)
6039 CALL_EXPR_TAILCALL (exp) = 0;
6040
6041 /* Expand the call here so we can emit trailing code. */
6042 ret = expand_call (exp, target, ignore);
6043
6044 /* Replace the original function just in case it matters. */
6045 TREE_OPERAND (addr, 0) = fndecl;
6046
6047 /* Then issue the arithmetic correction to return the right result. */
6048 if (!ignore)
6049 {
6050 if (code == NOT)
6051 {
6052 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6053 OPTAB_LIB_WIDEN);
6054 ret = expand_simple_unop (mode, NOT, ret, target, true);
6055 }
6056 else
6057 ret = expand_simple_binop (mode, code, ret, val, target, true,
6058 OPTAB_LIB_WIDEN);
6059 }
6060 return ret;
6061 }
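/* As an example of the correction above: if, say, __atomic_add_fetch cannot
   be inlined, the external __atomic_fetch_add call returns the value before
   the addition, so VAL is added back to recover the post-operation result;
   for the NAND case the old value is combined as ~(ret & val) instead. */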
6062
6063 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6064
6065 void
6066 expand_ifn_atomic_bit_test_and (gcall *call)
6067 {
6068 tree ptr = gimple_call_arg (call, 0);
6069 tree bit = gimple_call_arg (call, 1);
6070 tree flag = gimple_call_arg (call, 2);
6071 tree lhs = gimple_call_lhs (call);
6072 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6073 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6074 enum rtx_code code;
6075 optab optab;
6076 struct expand_operand ops[5];
6077
6078 gcc_assert (flag_inline_atomics);
6079
6080 if (gimple_call_num_args (call) == 4)
6081 model = get_memmodel (gimple_call_arg (call, 3));
6082
6083 rtx mem = get_builtin_sync_mem (ptr, mode);
6084 rtx val = expand_expr_force_mode (bit, mode);
6085
6086 switch (gimple_call_internal_fn (call))
6087 {
6088 case IFN_ATOMIC_BIT_TEST_AND_SET:
6089 code = IOR;
6090 optab = atomic_bit_test_and_set_optab;
6091 break;
6092 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6093 code = XOR;
6094 optab = atomic_bit_test_and_complement_optab;
6095 break;
6096 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6097 code = AND;
6098 optab = atomic_bit_test_and_reset_optab;
6099 break;
6100 default:
6101 gcc_unreachable ();
6102 }
6103
6104 if (lhs == NULL_TREE)
6105 {
6106 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6107 val, NULL_RTX, true, OPTAB_DIRECT);
6108 if (code == AND)
6109 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6110 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6111 return;
6112 }
6113
6114 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6115 enum insn_code icode = direct_optab_handler (optab, mode);
6116 gcc_assert (icode != CODE_FOR_nothing);
6117 create_output_operand (&ops[0], target, mode);
6118 create_fixed_operand (&ops[1], mem);
6119 create_convert_operand_to (&ops[2], val, mode, true);
6120 create_integer_operand (&ops[3], model);
6121 create_integer_operand (&ops[4], integer_onep (flag));
6122 if (maybe_expand_insn (icode, 5, ops))
6123 return;
6124
6125 rtx bitval = val;
6126 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6127 val, NULL_RTX, true, OPTAB_DIRECT);
6128 rtx maskval = val;
6129 if (code == AND)
6130 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6131 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6132 code, model, false);
6133 if (integer_onep (flag))
6134 {
6135 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6136 NULL_RTX, true, OPTAB_DIRECT);
6137 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6138 true, OPTAB_DIRECT);
6139 }
6140 else
6141 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6142 OPTAB_DIRECT);
6143 if (result != target)
6144 emit_move_insn (target, result);
6145 }
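/* Illustratively, an earlier gimple pass turns a source pattern of roughly
   this shape (hypothetical names) into IFN_ATOMIC_BIT_TEST_AND_SET, which
   is then expanded above:

     if (__atomic_fetch_or (&flags, 1 << bit, __ATOMIC_SEQ_CST) & (1 << bit))
       already_set ();  */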
6146
6147 /* Expand an atomic clear operation.
6148 void _atomic_clear (BOOL *obj, enum memmodel)
6149 EXP is the call expression. */
6150
6151 static rtx
6152 expand_builtin_atomic_clear (tree exp)
6153 {
6154 machine_mode mode;
6155 rtx mem, ret;
6156 enum memmodel model;
6157
6158 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6159 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6160 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6161
6162 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6163 {
6164 source_location loc
6165 = expansion_point_location_if_in_system_header (input_location);
6166 warning_at (loc, OPT_Winvalid_memory_model,
6167 "invalid memory model for %<__atomic_store%>");
6168 model = MEMMODEL_SEQ_CST;
6169 }
6170
6171 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6172 Failing that, a store is issued by __atomic_store. The only way this can
6173 fail is if the bool type is larger than a word size. Unlikely, but
6174 handle it anyway for completeness. Assume a single threaded model since
6175 there is no atomic support in this case, and no barriers are required. */
6176 ret = expand_atomic_store (mem, const0_rtx, model, true);
6177 if (!ret)
6178 emit_move_insn (mem, const0_rtx);
6179 return const0_rtx;
6180 }
6181
6182 /* Expand an atomic test_and_set operation.
6183 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6184 EXP is the call expression. */
6185
6186 static rtx
6187 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6188 {
6189 rtx mem;
6190 enum memmodel model;
6191 machine_mode mode;
6192
6193 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6194 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6195 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6196
6197 return expand_atomic_test_and_set (target, mem, model);
6198 }
6199
6200
6201 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6202 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6203
6204 static tree
6205 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6206 {
6207 int size;
6208 machine_mode mode;
6209 unsigned int mode_align, type_align;
6210
6211 if (TREE_CODE (arg0) != INTEGER_CST)
6212 return NULL_TREE;
6213
6214 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6215 mode = mode_for_size (size, MODE_INT, 0);
6216 mode_align = GET_MODE_ALIGNMENT (mode);
6217
6218 if (TREE_CODE (arg1) == INTEGER_CST)
6219 {
6220 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6221
6222 /* Either this argument is null, or it's a fake pointer encoding
6223 the alignment of the object. */
6224 val = least_bit_hwi (val);
6225 val *= BITS_PER_UNIT;
6226
6227 if (val == 0 || mode_align < val)
6228 type_align = mode_align;
6229 else
6230 type_align = val;
6231 }
6232 else
6233 {
6234 tree ttype = TREE_TYPE (arg1);
6235
6236 /* This function is usually invoked and folded immediately by the front
6237 end before anything else has a chance to look at it. The pointer
6238 parameter at this point is usually cast to a void *, so check for that
6239 and look past the cast. */
6240 if (CONVERT_EXPR_P (arg1)
6241 && POINTER_TYPE_P (ttype)
6242 && VOID_TYPE_P (TREE_TYPE (ttype))
6243 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6244 arg1 = TREE_OPERAND (arg1, 0);
6245
6246 ttype = TREE_TYPE (arg1);
6247 gcc_assert (POINTER_TYPE_P (ttype));
6248
6249 /* Get the underlying type of the object. */
6250 ttype = TREE_TYPE (ttype);
6251 type_align = TYPE_ALIGN (ttype);
6252 }
6253
6254 /* If the object has smaller alignment, the lock free routines cannot
6255 be used. */
6256 if (type_align < mode_align)
6257 return boolean_false_node;
6258
6259 /* Check if a compare_and_swap pattern exists for the mode which represents
6260 the required size. The pattern is not allowed to fail, so the existence
6261 of the pattern indicates support is present. Also require that an
6262 atomic load exists for the required size. */
6263 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6264 return boolean_true_node;
6265 else
6266 return boolean_false_node;
6267 }
6268
6269 /* Return true if the parameters to call EXP represent an object which will
6270 always generate lock free instructions. The first argument represents the
6271 size of the object, and the second parameter is a pointer to the object
6272 itself. If NULL is passed for the object, then the result is based on
6273 typical alignment for an object of the specified size. Otherwise return
6274 false. */
6275
6276 static rtx
6277 expand_builtin_atomic_always_lock_free (tree exp)
6278 {
6279 tree size;
6280 tree arg0 = CALL_EXPR_ARG (exp, 0);
6281 tree arg1 = CALL_EXPR_ARG (exp, 1);
6282
6283 if (TREE_CODE (arg0) != INTEGER_CST)
6284 {
6285 error ("non-constant argument 1 to __atomic_always_lock_free");
6286 return const0_rtx;
6287 }
6288
6289 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6290 if (size == boolean_true_node)
6291 return const1_rtx;
6292 return const0_rtx;
6293 }
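/* For example, __atomic_always_lock_free (sizeof (int), 0) folds to 1 on a
   target that provides compare-and-swap and atomic load patterns for the
   corresponding integer mode; passing a real object pointer instead of 0
   also takes its alignment into account. */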
6294
6295 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6296 is lock free on this architecture. */
6297
6298 static tree
6299 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6300 {
6301 if (!flag_inline_atomics)
6302 return NULL_TREE;
6303
6304 /* If it isn't always lock free, don't generate a result. */
6305 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6306 return boolean_true_node;
6307
6308 return NULL_TREE;
6309 }
6310
6311 /* Return true if the parameters to call EXP represent an object which will
6312 always generate lock free instructions. The first argument represents the
6313 size of the object, and the second parameter is a pointer to the object
6314 itself. If NULL is passed for the object, then the result is based on
6315 typical alignment for an object of the specified size. Otherwise return
6316 NULL. */
6317
6318 static rtx
6319 expand_builtin_atomic_is_lock_free (tree exp)
6320 {
6321 tree size;
6322 tree arg0 = CALL_EXPR_ARG (exp, 0);
6323 tree arg1 = CALL_EXPR_ARG (exp, 1);
6324
6325 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6326 {
6327 error ("non-integer argument 1 to __atomic_is_lock_free");
6328 return NULL_RTX;
6329 }
6330
6331 if (!flag_inline_atomics)
6332 return NULL_RTX;
6333
6334 /* If the value is known at compile time, return the RTX for it. */
6335 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6336 if (size == boolean_true_node)
6337 return const1_rtx;
6338
6339 return NULL_RTX;
6340 }
6341
6342 /* Expand the __atomic_thread_fence intrinsic:
6343 void __atomic_thread_fence (enum memmodel)
6344 EXP is the CALL_EXPR. */
6345
6346 static void
6347 expand_builtin_atomic_thread_fence (tree exp)
6348 {
6349 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6350 expand_mem_thread_fence (model);
6351 }
6352
6353 /* Expand the __atomic_signal_fence intrinsic:
6354 void __atomic_signal_fence (enum memmodel)
6355 EXP is the CALL_EXPR. */
6356
6357 static void
6358 expand_builtin_atomic_signal_fence (tree exp)
6359 {
6360 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6361 expand_mem_signal_fence (model);
6362 }
6363
6364 /* Expand the __sync_synchronize intrinsic. */
6365
6366 static void
6367 expand_builtin_sync_synchronize (void)
6368 {
6369 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6370 }
6371
6372 static rtx
6373 expand_builtin_thread_pointer (tree exp, rtx target)
6374 {
6375 enum insn_code icode;
6376 if (!validate_arglist (exp, VOID_TYPE))
6377 return const0_rtx;
6378 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6379 if (icode != CODE_FOR_nothing)
6380 {
6381 struct expand_operand op;
6382 /* If the target is not suitable then create a new target. */
6383 if (target == NULL_RTX
6384 || !REG_P (target)
6385 || GET_MODE (target) != Pmode)
6386 target = gen_reg_rtx (Pmode);
6387 create_output_operand (&op, target, Pmode);
6388 expand_insn (icode, 1, &op);
6389 return target;
6390 }
6391 error ("__builtin_thread_pointer is not supported on this target");
6392 return const0_rtx;
6393 }
6394
6395 static void
6396 expand_builtin_set_thread_pointer (tree exp)
6397 {
6398 enum insn_code icode;
6399 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6400 return;
6401 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6402 if (icode != CODE_FOR_nothing)
6403 {
6404 struct expand_operand op;
6405 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6406 Pmode, EXPAND_NORMAL);
6407 create_input_operand (&op, val, Pmode);
6408 expand_insn (icode, 1, &op);
6409 return;
6410 }
6411 error ("__builtin_set_thread_pointer is not supported on this target");
6412 }
6413
6414 \f
6415 /* Emit code to restore the current value of stack. */
6416
6417 static void
6418 expand_stack_restore (tree var)
6419 {
6420 rtx_insn *prev;
6421 rtx sa = expand_normal (var);
6422
6423 sa = convert_memory_address (Pmode, sa);
6424
6425 prev = get_last_insn ();
6426 emit_stack_restore (SAVE_BLOCK, sa);
6427
6428 record_new_stack_level ();
6429
6430 fixup_args_size_notes (prev, get_last_insn (), 0);
6431 }
6432
6433 /* Emit code to save the current value of stack. */
6434
6435 static rtx
6436 expand_stack_save (void)
6437 {
6438 rtx ret = NULL_RTX;
6439
6440 emit_stack_save (SAVE_BLOCK, &ret);
6441 return ret;
6442 }
6443
6444
6445 /* Expand an expression EXP that calls a built-in function,
6446 with result going to TARGET if that's convenient
6447 (and in mode MODE if that's convenient).
6448 SUBTARGET may be used as the target for computing one of EXP's operands.
6449 IGNORE is nonzero if the value is to be ignored. */
6450
6451 rtx
6452 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6453 int ignore)
6454 {
6455 tree fndecl = get_callee_fndecl (exp);
6456 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6457 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6458 int flags;
6459
6460 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6461 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6462
6463 /* When ASan is enabled, we don't want to expand some memory/string
6464 builtins and rely on libsanitizer's hooks. This allows us to avoid
6465 redundant checks and be sure that a possible overflow will be detected
6466 by ASan. */
6467
6468 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6469 return expand_call (exp, target, ignore);
6470
6471 /* When not optimizing, generate calls to library functions for a certain
6472 set of builtins. */
6473 if (!optimize
6474 && !called_as_built_in (fndecl)
6475 && fcode != BUILT_IN_FORK
6476 && fcode != BUILT_IN_EXECL
6477 && fcode != BUILT_IN_EXECV
6478 && fcode != BUILT_IN_EXECLP
6479 && fcode != BUILT_IN_EXECLE
6480 && fcode != BUILT_IN_EXECVP
6481 && fcode != BUILT_IN_EXECVE
6482 && fcode != BUILT_IN_ALLOCA
6483 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6484 && fcode != BUILT_IN_FREE
6485 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6486 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6487 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6488 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6489 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6490 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6491 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6492 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6493 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6494 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6495 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6496 && fcode != BUILT_IN_CHKP_BNDRET)
6497 return expand_call (exp, target, ignore);
6498
6499 /* The built-in function expanders test for target == const0_rtx
6500 to determine whether the function's result will be ignored. */
6501 if (ignore)
6502 target = const0_rtx;
6503
6504 /* If the result of a pure or const built-in function is ignored, and
6505 none of its arguments are volatile, we can avoid expanding the
6506 built-in call and just evaluate the arguments for side-effects. */
6507 if (target == const0_rtx
6508 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6509 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6510 {
6511 bool volatilep = false;
6512 tree arg;
6513 call_expr_arg_iterator iter;
6514
6515 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6516 if (TREE_THIS_VOLATILE (arg))
6517 {
6518 volatilep = true;
6519 break;
6520 }
6521
6522 if (! volatilep)
6523 {
6524 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6525 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6526 return const0_rtx;
6527 }
6528 }
6529
6530 /* expand_builtin_with_bounds is supposed to be used for
6531 instrumented builtin calls. */
6532 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6533
6534 switch (fcode)
6535 {
6536 CASE_FLT_FN (BUILT_IN_FABS):
6537 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6538 case BUILT_IN_FABSD32:
6539 case BUILT_IN_FABSD64:
6540 case BUILT_IN_FABSD128:
6541 target = expand_builtin_fabs (exp, target, subtarget);
6542 if (target)
6543 return target;
6544 break;
6545
6546 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6547 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6548 target = expand_builtin_copysign (exp, target, subtarget);
6549 if (target)
6550 return target;
6551 break;
6552
6553 /* Just do a normal library call if we were unable to fold
6554 the values. */
6555 CASE_FLT_FN (BUILT_IN_CABS):
6556 break;
6557
6558 CASE_FLT_FN (BUILT_IN_FMA):
6559 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6560 if (target)
6561 return target;
6562 break;
6563
6564 CASE_FLT_FN (BUILT_IN_ILOGB):
6565 if (! flag_unsafe_math_optimizations)
6566 break;
6567 gcc_fallthrough ();
6568 CASE_FLT_FN (BUILT_IN_ISINF):
6569 CASE_FLT_FN (BUILT_IN_FINITE):
6570 case BUILT_IN_ISFINITE:
6571 case BUILT_IN_ISNORMAL:
6572 target = expand_builtin_interclass_mathfn (exp, target);
6573 if (target)
6574 return target;
6575 break;
6576
6577 CASE_FLT_FN (BUILT_IN_ICEIL):
6578 CASE_FLT_FN (BUILT_IN_LCEIL):
6579 CASE_FLT_FN (BUILT_IN_LLCEIL):
6580 CASE_FLT_FN (BUILT_IN_LFLOOR):
6581 CASE_FLT_FN (BUILT_IN_IFLOOR):
6582 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6583 target = expand_builtin_int_roundingfn (exp, target);
6584 if (target)
6585 return target;
6586 break;
6587
6588 CASE_FLT_FN (BUILT_IN_IRINT):
6589 CASE_FLT_FN (BUILT_IN_LRINT):
6590 CASE_FLT_FN (BUILT_IN_LLRINT):
6591 CASE_FLT_FN (BUILT_IN_IROUND):
6592 CASE_FLT_FN (BUILT_IN_LROUND):
6593 CASE_FLT_FN (BUILT_IN_LLROUND):
6594 target = expand_builtin_int_roundingfn_2 (exp, target);
6595 if (target)
6596 return target;
6597 break;
6598
6599 CASE_FLT_FN (BUILT_IN_POWI):
6600 target = expand_builtin_powi (exp, target);
6601 if (target)
6602 return target;
6603 break;
6604
6605 CASE_FLT_FN (BUILT_IN_CEXPI):
6606 target = expand_builtin_cexpi (exp, target);
6607 gcc_assert (target);
6608 return target;
6609
6610 CASE_FLT_FN (BUILT_IN_SIN):
6611 CASE_FLT_FN (BUILT_IN_COS):
6612 if (! flag_unsafe_math_optimizations)
6613 break;
6614 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6615 if (target)
6616 return target;
6617 break;
6618
6619 CASE_FLT_FN (BUILT_IN_SINCOS):
6620 if (! flag_unsafe_math_optimizations)
6621 break;
6622 target = expand_builtin_sincos (exp);
6623 if (target)
6624 return target;
6625 break;
6626
6627 case BUILT_IN_APPLY_ARGS:
6628 return expand_builtin_apply_args ();
6629
6630 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6631 FUNCTION with a copy of the parameters described by
6632 ARGUMENTS, and ARGSIZE. It returns a block of memory
6633 allocated on the stack into which is stored all the registers
6634 that might possibly be used for returning the result of a
6635 function. ARGUMENTS is the value returned by
6636 __builtin_apply_args. ARGSIZE is the number of bytes of
6637 arguments that must be copied. ??? How should this value be
6638 computed? We'll also need a safe worst case value for varargs
6639 functions. */
6640 case BUILT_IN_APPLY:
6641 if (!validate_arglist (exp, POINTER_TYPE,
6642 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6643 && !validate_arglist (exp, REFERENCE_TYPE,
6644 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6645 return const0_rtx;
6646 else
6647 {
6648 rtx ops[3];
6649
6650 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6651 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6652 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6653
6654 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6655 }
6656
6657 /* __builtin_return (RESULT) causes the function to return the
6658 value described by RESULT. RESULT is address of the block of
6659 memory returned by __builtin_apply. */
6660 case BUILT_IN_RETURN:
6661 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6662 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6663 return const0_rtx;
6664
6665 case BUILT_IN_SAVEREGS:
6666 return expand_builtin_saveregs ();
6667
6668 case BUILT_IN_VA_ARG_PACK:
6669 /* All valid uses of __builtin_va_arg_pack () are removed during
6670 inlining. */
6671 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6672 return const0_rtx;
6673
6674 case BUILT_IN_VA_ARG_PACK_LEN:
6675 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6676 inlining. */
6677 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6678 return const0_rtx;
6679
6680 /* Return the address of the first anonymous stack arg. */
6681 case BUILT_IN_NEXT_ARG:
6682 if (fold_builtin_next_arg (exp, false))
6683 return const0_rtx;
6684 return expand_builtin_next_arg ();
6685
6686 case BUILT_IN_CLEAR_CACHE:
6687 target = expand_builtin___clear_cache (exp);
6688 if (target)
6689 return target;
6690 break;
6691
6692 case BUILT_IN_CLASSIFY_TYPE:
6693 return expand_builtin_classify_type (exp);
6694
6695 case BUILT_IN_CONSTANT_P:
6696 return const0_rtx;
6697
6698 case BUILT_IN_FRAME_ADDRESS:
6699 case BUILT_IN_RETURN_ADDRESS:
6700 return expand_builtin_frame_address (fndecl, exp);
6701
6702 /* Returns the address of the area where the structure is returned.
6703 0 otherwise. */
6704 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6705 if (call_expr_nargs (exp) != 0
6706 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6707 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6708 return const0_rtx;
6709 else
6710 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6711
6712 case BUILT_IN_ALLOCA:
6713 case BUILT_IN_ALLOCA_WITH_ALIGN:
6714 target = expand_builtin_alloca (exp);
6715 if (target)
6716 return target;
6717 break;
6718
6719 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6720 return expand_asan_emit_allocas_unpoison (exp);
6721
6722 case BUILT_IN_STACK_SAVE:
6723 return expand_stack_save ();
6724
6725 case BUILT_IN_STACK_RESTORE:
6726 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6727 return const0_rtx;
6728
6729 case BUILT_IN_BSWAP16:
6730 case BUILT_IN_BSWAP32:
6731 case BUILT_IN_BSWAP64:
6732 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6733 if (target)
6734 return target;
6735 break;
6736
6737 CASE_INT_FN (BUILT_IN_FFS):
6738 target = expand_builtin_unop (target_mode, exp, target,
6739 subtarget, ffs_optab);
6740 if (target)
6741 return target;
6742 break;
6743
6744 CASE_INT_FN (BUILT_IN_CLZ):
6745 target = expand_builtin_unop (target_mode, exp, target,
6746 subtarget, clz_optab);
6747 if (target)
6748 return target;
6749 break;
6750
6751 CASE_INT_FN (BUILT_IN_CTZ):
6752 target = expand_builtin_unop (target_mode, exp, target,
6753 subtarget, ctz_optab);
6754 if (target)
6755 return target;
6756 break;
6757
6758 CASE_INT_FN (BUILT_IN_CLRSB):
6759 target = expand_builtin_unop (target_mode, exp, target,
6760 subtarget, clrsb_optab);
6761 if (target)
6762 return target;
6763 break;
6764
6765 CASE_INT_FN (BUILT_IN_POPCOUNT):
6766 target = expand_builtin_unop (target_mode, exp, target,
6767 subtarget, popcount_optab);
6768 if (target)
6769 return target;
6770 break;
6771
6772 CASE_INT_FN (BUILT_IN_PARITY):
6773 target = expand_builtin_unop (target_mode, exp, target,
6774 subtarget, parity_optab);
6775 if (target)
6776 return target;
6777 break;
6778
6779 case BUILT_IN_STRLEN:
6780 target = expand_builtin_strlen (exp, target, target_mode);
6781 if (target)
6782 return target;
6783 break;
6784
6785 case BUILT_IN_STRCAT:
6786 target = expand_builtin_strcat (exp, target);
6787 if (target)
6788 return target;
6789 break;
6790
6791 case BUILT_IN_STRCPY:
6792 target = expand_builtin_strcpy (exp, target);
6793 if (target)
6794 return target;
6795 break;
6796
6797 case BUILT_IN_STRNCAT:
6798 target = expand_builtin_strncat (exp, target);
6799 if (target)
6800 return target;
6801 break;
6802
6803 case BUILT_IN_STRNCPY:
6804 target = expand_builtin_strncpy (exp, target);
6805 if (target)
6806 return target;
6807 break;
6808
6809 case BUILT_IN_STPCPY:
6810 target = expand_builtin_stpcpy (exp, target, mode);
6811 if (target)
6812 return target;
6813 break;
6814
6815 case BUILT_IN_STPNCPY:
6816 target = expand_builtin_stpncpy (exp, target);
6817 if (target)
6818 return target;
6819 break;
6820
6821 case BUILT_IN_MEMCHR:
6822 target = expand_builtin_memchr (exp, target);
6823 if (target)
6824 return target;
6825 break;
6826
6827 case BUILT_IN_MEMCPY:
6828 target = expand_builtin_memcpy (exp, target);
6829 if (target)
6830 return target;
6831 break;
6832
6833 case BUILT_IN_MEMMOVE:
6834 target = expand_builtin_memmove (exp, target);
6835 if (target)
6836 return target;
6837 break;
6838
6839 case BUILT_IN_MEMPCPY:
6840 target = expand_builtin_mempcpy (exp, target);
6841 if (target)
6842 return target;
6843 break;
6844
6845 case BUILT_IN_MEMSET:
6846 target = expand_builtin_memset (exp, target, mode);
6847 if (target)
6848 return target;
6849 break;
6850
6851 case BUILT_IN_BZERO:
6852 target = expand_builtin_bzero (exp);
6853 if (target)
6854 return target;
6855 break;
6856
6857 case BUILT_IN_STRCMP:
6858 target = expand_builtin_strcmp (exp, target);
6859 if (target)
6860 return target;
6861 break;
6862
6863 case BUILT_IN_STRNCMP:
6864 target = expand_builtin_strncmp (exp, target, mode);
6865 if (target)
6866 return target;
6867 break;
6868
6869 case BUILT_IN_BCMP:
6870 case BUILT_IN_MEMCMP:
6871 case BUILT_IN_MEMCMP_EQ:
6872 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6873 if (target)
6874 return target;
6875 if (fcode == BUILT_IN_MEMCMP_EQ)
6876 {
6877 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6878 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6879 }
6880 break;
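	/* Editorial note (illustration only, not part of the original source):
	   BUILT_IN_MEMCMP_EQ is an internal variant used when the result is only
	   compared against zero.  If it cannot be expanded inline, the callee is
	   redirected above to the ordinary memcmp declaration, so the fallback
	   library call emitted at the end of this function links against the
	   real memcmp symbol.  */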
6881
6882 case BUILT_IN_SETJMP:
6883 /* This should have been lowered to the builtins below. */
6884 gcc_unreachable ();
6885
6886 case BUILT_IN_SETJMP_SETUP:
6887 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6888 and the receiver label. */
6889 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6890 {
6891 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6892 VOIDmode, EXPAND_NORMAL);
6893 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6894 rtx_insn *label_r = label_rtx (label);
6895
6896 /* This is copied from the handling of non-local gotos. */
6897 expand_builtin_setjmp_setup (buf_addr, label_r);
6898 nonlocal_goto_handler_labels
6899 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6900 nonlocal_goto_handler_labels);
6901 /* ??? Do not let expand_label treat us as such since we would
6902 not want to be both on the list of non-local labels and on
6903 the list of forced labels. */
6904 FORCED_LABEL (label) = 0;
6905 return const0_rtx;
6906 }
6907 break;
6908
6909 case BUILT_IN_SETJMP_RECEIVER:
6910 /* __builtin_setjmp_receiver is passed the receiver label. */
6911 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6912 {
6913 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6914 rtx_insn *label_r = label_rtx (label);
6915
6916 expand_builtin_setjmp_receiver (label_r);
6917 return const0_rtx;
6918 }
6919 break;
6920
6921 /* __builtin_longjmp is passed a pointer to an array of five words.
6922 It's similar to the C library longjmp function but works with
6923 __builtin_setjmp above. */
6924 case BUILT_IN_LONGJMP:
6925 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6926 {
6927 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6928 VOIDmode, EXPAND_NORMAL);
6929 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6930
6931 if (value != const1_rtx)
6932 {
6933 error ("%<__builtin_longjmp%> second argument must be 1");
6934 return const0_rtx;
6935 }
6936
6937 expand_builtin_longjmp (buf_addr, value);
6938 return const0_rtx;
6939 }
6940 break;
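	/* Editorial sketch (not part of the original source): the only supported
	   shape for this low-level builtin pair is roughly

	       void *buf[5];                  /* five-word jump buffer */
	       if (__builtin_setjmp (buf) == 0)
	         do_work ();                  /* may call __builtin_longjmp (buf, 1) */
	       else
	         recover ();                  /* reached after the longjmp */

	   where do_work and recover are hypothetical user functions.  Any second
	   argument other than the literal 1 is rejected by the check above.  */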
6941
6942 case BUILT_IN_NONLOCAL_GOTO:
6943 target = expand_builtin_nonlocal_goto (exp);
6944 if (target)
6945 return target;
6946 break;
6947
6948 /* This updates the setjmp buffer that is its argument with the value
6949 of the current stack pointer. */
6950 case BUILT_IN_UPDATE_SETJMP_BUF:
6951 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6952 {
6953 rtx buf_addr
6954 = expand_normal (CALL_EXPR_ARG (exp, 0));
6955
6956 expand_builtin_update_setjmp_buf (buf_addr);
6957 return const0_rtx;
6958 }
6959 break;
6960
6961 case BUILT_IN_TRAP:
6962 expand_builtin_trap ();
6963 return const0_rtx;
6964
6965 case BUILT_IN_UNREACHABLE:
6966 expand_builtin_unreachable ();
6967 return const0_rtx;
6968
6969 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6970 case BUILT_IN_SIGNBITD32:
6971 case BUILT_IN_SIGNBITD64:
6972 case BUILT_IN_SIGNBITD128:
6973 target = expand_builtin_signbit (exp, target);
6974 if (target)
6975 return target;
6976 break;
6977
6978 /* Various hooks for the DWARF 2 __throw routine. */
6979 case BUILT_IN_UNWIND_INIT:
6980 expand_builtin_unwind_init ();
6981 return const0_rtx;
6982 case BUILT_IN_DWARF_CFA:
6983 return virtual_cfa_rtx;
6984 #ifdef DWARF2_UNWIND_INFO
6985 case BUILT_IN_DWARF_SP_COLUMN:
6986 return expand_builtin_dwarf_sp_column ();
6987 case BUILT_IN_INIT_DWARF_REG_SIZES:
6988 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6989 return const0_rtx;
6990 #endif
6991 case BUILT_IN_FROB_RETURN_ADDR:
6992 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6993 case BUILT_IN_EXTRACT_RETURN_ADDR:
6994 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6995 case BUILT_IN_EH_RETURN:
6996 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6997 CALL_EXPR_ARG (exp, 1));
6998 return const0_rtx;
6999 case BUILT_IN_EH_RETURN_DATA_REGNO:
7000 return expand_builtin_eh_return_data_regno (exp);
7001 case BUILT_IN_EXTEND_POINTER:
7002 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7003 case BUILT_IN_EH_POINTER:
7004 return expand_builtin_eh_pointer (exp);
7005 case BUILT_IN_EH_FILTER:
7006 return expand_builtin_eh_filter (exp);
7007 case BUILT_IN_EH_COPY_VALUES:
7008 return expand_builtin_eh_copy_values (exp);
7009
7010 case BUILT_IN_VA_START:
7011 return expand_builtin_va_start (exp);
7012 case BUILT_IN_VA_END:
7013 return expand_builtin_va_end (exp);
7014 case BUILT_IN_VA_COPY:
7015 return expand_builtin_va_copy (exp);
7016 case BUILT_IN_EXPECT:
7017 return expand_builtin_expect (exp, target);
7018 case BUILT_IN_ASSUME_ALIGNED:
7019 return expand_builtin_assume_aligned (exp, target);
7020 case BUILT_IN_PREFETCH:
7021 expand_builtin_prefetch (exp);
7022 return const0_rtx;
7023
7024 case BUILT_IN_INIT_TRAMPOLINE:
7025 return expand_builtin_init_trampoline (exp, true);
7026 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7027 return expand_builtin_init_trampoline (exp, false);
7028 case BUILT_IN_ADJUST_TRAMPOLINE:
7029 return expand_builtin_adjust_trampoline (exp);
7030
7031 case BUILT_IN_INIT_DESCRIPTOR:
7032 return expand_builtin_init_descriptor (exp);
7033 case BUILT_IN_ADJUST_DESCRIPTOR:
7034 return expand_builtin_adjust_descriptor (exp);
7035
7036 case BUILT_IN_FORK:
7037 case BUILT_IN_EXECL:
7038 case BUILT_IN_EXECV:
7039 case BUILT_IN_EXECLP:
7040 case BUILT_IN_EXECLE:
7041 case BUILT_IN_EXECVP:
7042 case BUILT_IN_EXECVE:
7043 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7044 if (target)
7045 return target;
7046 break;
7047
7048 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7049 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7050 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7051 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7052 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7053 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7054 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7055 if (target)
7056 return target;
7057 break;
7058
7059 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7060 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7061 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7062 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7063 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7064 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7065 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7066 if (target)
7067 return target;
7068 break;
7069
7070 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7071 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7072 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7073 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7074 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7075 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7076 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7077 if (target)
7078 return target;
7079 break;
7080
7081 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7082 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7083 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7084 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7085 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7086 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7087 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7088 if (target)
7089 return target;
7090 break;
7091
7092 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7093 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7094 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7095 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7096 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7097 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7098 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7099 if (target)
7100 return target;
7101 break;
7102
7103 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7104 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7105 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7106 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7107 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7108 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7109 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7110 if (target)
7111 return target;
7112 break;
7113
7114 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7115 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7116 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7117 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7118 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7119 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7120 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7121 if (target)
7122 return target;
7123 break;
7124
7125 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7126 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7127 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7128 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7129 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7130 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7131 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7132 if (target)
7133 return target;
7134 break;
7135
7136 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7137 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7138 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7139 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7140 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7141 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7142 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7143 if (target)
7144 return target;
7145 break;
7146
7147 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7148 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7149 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7150 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7151 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7152 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7153 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7154 if (target)
7155 return target;
7156 break;
7157
7158 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7159 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7160 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7161 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7162 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7163 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7164 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7165 if (target)
7166 return target;
7167 break;
7168
7169 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7170 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7171 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7172 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7173 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7174 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7175 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7176 if (target)
7177 return target;
7178 break;
7179
7180 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7181 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7182 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7183 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7184 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7185 if (mode == VOIDmode)
7186 mode = TYPE_MODE (boolean_type_node);
7187 if (!target || !register_operand (target, mode))
7188 target = gen_reg_rtx (mode);
7189
7190 mode = get_builtin_sync_mode
7191 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7192 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7193 if (target)
7194 return target;
7195 break;
7196
7197 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7198 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7199 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7200 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7201 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7202 mode = get_builtin_sync_mode
7203 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7204 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7205 if (target)
7206 return target;
7207 break;
7208
7209 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7210 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7211 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7212 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7213 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7214 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7215 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7216 if (target)
7217 return target;
7218 break;
7219
7220 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7221 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7222 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7223 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7224 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7225 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7226 expand_builtin_sync_lock_release (mode, exp);
7227 return const0_rtx;
7228
7229 case BUILT_IN_SYNC_SYNCHRONIZE:
7230 expand_builtin_sync_synchronize ();
7231 return const0_rtx;
7232
7233 case BUILT_IN_ATOMIC_EXCHANGE_1:
7234 case BUILT_IN_ATOMIC_EXCHANGE_2:
7235 case BUILT_IN_ATOMIC_EXCHANGE_4:
7236 case BUILT_IN_ATOMIC_EXCHANGE_8:
7237 case BUILT_IN_ATOMIC_EXCHANGE_16:
7238 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7239 target = expand_builtin_atomic_exchange (mode, exp, target);
7240 if (target)
7241 return target;
7242 break;
7243
7244 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7245 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7246 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7247 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7248 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7249 {
7250 unsigned int nargs, z;
7251 vec<tree, va_gc> *vec;
7252
7253 mode =
7254 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7255 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7256 if (target)
7257 return target;
7258
7259 /* If this is turned into an external library call, the weak parameter
7260 must be dropped to match the expected parameter list. */
7261 nargs = call_expr_nargs (exp);
7262 vec_alloc (vec, nargs - 1);
7263 for (z = 0; z < 3; z++)
7264 vec->quick_push (CALL_EXPR_ARG (exp, z));
7265 /* Skip the boolean weak parameter. */
7266 for (z = 4; z < 6; z++)
7267 vec->quick_push (CALL_EXPR_ARG (exp, z));
7268 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7269 break;
7270 }
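    /* Editorial sketch of the argument remapping above (not part of the
       original source).  The builtin form carries six arguments,

           __atomic_compare_exchange_n (ptr, expected, desired,
                                        weak, success_order, failure_order)

       whereas the out-of-line library routine has no WEAK flag, so the
       rebuilt call keeps arguments 0-2 and 4-5 and drops argument 3.  */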
7271
7272 case BUILT_IN_ATOMIC_LOAD_1:
7273 case BUILT_IN_ATOMIC_LOAD_2:
7274 case BUILT_IN_ATOMIC_LOAD_4:
7275 case BUILT_IN_ATOMIC_LOAD_8:
7276 case BUILT_IN_ATOMIC_LOAD_16:
7277 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7278 target = expand_builtin_atomic_load (mode, exp, target);
7279 if (target)
7280 return target;
7281 break;
7282
7283 case BUILT_IN_ATOMIC_STORE_1:
7284 case BUILT_IN_ATOMIC_STORE_2:
7285 case BUILT_IN_ATOMIC_STORE_4:
7286 case BUILT_IN_ATOMIC_STORE_8:
7287 case BUILT_IN_ATOMIC_STORE_16:
7288 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7289 target = expand_builtin_atomic_store (mode, exp);
7290 if (target)
7291 return const0_rtx;
7292 break;
7293
7294 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7295 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7296 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7297 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7298 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7299 {
7300 enum built_in_function lib;
7301 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7302 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7303 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7304 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7305 ignore, lib);
7306 if (target)
7307 return target;
7308 break;
7309 }
7310 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7311 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7312 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7313 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7314 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7315 {
7316 enum built_in_function lib;
7317 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7318 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7319 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7320 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7321 ignore, lib);
7322 if (target)
7323 return target;
7324 break;
7325 }
7326 case BUILT_IN_ATOMIC_AND_FETCH_1:
7327 case BUILT_IN_ATOMIC_AND_FETCH_2:
7328 case BUILT_IN_ATOMIC_AND_FETCH_4:
7329 case BUILT_IN_ATOMIC_AND_FETCH_8:
7330 case BUILT_IN_ATOMIC_AND_FETCH_16:
7331 {
7332 enum built_in_function lib;
7333 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7334 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7335 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7336 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7337 ignore, lib);
7338 if (target)
7339 return target;
7340 break;
7341 }
7342 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7343 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7344 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7345 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7346 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7347 {
7348 enum built_in_function lib;
7349 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7350 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7351 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7352 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7353 ignore, lib);
7354 if (target)
7355 return target;
7356 break;
7357 }
7358 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7359 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7360 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7361 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7362 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7363 {
7364 enum built_in_function lib;
7365 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7366 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7367 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7368 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7369 ignore, lib);
7370 if (target)
7371 return target;
7372 break;
7373 }
7374 case BUILT_IN_ATOMIC_OR_FETCH_1:
7375 case BUILT_IN_ATOMIC_OR_FETCH_2:
7376 case BUILT_IN_ATOMIC_OR_FETCH_4:
7377 case BUILT_IN_ATOMIC_OR_FETCH_8:
7378 case BUILT_IN_ATOMIC_OR_FETCH_16:
7379 {
7380 enum built_in_function lib;
7381 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7382 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7383 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7384 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7385 ignore, lib);
7386 if (target)
7387 return target;
7388 break;
7389 }
7390 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7391 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7392 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7393 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7394 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7395 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7396 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7397 ignore, BUILT_IN_NONE);
7398 if (target)
7399 return target;
7400 break;
7401
7402 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7403 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7404 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7405 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7406 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7407 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7408 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7409 ignore, BUILT_IN_NONE);
7410 if (target)
7411 return target;
7412 break;
7413
7414 case BUILT_IN_ATOMIC_FETCH_AND_1:
7415 case BUILT_IN_ATOMIC_FETCH_AND_2:
7416 case BUILT_IN_ATOMIC_FETCH_AND_4:
7417 case BUILT_IN_ATOMIC_FETCH_AND_8:
7418 case BUILT_IN_ATOMIC_FETCH_AND_16:
7419 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7420 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7421 ignore, BUILT_IN_NONE);
7422 if (target)
7423 return target;
7424 break;
7425
7426 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7427 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7428 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7429 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7430 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7431 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7432 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7433 ignore, BUILT_IN_NONE);
7434 if (target)
7435 return target;
7436 break;
7437
7438 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7439 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7440 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7441 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7442 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7443 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7444 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7445 ignore, BUILT_IN_NONE);
7446 if (target)
7447 return target;
7448 break;
7449
7450 case BUILT_IN_ATOMIC_FETCH_OR_1:
7451 case BUILT_IN_ATOMIC_FETCH_OR_2:
7452 case BUILT_IN_ATOMIC_FETCH_OR_4:
7453 case BUILT_IN_ATOMIC_FETCH_OR_8:
7454 case BUILT_IN_ATOMIC_FETCH_OR_16:
7455 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7456 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7457 ignore, BUILT_IN_NONE);
7458 if (target)
7459 return target;
7460 break;
7461
7462 case BUILT_IN_ATOMIC_TEST_AND_SET:
7463 return expand_builtin_atomic_test_and_set (exp, target);
7464
7465 case BUILT_IN_ATOMIC_CLEAR:
7466 return expand_builtin_atomic_clear (exp);
7467
7468 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7469 return expand_builtin_atomic_always_lock_free (exp);
7470
7471 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7472 target = expand_builtin_atomic_is_lock_free (exp);
7473 if (target)
7474 return target;
7475 break;
7476
7477 case BUILT_IN_ATOMIC_THREAD_FENCE:
7478 expand_builtin_atomic_thread_fence (exp);
7479 return const0_rtx;
7480
7481 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7482 expand_builtin_atomic_signal_fence (exp);
7483 return const0_rtx;
7484
7485 case BUILT_IN_OBJECT_SIZE:
7486 return expand_builtin_object_size (exp);
7487
7488 case BUILT_IN_MEMCPY_CHK:
7489 case BUILT_IN_MEMPCPY_CHK:
7490 case BUILT_IN_MEMMOVE_CHK:
7491 case BUILT_IN_MEMSET_CHK:
7492 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7493 if (target)
7494 return target;
7495 break;
7496
7497 case BUILT_IN_STRCPY_CHK:
7498 case BUILT_IN_STPCPY_CHK:
7499 case BUILT_IN_STRNCPY_CHK:
7500 case BUILT_IN_STPNCPY_CHK:
7501 case BUILT_IN_STRCAT_CHK:
7502 case BUILT_IN_STRNCAT_CHK:
7503 case BUILT_IN_SNPRINTF_CHK:
7504 case BUILT_IN_VSNPRINTF_CHK:
7505 maybe_emit_chk_warning (exp, fcode);
7506 break;
7507
7508 case BUILT_IN_SPRINTF_CHK:
7509 case BUILT_IN_VSPRINTF_CHK:
7510 maybe_emit_sprintf_chk_warning (exp, fcode);
7511 break;
7512
7513 case BUILT_IN_FREE:
7514 if (warn_free_nonheap_object)
7515 maybe_emit_free_warning (exp);
7516 break;
7517
7518 case BUILT_IN_THREAD_POINTER:
7519 return expand_builtin_thread_pointer (exp, target);
7520
7521 case BUILT_IN_SET_THREAD_POINTER:
7522 expand_builtin_set_thread_pointer (exp);
7523 return const0_rtx;
7524
7525 case BUILT_IN_CILK_DETACH:
7526 expand_builtin_cilk_detach (exp);
7527 return const0_rtx;
7528
7529 case BUILT_IN_CILK_POP_FRAME:
7530 expand_builtin_cilk_pop_frame (exp);
7531 return const0_rtx;
7532
7533 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7534 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7535 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7536 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7537 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7538 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7539 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7540 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7541 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7542 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7543 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7544 /* We allow user CHKP builtins if Pointer Bounds
7545 Checker is off. */
7546 if (!chkp_function_instrumented_p (current_function_decl))
7547 {
7548 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7549 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7550 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7551 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7552 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7553 return expand_normal (CALL_EXPR_ARG (exp, 0));
7554 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7555 return expand_normal (size_zero_node);
7556 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7557 return expand_normal (size_int (-1));
7558 else
7559 return const0_rtx;
7560 }
7561 /* FALLTHROUGH */
7562
7563 case BUILT_IN_CHKP_BNDMK:
7564 case BUILT_IN_CHKP_BNDSTX:
7565 case BUILT_IN_CHKP_BNDCL:
7566 case BUILT_IN_CHKP_BNDCU:
7567 case BUILT_IN_CHKP_BNDLDX:
7568 case BUILT_IN_CHKP_BNDRET:
7569 case BUILT_IN_CHKP_INTERSECT:
7570 case BUILT_IN_CHKP_NARROW:
7571 case BUILT_IN_CHKP_EXTRACT_LOWER:
7572 case BUILT_IN_CHKP_EXTRACT_UPPER:
7573 /* Software implementation of Pointer Bounds Checker is NYI.
7574 Target support is required. */
7575 error ("Your target platform does not support -fcheck-pointer-bounds");
7576 break;
7577
7578 case BUILT_IN_ACC_ON_DEVICE:
7579 /* Do a library call if we failed to expand the builtin when
7580 folding. */
7581 break;
7582
7583 default: /* just do library call, if unknown builtin */
7584 break;
7585 }
7586
7587 /* The switch statement above can drop through to cause the function
7588 to be called normally. */
7589 return expand_call (exp, target, ignore);
7590 }
7591
7592 /* Similar to expand_builtin but is used for instrumented calls. */
7593
7594 rtx
7595 expand_builtin_with_bounds (tree exp, rtx target,
7596 rtx subtarget ATTRIBUTE_UNUSED,
7597 machine_mode mode, int ignore)
7598 {
7599 tree fndecl = get_callee_fndecl (exp);
7600 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7601
7602 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7603
7604 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7605 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7606
7607 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7608 && fcode < END_CHKP_BUILTINS);
7609
7610 switch (fcode)
7611 {
7612 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7613 target = expand_builtin_memcpy_with_bounds (exp, target);
7614 if (target)
7615 return target;
7616 break;
7617
7618 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7619 target = expand_builtin_mempcpy_with_bounds (exp, target);
7620 if (target)
7621 return target;
7622 break;
7623
7624 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7625 target = expand_builtin_memset_with_bounds (exp, target, mode);
7626 if (target)
7627 return target;
7628 break;
7629
7630 default:
7631 break;
7632 }
7633
7634 /* The switch statement above can drop through to cause the function
7635 to be called normally. */
7636 return expand_call (exp, target, ignore);
7637 }
7638
7639 /* Determine whether a tree node represents a call to a built-in
7640 function. If the tree T is a call to a built-in function with
7641 the right number of arguments of the appropriate types, return
7642 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7643 Otherwise the return value is END_BUILTINS. */
7644
7645 enum built_in_function
7646 builtin_mathfn_code (const_tree t)
7647 {
7648 const_tree fndecl, arg, parmlist;
7649 const_tree argtype, parmtype;
7650 const_call_expr_arg_iterator iter;
7651
7652 if (TREE_CODE (t) != CALL_EXPR
7653 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7654 return END_BUILTINS;
7655
7656 fndecl = get_callee_fndecl (t);
7657 if (fndecl == NULL_TREE
7658 || TREE_CODE (fndecl) != FUNCTION_DECL
7659 || ! DECL_BUILT_IN (fndecl)
7660 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7661 return END_BUILTINS;
7662
7663 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7664 init_const_call_expr_arg_iterator (t, &iter);
7665 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7666 {
7667 /* If a function doesn't take a variable number of arguments,
7668 the last element in the list will have type `void'. */
7669 parmtype = TREE_VALUE (parmlist);
7670 if (VOID_TYPE_P (parmtype))
7671 {
7672 if (more_const_call_expr_args_p (&iter))
7673 return END_BUILTINS;
7674 return DECL_FUNCTION_CODE (fndecl);
7675 }
7676
7677 if (! more_const_call_expr_args_p (&iter))
7678 return END_BUILTINS;
7679
7680 arg = next_const_call_expr_arg (&iter);
7681 argtype = TREE_TYPE (arg);
7682
7683 if (SCALAR_FLOAT_TYPE_P (parmtype))
7684 {
7685 if (! SCALAR_FLOAT_TYPE_P (argtype))
7686 return END_BUILTINS;
7687 }
7688 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7689 {
7690 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7691 return END_BUILTINS;
7692 }
7693 else if (POINTER_TYPE_P (parmtype))
7694 {
7695 if (! POINTER_TYPE_P (argtype))
7696 return END_BUILTINS;
7697 }
7698 else if (INTEGRAL_TYPE_P (parmtype))
7699 {
7700 if (! INTEGRAL_TYPE_P (argtype))
7701 return END_BUILTINS;
7702 }
7703 else
7704 return END_BUILTINS;
7705 }
7706
7707 /* Variable-length argument list. */
7708 return DECL_FUNCTION_CODE (fndecl);
7709 }
7710
7711 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7712 evaluate to a constant. */
7713
7714 static tree
7715 fold_builtin_constant_p (tree arg)
7716 {
7717 /* We return 1 for a numeric type that's known to be a constant
7718 value at compile-time or for an aggregate type that's a
7719 literal constant. */
7720 STRIP_NOPS (arg);
7721
7722 /* If we know this is a constant, return the constant 1. */
7723 if (CONSTANT_CLASS_P (arg)
7724 || (TREE_CODE (arg) == CONSTRUCTOR
7725 && TREE_CONSTANT (arg)))
7726 return integer_one_node;
7727 if (TREE_CODE (arg) == ADDR_EXPR)
7728 {
7729 tree op = TREE_OPERAND (arg, 0);
7730 if (TREE_CODE (op) == STRING_CST
7731 || (TREE_CODE (op) == ARRAY_REF
7732 && integer_zerop (TREE_OPERAND (op, 1))
7733 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7734 return integer_one_node;
7735 }
7736
7737 /* If this expression has side effects, show we don't know it to be a
7738 constant. Likewise if it's a pointer or aggregate type since in
7739 those cases we only want literals, as those are only optimized
7740 when generating RTL, not later.
7741 And finally, if we are compiling an initializer, not code, we
7742 need to return a definite result now; there's not going to be any
7743 more optimization done. */
7744 if (TREE_SIDE_EFFECTS (arg)
7745 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7746 || POINTER_TYPE_P (TREE_TYPE (arg))
7747 || cfun == 0
7748 || folding_initializer
7749 || force_folding_builtin_constant_p)
7750 return integer_zero_node;
7751
7752 return NULL_TREE;
7753 }
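/* Editorial illustration (not part of the original source): under the rules
   above, __builtin_constant_p (3) and __builtin_constant_p ("abc") fold to 1,
   __builtin_constant_p (x++) folds to 0 because of the side effect, and a
   plain __builtin_constant_p (x) is left unfolded here so that later
   optimizations may still prove X constant.  */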
7754
7755 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7756 return it as a truthvalue. */
7757
7758 static tree
7759 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7760 tree predictor)
7761 {
7762 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7763
7764 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7765 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7766 ret_type = TREE_TYPE (TREE_TYPE (fn));
7767 pred_type = TREE_VALUE (arg_types);
7768 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7769
7770 pred = fold_convert_loc (loc, pred_type, pred);
7771 expected = fold_convert_loc (loc, expected_type, expected);
7772 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7773 predictor);
7774
7775 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7776 build_int_cst (ret_type, 0));
7777 }
7778
7779 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2 (an
7780 optional predictor). Return NULL_TREE if no simplification is possible. */
7781
7782 tree
7783 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7784 {
7785 tree inner, fndecl, inner_arg0;
7786 enum tree_code code;
7787
7788 /* Distribute the expected value over short-circuiting operators.
7789 See through the cast from truthvalue_type_node to long. */
7790 inner_arg0 = arg0;
7791 while (CONVERT_EXPR_P (inner_arg0)
7792 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7793 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7794 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7795
7796 /* If this is a builtin_expect within a builtin_expect keep the
7797 inner one. See through a comparison against a constant. It
7798 might have been added to create a truthvalue. */
7799 inner = inner_arg0;
7800
7801 if (COMPARISON_CLASS_P (inner)
7802 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7803 inner = TREE_OPERAND (inner, 0);
7804
7805 if (TREE_CODE (inner) == CALL_EXPR
7806 && (fndecl = get_callee_fndecl (inner))
7807 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7808 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7809 return arg0;
7810
7811 inner = inner_arg0;
7812 code = TREE_CODE (inner);
7813 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7814 {
7815 tree op0 = TREE_OPERAND (inner, 0);
7816 tree op1 = TREE_OPERAND (inner, 1);
7817
7818 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7819 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7820 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7821
7822 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7823 }
7824
7825 /* If the argument isn't invariant then there's nothing else we can do. */
7826 if (!TREE_CONSTANT (inner_arg0))
7827 return NULL_TREE;
7828
7829 /* If we expect that a comparison against the argument will fold to
7830 a constant return the constant. In practice, this means a true
7831 constant or the address of a non-weak symbol. */
7832 inner = inner_arg0;
7833 STRIP_NOPS (inner);
7834 if (TREE_CODE (inner) == ADDR_EXPR)
7835 {
7836 do
7837 {
7838 inner = TREE_OPERAND (inner, 0);
7839 }
7840 while (TREE_CODE (inner) == COMPONENT_REF
7841 || TREE_CODE (inner) == ARRAY_REF);
7842 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7843 return NULL_TREE;
7844 }
7845
7846 /* Otherwise, ARG0 already has the proper type for the return value. */
7847 return arg0;
7848 }
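/* Editorial sketch (not part of the original source): the distribution step
   above rewrites, e.g.,

       __builtin_expect (a && b, 1)

   into roughly

       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that the hint reaches both short-circuited operands.  */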
7849
7850 /* Fold a call to __builtin_classify_type with argument ARG. */
7851
7852 static tree
7853 fold_builtin_classify_type (tree arg)
7854 {
7855 if (arg == 0)
7856 return build_int_cst (integer_type_node, no_type_class);
7857
7858 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7859 }
7860
7861 /* Fold a call to __builtin_strlen with argument ARG. */
7862
7863 static tree
7864 fold_builtin_strlen (location_t loc, tree type, tree arg)
7865 {
7866 if (!validate_arg (arg, POINTER_TYPE))
7867 return NULL_TREE;
7868 else
7869 {
7870 tree len = c_strlen (arg, 0);
7871
7872 if (len)
7873 return fold_convert_loc (loc, type, len);
7874
7875 return NULL_TREE;
7876 }
7877 }
7878
7879 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7880
7881 static tree
7882 fold_builtin_inf (location_t loc, tree type, int warn)
7883 {
7884 REAL_VALUE_TYPE real;
7885
7886 /* __builtin_inff is intended to be usable to define INFINITY on all
7887 targets. If an infinity is not available, INFINITY expands "to a
7888 positive constant of type float that overflows at translation
7889 time", footnote "In this case, using INFINITY will violate the
7890 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7891 Thus we pedwarn to ensure this constraint violation is
7892 diagnosed. */
7893 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7894 pedwarn (loc, 0, "target format does not support infinity");
7895
7896 real_inf (&real);
7897 return build_real (type, real);
7898 }
7899
7900 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7901 NULL_TREE if no simplification can be made. */
7902
7903 static tree
7904 fold_builtin_sincos (location_t loc,
7905 tree arg0, tree arg1, tree arg2)
7906 {
7907 tree type;
7908 tree fndecl, call = NULL_TREE;
7909
7910 if (!validate_arg (arg0, REAL_TYPE)
7911 || !validate_arg (arg1, POINTER_TYPE)
7912 || !validate_arg (arg2, POINTER_TYPE))
7913 return NULL_TREE;
7914
7915 type = TREE_TYPE (arg0);
7916
7917 /* Calculate the result when the argument is a constant. */
7918 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7919 if (fn == END_BUILTINS)
7920 return NULL_TREE;
7921
7922 /* Canonicalize sincos to cexpi. */
7923 if (TREE_CODE (arg0) == REAL_CST)
7924 {
7925 tree complex_type = build_complex_type (type);
7926 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7927 }
7928 if (!call)
7929 {
7930 if (!targetm.libc_has_function (function_c99_math_complex)
7931 || !builtin_decl_implicit_p (fn))
7932 return NULL_TREE;
7933 fndecl = builtin_decl_explicit (fn);
7934 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7935 call = builtin_save_expr (call);
7936 }
7937
7938 return build2 (COMPOUND_EXPR, void_type_node,
7939 build2 (MODIFY_EXPR, void_type_node,
7940 build_fold_indirect_ref_loc (loc, arg1),
7941 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7942 build2 (MODIFY_EXPR, void_type_node,
7943 build_fold_indirect_ref_loc (loc, arg2),
7944 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7945 }
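/* Editorial sketch (not part of the original source): for sincos (x, &s, &c)
   the code above builds, roughly,

       _Complex double t = cexpi (x);   /* internal builtin, or a folded
                                           constant when X is a REAL_CST */
       s = __imag__ t;                  /* the sine   */
       c = __real__ t;                  /* the cosine */

   expressed as a COMPOUND_EXPR of the two stores.  */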
7946
7947 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
7948 Return NULL_TREE if no simplification can be made. */
7949
7950 static tree
7951 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7952 {
7953 if (!validate_arg (arg1, POINTER_TYPE)
7954 || !validate_arg (arg2, POINTER_TYPE)
7955 || !validate_arg (len, INTEGER_TYPE))
7956 return NULL_TREE;
7957
7958 /* If the LEN parameter is zero, return zero. */
7959 if (integer_zerop (len))
7960 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7961 arg1, arg2);
7962
7963 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7964 if (operand_equal_p (arg1, arg2, 0))
7965 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7966
7967 /* If the LEN parameter is one, return an expression corresponding to
7968 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7969 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7970 {
7971 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7972 tree cst_uchar_ptr_node
7973 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7974
7975 tree ind1
7976 = fold_convert_loc (loc, integer_type_node,
7977 build1 (INDIRECT_REF, cst_uchar_node,
7978 fold_convert_loc (loc,
7979 cst_uchar_ptr_node,
7980 arg1)));
7981 tree ind2
7982 = fold_convert_loc (loc, integer_type_node,
7983 build1 (INDIRECT_REF, cst_uchar_node,
7984 fold_convert_loc (loc,
7985 cst_uchar_ptr_node,
7986 arg2)));
7987 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7988 }
7989
7990 return NULL_TREE;
7991 }
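/* Editorial illustration (not part of the original source): with a constant
   length of one the fold above turns memcmp (p, q, 1) into the byte
   difference

       (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   while a zero length, or P and Q being the same operand, folds to 0.  */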
7992
7993 /* Fold a call to builtin isascii with argument ARG. */
7994
7995 static tree
7996 fold_builtin_isascii (location_t loc, tree arg)
7997 {
7998 if (!validate_arg (arg, INTEGER_TYPE))
7999 return NULL_TREE;
8000 else
8001 {
8002 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8003 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8004 build_int_cst (integer_type_node,
8005 ~ (unsigned HOST_WIDE_INT) 0x7f));
8006 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8007 arg, integer_zero_node);
8008 }
8009 }
8010
8011 /* Fold a call to builtin toascii with argument ARG. */
8012
8013 static tree
8014 fold_builtin_toascii (location_t loc, tree arg)
8015 {
8016 if (!validate_arg (arg, INTEGER_TYPE))
8017 return NULL_TREE;
8018
8019 /* Transform toascii(c) -> (c & 0x7f). */
8020 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8021 build_int_cst (integer_type_node, 0x7f));
8022 }
8023
8024 /* Fold a call to builtin isdigit with argument ARG. */
8025
8026 static tree
8027 fold_builtin_isdigit (location_t loc, tree arg)
8028 {
8029 if (!validate_arg (arg, INTEGER_TYPE))
8030 return NULL_TREE;
8031 else
8032 {
8033 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8034 /* According to the C standard, isdigit is unaffected by locale.
8035 However, it definitely is affected by the target character set. */
8036 unsigned HOST_WIDE_INT target_digit0
8037 = lang_hooks.to_target_charset ('0');
8038
8039 if (target_digit0 == 0)
8040 return NULL_TREE;
8041
8042 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8043 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8044 build_int_cst (unsigned_type_node, target_digit0));
8045 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8046 build_int_cst (unsigned_type_node, 9));
8047 }
8048 }
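/* Editorial illustration (not part of the original source): on an ASCII
   target, where '0' is 48, the fold above turns isdigit (c) into

       (unsigned) c - 48 <= 9

   replacing the libc call by one unsigned subtraction and compare.  */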
8049
8050 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8051
8052 static tree
8053 fold_builtin_fabs (location_t loc, tree arg, tree type)
8054 {
8055 if (!validate_arg (arg, REAL_TYPE))
8056 return NULL_TREE;
8057
8058 arg = fold_convert_loc (loc, type, arg);
8059 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8060 }
8061
8062 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8063
8064 static tree
8065 fold_builtin_abs (location_t loc, tree arg, tree type)
8066 {
8067 if (!validate_arg (arg, INTEGER_TYPE))
8068 return NULL_TREE;
8069
8070 arg = fold_convert_loc (loc, type, arg);
8071 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8072 }
8073
8074 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8075
8076 static tree
8077 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8078 {
8079 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8080 if (validate_arg (arg0, REAL_TYPE)
8081 && validate_arg (arg1, REAL_TYPE)
8082 && validate_arg (arg2, REAL_TYPE)
8083 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8084 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8085
8086 return NULL_TREE;
8087 }
8088
8089 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8090
8091 static tree
8092 fold_builtin_carg (location_t loc, tree arg, tree type)
8093 {
8094 if (validate_arg (arg, COMPLEX_TYPE)
8095 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8096 {
8097 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8098
8099 if (atan2_fn)
8100 {
8101 tree new_arg = builtin_save_expr (arg);
8102 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8103 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8104 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8105 }
8106 }
8107
8108 return NULL_TREE;
8109 }
8110
8111 /* Fold a call to builtin frexp; we can assume the base is 2. */
8112
8113 static tree
8114 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8115 {
8116 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8117 return NULL_TREE;
8118
8119 STRIP_NOPS (arg0);
8120
8121 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8122 return NULL_TREE;
8123
8124 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8125
8126 /* Proceed if a valid pointer type was passed in. */
8127 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8128 {
8129 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8130 tree frac, exp;
8131
8132 switch (value->cl)
8133 {
8134 case rvc_zero:
8135 /* For +-0, return (*exp = 0, +-0). */
8136 exp = integer_zero_node;
8137 frac = arg0;
8138 break;
8139 case rvc_nan:
8140 case rvc_inf:
8141 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8142 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8143 case rvc_normal:
8144 {
8145 /* Since the frexp function always expects base 2, and in
8146 GCC normalized significands are already in the range
8147 [0.5, 1.0), we have exactly what frexp wants. */
8148 REAL_VALUE_TYPE frac_rvt = *value;
8149 SET_REAL_EXP (&frac_rvt, 0);
8150 frac = build_real (rettype, frac_rvt);
8151 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8152 }
8153 break;
8154 default:
8155 gcc_unreachable ();
8156 }
8157
8158 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8159 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8160 TREE_SIDE_EFFECTS (arg1) = 1;
8161 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8162 }
8163
8164 return NULL_TREE;
8165 }
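/* Editorial illustration (not part of the original source): for a literal
   argument such as frexp (8.0, &e) the code above folds to the pair
   (*e = 4, 0.5), since 8.0 == 0.5 * 2^4 and significands are kept
   normalized to [0.5, 1.0).  */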
8166
8167 /* Fold a call to builtin modf. */
8168
8169 static tree
8170 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8171 {
8172 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8173 return NULL_TREE;
8174
8175 STRIP_NOPS (arg0);
8176
8177 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8178 return NULL_TREE;
8179
8180 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8181
8182 /* Proceed if a valid pointer type was passed in. */
8183 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8184 {
8185 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8186 REAL_VALUE_TYPE trunc, frac;
8187
8188 switch (value->cl)
8189 {
8190 case rvc_nan:
8191 case rvc_zero:
8192 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8193 trunc = frac = *value;
8194 break;
8195 case rvc_inf:
8196 /* For +-Inf, return (*arg1 = arg0, +-0). */
8197 frac = dconst0;
8198 frac.sign = value->sign;
8199 trunc = *value;
8200 break;
8201 case rvc_normal:
8202 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8203 real_trunc (&trunc, VOIDmode, value);
8204 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8205 /* If the original number was negative and already
8206 integral, then the fractional part is -0.0. */
8207 if (value->sign && frac.cl == rvc_zero)
8208 frac.sign = value->sign;
8209 break;
8210 }
8211
8212 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8213 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8214 build_real (rettype, trunc));
8215 TREE_SIDE_EFFECTS (arg1) = 1;
8216 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8217 build_real (rettype, frac));
8218 }
8219
8220 return NULL_TREE;
8221 }
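/* Editorial illustration (not part of the original source): for a literal
   argument such as modf (2.5, &i) the code above folds to the pair
   (*i = 2.0, 0.5); for -3.0 the fractional part is -0.0, preserving the
   sign of the argument.  */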
8222
8223 /* Given a location LOC, an interclass builtin function decl FNDECL
8224 and its single argument ARG, return a folded expression computing
8225 the same, or NULL_TREE if we either couldn't or didn't want to fold
8226 (the latter happens if there's an RTL instruction available). */
8227
8228 static tree
8229 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8230 {
8231 machine_mode mode;
8232
8233 if (!validate_arg (arg, REAL_TYPE))
8234 return NULL_TREE;
8235
8236 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8237 return NULL_TREE;
8238
8239 mode = TYPE_MODE (TREE_TYPE (arg));
8240
8241 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8242
8243 /* If there is no optab, try generic code. */
8244 switch (DECL_FUNCTION_CODE (fndecl))
8245 {
8246 tree result;
8247
8248 CASE_FLT_FN (BUILT_IN_ISINF):
8249 {
8250 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8251 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8252 tree type = TREE_TYPE (arg);
8253 REAL_VALUE_TYPE r;
8254 char buf[128];
8255
8256 if (is_ibm_extended)
8257 {
8258 /* NaN and Inf are encoded in the high-order double value
8259 only. The low-order value is not significant. */
8260 type = double_type_node;
8261 mode = DFmode;
8262 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8263 }
8264 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8265 real_from_string (&r, buf);
8266 result = build_call_expr (isgr_fn, 2,
8267 fold_build1_loc (loc, ABS_EXPR, type, arg),
8268 build_real (type, r));
8269 return result;
8270 }
8271 CASE_FLT_FN (BUILT_IN_FINITE):
8272 case BUILT_IN_ISFINITE:
8273 {
8274 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8275 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8276 tree type = TREE_TYPE (arg);
8277 REAL_VALUE_TYPE r;
8278 char buf[128];
8279
8280 if (is_ibm_extended)
8281 {
8282 /* NaN and Inf are encoded in the high-order double value
8283 only. The low-order value is not significant. */
8284 type = double_type_node;
8285 mode = DFmode;
8286 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8287 }
8288 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8289 real_from_string (&r, buf);
8290 result = build_call_expr (isle_fn, 2,
8291 fold_build1_loc (loc, ABS_EXPR, type, arg),
8292 build_real (type, r));
8293 /*result = fold_build2_loc (loc, UNGT_EXPR,
8294 TREE_TYPE (TREE_TYPE (fndecl)),
8295 fold_build1_loc (loc, ABS_EXPR, type, arg),
8296 build_real (type, r));
8297 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8298 TREE_TYPE (TREE_TYPE (fndecl)),
8299 result);*/
8300 return result;
8301 }
8302 case BUILT_IN_ISNORMAL:
8303 {
8304 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8305 islessequal(fabs(x),DBL_MAX). */
8306 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8307 tree type = TREE_TYPE (arg);
8308 tree orig_arg, max_exp, min_exp;
8309 machine_mode orig_mode = mode;
8310 REAL_VALUE_TYPE rmax, rmin;
8311 char buf[128];
8312
8313 orig_arg = arg = builtin_save_expr (arg);
8314 if (is_ibm_extended)
8315 {
8316 /* Use double to test the normal range of IBM extended
8317 precision. Emin for IBM extended precision is
8318 different to emin for IEEE double, being 53 higher
8319 since the low double exponent is at least 53 lower
8320 than the high double exponent. */
8321 type = double_type_node;
8322 mode = DFmode;
8323 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8324 }
8325 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8326
8327 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8328 real_from_string (&rmax, buf);
8329 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8330 real_from_string (&rmin, buf);
8331 max_exp = build_real (type, rmax);
8332 min_exp = build_real (type, rmin);
8333
8334 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8335 if (is_ibm_extended)
8336 {
8337 /* Testing the high end of the range is done just using
8338 the high double, using the same test as isfinite().
8339 For the subnormal end of the range we first test the
8340 high double, then if its magnitude is equal to the
8341 limit of 0x1p-969, we test whether the low double is
8342 non-zero and opposite sign to the high double. */
8343 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8344 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8345 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8346 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8347 arg, min_exp);
8348 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8349 complex_double_type_node, orig_arg);
8350 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8351 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8352 tree zero = build_real (type, dconst0);
8353 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8354 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8355 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8356 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8357 fold_build3 (COND_EXPR,
8358 integer_type_node,
8359 hilt, logt, lolt));
8360 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8361 eq_min, ok_lo);
8362 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8363 gt_min, eq_min);
8364 }
8365 else
8366 {
8367 tree const isge_fn
8368 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8369 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8370 }
8371 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8372 max_exp, min_exp);
8373 return result;
8374 }
8375 default:
8376 break;
8377 }
8378
8379 return NULL_TREE;
8380 }
8381
8382 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite,
8383 as selected by BUILTIN_INDEX. ARG is the argument for the call. */
8384
8385 static tree
8386 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8387 {
8388 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8389
8390 if (!validate_arg (arg, REAL_TYPE))
8391 return NULL_TREE;
8392
8393 switch (builtin_index)
8394 {
8395 case BUILT_IN_ISINF:
8396 if (!HONOR_INFINITIES (arg))
8397 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8398
8399 return NULL_TREE;
8400
8401 case BUILT_IN_ISINF_SIGN:
8402 {
8403 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8404 /* In a boolean context, GCC will fold the inner COND_EXPR to
8405 1. So e.g. "if (isinf_sign(x))" would be folded to just
8406 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8407 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8408 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8409 tree tmp = NULL_TREE;
8410
8411 arg = builtin_save_expr (arg);
8412
8413 if (signbit_fn && isinf_fn)
8414 {
8415 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8416 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8417
8418 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8419 signbit_call, integer_zero_node);
8420 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8421 isinf_call, integer_zero_node);
8422
8423 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8424 integer_minus_one_node, integer_one_node);
8425 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8426 isinf_call, tmp,
8427 integer_zero_node);
8428 }
8429
8430 return tmp;
8431 }
8432
8433 case BUILT_IN_ISFINITE:
8434 if (!HONOR_NANS (arg)
8435 && !HONOR_INFINITIES (arg))
8436 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8437
8438 return NULL_TREE;
8439
8440 case BUILT_IN_ISNAN:
8441 if (!HONOR_NANS (arg))
8442 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8443
8444 {
8445 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8446 if (is_ibm_extended)
8447 {
8448 /* NaN and Inf are encoded in the high-order double value
8449 only. The low-order value is not significant. */
8450 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8451 }
8452 }
8453 arg = builtin_save_expr (arg);
8454 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8455
8456 default:
8457 gcc_unreachable ();
8458 }
8459 }
8460
8461 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8462 This builtin will generate code to return the appropriate floating
8463 point classification depending on the value of the floating point
8464 number passed in. The possible return values must be supplied as
8465 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8466 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8467 one floating point argument which is "type generic". */
8468
8469 static tree
8470 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8471 {
8472 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8473 arg, type, res, tmp;
8474 machine_mode mode;
8475 REAL_VALUE_TYPE r;
8476 char buf[128];
8477
8478 /* Verify the required arguments in the original call. */
8479 if (nargs != 6
8480 || !validate_arg (args[0], INTEGER_TYPE)
8481 || !validate_arg (args[1], INTEGER_TYPE)
8482 || !validate_arg (args[2], INTEGER_TYPE)
8483 || !validate_arg (args[3], INTEGER_TYPE)
8484 || !validate_arg (args[4], INTEGER_TYPE)
8485 || !validate_arg (args[5], REAL_TYPE))
8486 return NULL_TREE;
8487
8488 fp_nan = args[0];
8489 fp_infinite = args[1];
8490 fp_normal = args[2];
8491 fp_subnormal = args[3];
8492 fp_zero = args[4];
8493 arg = args[5];
8494 type = TREE_TYPE (arg);
8495 mode = TYPE_MODE (type);
8496 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8497
8498 /* fpclassify(x) ->
8499 isnan(x) ? FP_NAN :
8500 (fabs(x) == Inf ? FP_INFINITE :
8501 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8502 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8503
8504 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8505 build_real (type, dconst0));
8506 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8507 tmp, fp_zero, fp_subnormal);
8508
8509 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8510 real_from_string (&r, buf);
8511 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8512 arg, build_real (type, r));
8513 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8514
8515 if (HONOR_INFINITIES (mode))
8516 {
8517 real_inf (&r);
8518 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8519 build_real (type, r));
8520 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8521 fp_infinite, res);
8522 }
8523
8524 if (HONOR_NANS (mode))
8525 {
8526 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8527 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8528 }
8529
8530 return res;
8531 }
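
/* For illustration, with IEEE binary64 (emin == -1021) the string built
   above is "0x1p-1022", i.e. DBL_MIN, so conceptually the fold is

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL,
                           FP_ZERO, x)
       => x == x
            ? (fabs (x) == Inf ? FP_INFINITE
               : (fabs (x) >= 0x1p-1022 ? FP_NORMAL
                  : (fabs (x) == 0 ? FP_ZERO : FP_SUBNORMAL)))
            : FP_NAN

   with the Inf and NaN tests emitted only when they are honored for the
   mode.  */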
8532
8533 /* Fold a call to an unordered comparison function such as
8534 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8535 being called and ARG0 and ARG1 are the arguments for the call.
8536 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8537 the opposite of the desired result. UNORDERED_CODE is used
8538 for modes that can hold NaNs and ORDERED_CODE is used for
8539 the rest. */
8540
8541 static tree
8542 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8543 enum tree_code unordered_code,
8544 enum tree_code ordered_code)
8545 {
8546 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8547 enum tree_code code;
8548 tree type0, type1;
8549 enum tree_code code0, code1;
8550 tree cmp_type = NULL_TREE;
8551
8552 type0 = TREE_TYPE (arg0);
8553 type1 = TREE_TYPE (arg1);
8554
8555 code0 = TREE_CODE (type0);
8556 code1 = TREE_CODE (type1);
8557
8558 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8559 /* Choose the wider of two real types. */
8560 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8561 ? type0 : type1;
8562 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8563 cmp_type = type0;
8564 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8565 cmp_type = type1;
8566
8567 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8568 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8569
8570 if (unordered_code == UNORDERED_EXPR)
8571 {
8572 if (!HONOR_NANS (arg0))
8573 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8574 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8575 }
8576
8577 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8578 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8579 fold_build2_loc (loc, code, type, arg0, arg1));
8580 }
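
/* For illustration: __builtin_isgreater (x, y) reaches this function with
   UNORDERED_CODE == UNLE_EXPR and ORDERED_CODE == LE_EXPR, so it folds to

     !UNLE_EXPR (x, y)    when NaNs are honored
     !(x <= y)            otherwise

   and __builtin_isunordered (x, y) folds directly to UNORDERED_EXPR (x, y),
   or to 0 when NaNs cannot occur.  */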
8581
8582 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8583 arithmetic if it can never overflow, or into internal functions that
8584 return both the result of the arithmetic and an overflow boolean flag in
8585 a complex integer result, or some other check for overflow.
8586 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8587 checking part of that. */
8588
8589 static tree
8590 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8591 tree arg0, tree arg1, tree arg2)
8592 {
8593 enum internal_fn ifn = IFN_LAST;
8594 /* The code of the expression corresponding to the type-generic
8595 built-in, or ERROR_MARK for the type-specific ones. */
8596 enum tree_code opcode = ERROR_MARK;
8597 bool ovf_only = false;
8598
8599 switch (fcode)
8600 {
8601 case BUILT_IN_ADD_OVERFLOW_P:
8602 ovf_only = true;
8603 /* FALLTHRU */
8604 case BUILT_IN_ADD_OVERFLOW:
8605 opcode = PLUS_EXPR;
8606 /* FALLTHRU */
8607 case BUILT_IN_SADD_OVERFLOW:
8608 case BUILT_IN_SADDL_OVERFLOW:
8609 case BUILT_IN_SADDLL_OVERFLOW:
8610 case BUILT_IN_UADD_OVERFLOW:
8611 case BUILT_IN_UADDL_OVERFLOW:
8612 case BUILT_IN_UADDLL_OVERFLOW:
8613 ifn = IFN_ADD_OVERFLOW;
8614 break;
8615 case BUILT_IN_SUB_OVERFLOW_P:
8616 ovf_only = true;
8617 /* FALLTHRU */
8618 case BUILT_IN_SUB_OVERFLOW:
8619 opcode = MINUS_EXPR;
8620 /* FALLTHRU */
8621 case BUILT_IN_SSUB_OVERFLOW:
8622 case BUILT_IN_SSUBL_OVERFLOW:
8623 case BUILT_IN_SSUBLL_OVERFLOW:
8624 case BUILT_IN_USUB_OVERFLOW:
8625 case BUILT_IN_USUBL_OVERFLOW:
8626 case BUILT_IN_USUBLL_OVERFLOW:
8627 ifn = IFN_SUB_OVERFLOW;
8628 break;
8629 case BUILT_IN_MUL_OVERFLOW_P:
8630 ovf_only = true;
8631 /* FALLTHRU */
8632 case BUILT_IN_MUL_OVERFLOW:
8633 opcode = MULT_EXPR;
8634 /* FALLTHRU */
8635 case BUILT_IN_SMUL_OVERFLOW:
8636 case BUILT_IN_SMULL_OVERFLOW:
8637 case BUILT_IN_SMULLL_OVERFLOW:
8638 case BUILT_IN_UMUL_OVERFLOW:
8639 case BUILT_IN_UMULL_OVERFLOW:
8640 case BUILT_IN_UMULLL_OVERFLOW:
8641 ifn = IFN_MUL_OVERFLOW;
8642 break;
8643 default:
8644 gcc_unreachable ();
8645 }
8646
8647 /* For the "generic" overloads, the first two arguments can have different
8648 types and the last argument determines the target type to use to check
8649 for overflow. The arguments of the other overloads all have the same
8650 type. */
8651 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8652
8653 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8654 arguments are constant, attempt to fold the built-in call into a constant
8655 expression indicating whether or not it detected an overflow. */
8656 if (ovf_only
8657 && TREE_CODE (arg0) == INTEGER_CST
8658 && TREE_CODE (arg1) == INTEGER_CST)
8659 /* Perform the computation in the target type and check for overflow. */
8660 return omit_one_operand_loc (loc, boolean_type_node,
8661 arith_overflowed_p (opcode, type, arg0, arg1)
8662 ? boolean_true_node : boolean_false_node,
8663 arg2);
8664
8665 tree ctype = build_complex_type (type);
8666 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8667 2, arg0, arg1);
8668 tree tgt = save_expr (call);
8669 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8670 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8671 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8672
8673 if (ovf_only)
8674 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8675
8676 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8677 tree store
8678 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8679 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8680 }
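
/* For illustration, folding __builtin_add_overflow (a, b, &r) with the
   routine above conceptually yields

     c = .ADD_OVERFLOW (a, b);      complex integer: result and overflow bit
     r = REALPART_EXPR <c>;
     (bool) IMAGPART_EXPR <c>;      value of the whole expression

   whereas __builtin_add_overflow_p keeps only the IMAGPART_EXPR part, and
   two constant operands fold straight to true/false via
   arith_overflowed_p.  */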
8681
8682 /* Fold a call to __builtin_FILE to a constant string. */
8683
8684 static inline tree
8685 fold_builtin_FILE (location_t loc)
8686 {
8687 if (const char *fname = LOCATION_FILE (loc))
8688 return build_string_literal (strlen (fname) + 1, fname);
8689
8690 return build_string_literal (1, "");
8691 }
8692
8693 /* Fold a call to __builtin_FUNCTION to a constant string. */
8694
8695 static inline tree
8696 fold_builtin_FUNCTION ()
8697 {
8698 const char *name = "";
8699
8700 if (current_function_decl)
8701 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8702
8703 return build_string_literal (strlen (name) + 1, name);
8704 }
8705
8706 /* Fold a call to __builtin_LINE to an integer constant. */
8707
8708 static inline tree
8709 fold_builtin_LINE (location_t loc, tree type)
8710 {
8711 return build_int_cst (type, LOCATION_LINE (loc));
8712 }
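
/* For illustration (with a hypothetical call at line 42 of foo.c inside
   a function bar):

     __builtin_FILE ()      => "foo.c"
     __builtin_FUNCTION ()  => "bar"
     __builtin_LINE ()      => 42

   each folds to a constant at the point of the call, which is what makes
   these builtins usable as C++ default arguments.  */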
8713
8714 /* Fold a call to built-in function FNDECL with 0 arguments.
8715 This function returns NULL_TREE if no simplification was possible. */
8716
8717 static tree
8718 fold_builtin_0 (location_t loc, tree fndecl)
8719 {
8720 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8721 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8722 switch (fcode)
8723 {
8724 case BUILT_IN_FILE:
8725 return fold_builtin_FILE (loc);
8726
8727 case BUILT_IN_FUNCTION:
8728 return fold_builtin_FUNCTION ();
8729
8730 case BUILT_IN_LINE:
8731 return fold_builtin_LINE (loc, type);
8732
8733 CASE_FLT_FN (BUILT_IN_INF):
8734 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8735 case BUILT_IN_INFD32:
8736 case BUILT_IN_INFD64:
8737 case BUILT_IN_INFD128:
8738 return fold_builtin_inf (loc, type, true);
8739
8740 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8741 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8742 return fold_builtin_inf (loc, type, false);
8743
8744 case BUILT_IN_CLASSIFY_TYPE:
8745 return fold_builtin_classify_type (NULL_TREE);
8746
8747 default:
8748 break;
8749 }
8750 return NULL_TREE;
8751 }
8752
8753 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8754 This function returns NULL_TREE if no simplification was possible. */
8755
8756 static tree
8757 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8758 {
8759 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8760 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8761
8762 if (TREE_CODE (arg0) == ERROR_MARK)
8763 return NULL_TREE;
8764
8765 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8766 return ret;
8767
8768 switch (fcode)
8769 {
8770 case BUILT_IN_CONSTANT_P:
8771 {
8772 tree val = fold_builtin_constant_p (arg0);
8773
8774 /* Gimplification will pull the CALL_EXPR for the builtin out of
8775 an if condition. When not optimizing, we'll not CSE it back.
8776 To avoid regressions such as link errors, return false now. */
8777 if (!val && !optimize)
8778 val = integer_zero_node;
8779
8780 return val;
8781 }
8782
8783 case BUILT_IN_CLASSIFY_TYPE:
8784 return fold_builtin_classify_type (arg0);
8785
8786 case BUILT_IN_STRLEN:
8787 return fold_builtin_strlen (loc, type, arg0);
8788
8789 CASE_FLT_FN (BUILT_IN_FABS):
8790 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8791 case BUILT_IN_FABSD32:
8792 case BUILT_IN_FABSD64:
8793 case BUILT_IN_FABSD128:
8794 return fold_builtin_fabs (loc, arg0, type);
8795
8796 case BUILT_IN_ABS:
8797 case BUILT_IN_LABS:
8798 case BUILT_IN_LLABS:
8799 case BUILT_IN_IMAXABS:
8800 return fold_builtin_abs (loc, arg0, type);
8801
8802 CASE_FLT_FN (BUILT_IN_CONJ):
8803 if (validate_arg (arg0, COMPLEX_TYPE)
8804 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8805 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8806 break;
8807
8808 CASE_FLT_FN (BUILT_IN_CREAL):
8809 if (validate_arg (arg0, COMPLEX_TYPE)
8810 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8811 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8812 break;
8813
8814 CASE_FLT_FN (BUILT_IN_CIMAG):
8815 if (validate_arg (arg0, COMPLEX_TYPE)
8816 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8817 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8818 break;
8819
8820 CASE_FLT_FN (BUILT_IN_CARG):
8821 return fold_builtin_carg (loc, arg0, type);
8822
8823 case BUILT_IN_ISASCII:
8824 return fold_builtin_isascii (loc, arg0);
8825
8826 case BUILT_IN_TOASCII:
8827 return fold_builtin_toascii (loc, arg0);
8828
8829 case BUILT_IN_ISDIGIT:
8830 return fold_builtin_isdigit (loc, arg0);
8831
8832 CASE_FLT_FN (BUILT_IN_FINITE):
8833 case BUILT_IN_FINITED32:
8834 case BUILT_IN_FINITED64:
8835 case BUILT_IN_FINITED128:
8836 case BUILT_IN_ISFINITE:
8837 {
8838 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8839 if (ret)
8840 return ret;
8841 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8842 }
8843
8844 CASE_FLT_FN (BUILT_IN_ISINF):
8845 case BUILT_IN_ISINFD32:
8846 case BUILT_IN_ISINFD64:
8847 case BUILT_IN_ISINFD128:
8848 {
8849 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8850 if (ret)
8851 return ret;
8852 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8853 }
8854
8855 case BUILT_IN_ISNORMAL:
8856 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8857
8858 case BUILT_IN_ISINF_SIGN:
8859 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8860
8861 CASE_FLT_FN (BUILT_IN_ISNAN):
8862 case BUILT_IN_ISNAND32:
8863 case BUILT_IN_ISNAND64:
8864 case BUILT_IN_ISNAND128:
8865 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8866
8867 case BUILT_IN_FREE:
8868 if (integer_zerop (arg0))
8869 return build_empty_stmt (loc);
8870 break;
8871
8872 default:
8873 break;
8874 }
8875
8876 return NULL_TREE;
8877
8878 }
8879
8880 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8881 This function returns NULL_TREE if no simplification was possible. */
8882
8883 static tree
8884 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8885 {
8886 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8887 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8888
8889 if (TREE_CODE (arg0) == ERROR_MARK
8890 || TREE_CODE (arg1) == ERROR_MARK)
8891 return NULL_TREE;
8892
8893 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8894 return ret;
8895
8896 switch (fcode)
8897 {
8898 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8899 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8900 if (validate_arg (arg0, REAL_TYPE)
8901 && validate_arg (arg1, POINTER_TYPE))
8902 return do_mpfr_lgamma_r (arg0, arg1, type);
8903 break;
8904
8905 CASE_FLT_FN (BUILT_IN_FREXP):
8906 return fold_builtin_frexp (loc, arg0, arg1, type);
8907
8908 CASE_FLT_FN (BUILT_IN_MODF):
8909 return fold_builtin_modf (loc, arg0, arg1, type);
8910
8911 case BUILT_IN_STRSPN:
8912 return fold_builtin_strspn (loc, arg0, arg1);
8913
8914 case BUILT_IN_STRCSPN:
8915 return fold_builtin_strcspn (loc, arg0, arg1);
8916
8917 case BUILT_IN_STRPBRK:
8918 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8919
8920 case BUILT_IN_EXPECT:
8921 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8922
8923 case BUILT_IN_ISGREATER:
8924 return fold_builtin_unordered_cmp (loc, fndecl,
8925 arg0, arg1, UNLE_EXPR, LE_EXPR);
8926 case BUILT_IN_ISGREATEREQUAL:
8927 return fold_builtin_unordered_cmp (loc, fndecl,
8928 arg0, arg1, UNLT_EXPR, LT_EXPR);
8929 case BUILT_IN_ISLESS:
8930 return fold_builtin_unordered_cmp (loc, fndecl,
8931 arg0, arg1, UNGE_EXPR, GE_EXPR);
8932 case BUILT_IN_ISLESSEQUAL:
8933 return fold_builtin_unordered_cmp (loc, fndecl,
8934 arg0, arg1, UNGT_EXPR, GT_EXPR);
8935 case BUILT_IN_ISLESSGREATER:
8936 return fold_builtin_unordered_cmp (loc, fndecl,
8937 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8938 case BUILT_IN_ISUNORDERED:
8939 return fold_builtin_unordered_cmp (loc, fndecl,
8940 arg0, arg1, UNORDERED_EXPR,
8941 NOP_EXPR);
8942
8943 /* We do the folding for va_start in the expander. */
8944 case BUILT_IN_VA_START:
8945 break;
8946
8947 case BUILT_IN_OBJECT_SIZE:
8948 return fold_builtin_object_size (arg0, arg1);
8949
8950 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8951 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8952
8953 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8954 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8955
8956 default:
8957 break;
8958 }
8959 return NULL_TREE;
8960 }
8961
8962 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8963 and ARG2.
8964 This function returns NULL_TREE if no simplification was possible. */
8965
8966 static tree
8967 fold_builtin_3 (location_t loc, tree fndecl,
8968 tree arg0, tree arg1, tree arg2)
8969 {
8970 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8971 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8972
8973 if (TREE_CODE (arg0) == ERROR_MARK
8974 || TREE_CODE (arg1) == ERROR_MARK
8975 || TREE_CODE (arg2) == ERROR_MARK)
8976 return NULL_TREE;
8977
8978 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8979 arg0, arg1, arg2))
8980 return ret;
8981
8982 switch (fcode)
8983 {
8984
8985 CASE_FLT_FN (BUILT_IN_SINCOS):
8986 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8987
8988 CASE_FLT_FN (BUILT_IN_FMA):
8989 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8990
8991 CASE_FLT_FN (BUILT_IN_REMQUO):
8992 if (validate_arg (arg0, REAL_TYPE)
8993 && validate_arg (arg1, REAL_TYPE)
8994 && validate_arg (arg2, POINTER_TYPE))
8995 return do_mpfr_remquo (arg0, arg1, arg2);
8996 break;
8997
8998 case BUILT_IN_MEMCMP:
8999 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9000
9001 case BUILT_IN_EXPECT:
9002 return fold_builtin_expect (loc, arg0, arg1, arg2);
9003
9004 case BUILT_IN_ADD_OVERFLOW:
9005 case BUILT_IN_SUB_OVERFLOW:
9006 case BUILT_IN_MUL_OVERFLOW:
9007 case BUILT_IN_ADD_OVERFLOW_P:
9008 case BUILT_IN_SUB_OVERFLOW_P:
9009 case BUILT_IN_MUL_OVERFLOW_P:
9010 case BUILT_IN_SADD_OVERFLOW:
9011 case BUILT_IN_SADDL_OVERFLOW:
9012 case BUILT_IN_SADDLL_OVERFLOW:
9013 case BUILT_IN_SSUB_OVERFLOW:
9014 case BUILT_IN_SSUBL_OVERFLOW:
9015 case BUILT_IN_SSUBLL_OVERFLOW:
9016 case BUILT_IN_SMUL_OVERFLOW:
9017 case BUILT_IN_SMULL_OVERFLOW:
9018 case BUILT_IN_SMULLL_OVERFLOW:
9019 case BUILT_IN_UADD_OVERFLOW:
9020 case BUILT_IN_UADDL_OVERFLOW:
9021 case BUILT_IN_UADDLL_OVERFLOW:
9022 case BUILT_IN_USUB_OVERFLOW:
9023 case BUILT_IN_USUBL_OVERFLOW:
9024 case BUILT_IN_USUBLL_OVERFLOW:
9025 case BUILT_IN_UMUL_OVERFLOW:
9026 case BUILT_IN_UMULL_OVERFLOW:
9027 case BUILT_IN_UMULLL_OVERFLOW:
9028 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9029
9030 default:
9031 break;
9032 }
9033 return NULL_TREE;
9034 }
9035
9036 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9037 arguments. IGNORE is true if the result of the
9038 function call is ignored. This function returns NULL_TREE if no
9039 simplification was possible. */
9040
9041 tree
9042 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9043 {
9044 tree ret = NULL_TREE;
9045
9046 switch (nargs)
9047 {
9048 case 0:
9049 ret = fold_builtin_0 (loc, fndecl);
9050 break;
9051 case 1:
9052 ret = fold_builtin_1 (loc, fndecl, args[0]);
9053 break;
9054 case 2:
9055 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9056 break;
9057 case 3:
9058 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9059 break;
9060 default:
9061 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9062 break;
9063 }
9064 if (ret)
9065 {
9066 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9067 SET_EXPR_LOCATION (ret, loc);
9068 TREE_NO_WARNING (ret) = 1;
9069 return ret;
9070 }
9071 return NULL_TREE;
9072 }
9073
9074 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9075 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9076 of arguments in ARGS to be omitted. OLDNARGS is the number of
9077 elements in ARGS. */
9078
9079 static tree
9080 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9081 int skip, tree fndecl, int n, va_list newargs)
9082 {
9083 int nargs = oldnargs - skip + n;
9084 tree *buffer;
9085
9086 if (n > 0)
9087 {
9088 int i, j;
9089
9090 buffer = XALLOCAVEC (tree, nargs);
9091 for (i = 0; i < n; i++)
9092 buffer[i] = va_arg (newargs, tree);
9093 for (j = skip; j < oldnargs; j++, i++)
9094 buffer[i] = args[j];
9095 }
9096 else
9097 buffer = args + skip;
9098
9099 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9100 }
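
/* For illustration (hypothetical values): with OLDNARGS == 4, SKIP == 2 and
   two new arguments A and B, the call built above is

     fndecl (A, B, args[2], args[3])

   i.e. the new arguments come first, followed by the old arguments that
   were not skipped.  */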
9101
9102 /* Return true if FNDECL shouldn't be folded right now.
9103 If a built-in function has an inline attribute always_inline
9104 wrapper, defer folding it until after always_inline functions have
9105 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9106 might not be performed. */
9107
9108 bool
9109 avoid_folding_inline_builtin (tree fndecl)
9110 {
9111 return (DECL_DECLARED_INLINE_P (fndecl)
9112 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9113 && cfun
9114 && !cfun->always_inline_functions_inlined
9115 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9116 }
9117
9118 /* A wrapper function for builtin folding that prevents warnings for
9119 "statement without effect" and the like, caused by removing the
9120 call node earlier than the warning is generated. */
9121
9122 tree
9123 fold_call_expr (location_t loc, tree exp, bool ignore)
9124 {
9125 tree ret = NULL_TREE;
9126 tree fndecl = get_callee_fndecl (exp);
9127 if (fndecl
9128 && TREE_CODE (fndecl) == FUNCTION_DECL
9129 && DECL_BUILT_IN (fndecl)
9130 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9131 yet. Defer folding until we see all the arguments
9132 (after inlining). */
9133 && !CALL_EXPR_VA_ARG_PACK (exp))
9134 {
9135 int nargs = call_expr_nargs (exp);
9136
9137 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9138 instead the last argument is __builtin_va_arg_pack (). Defer folding
9139 even in that case, until arguments are finalized. */
9140 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9141 {
9142 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9143 if (fndecl2
9144 && TREE_CODE (fndecl2) == FUNCTION_DECL
9145 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9146 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9147 return NULL_TREE;
9148 }
9149
9150 if (avoid_folding_inline_builtin (fndecl))
9151 return NULL_TREE;
9152
9153 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9154 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9155 CALL_EXPR_ARGP (exp), ignore);
9156 else
9157 {
9158 tree *args = CALL_EXPR_ARGP (exp);
9159 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9160 if (ret)
9161 return ret;
9162 }
9163 }
9164 return NULL_TREE;
9165 }
9166
9167 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9168 N arguments are passed in the array ARGARRAY. Return a folded
9169 expression or NULL_TREE if no simplification was possible. */
9170
9171 tree
9172 fold_builtin_call_array (location_t loc, tree,
9173 tree fn,
9174 int n,
9175 tree *argarray)
9176 {
9177 if (TREE_CODE (fn) != ADDR_EXPR)
9178 return NULL_TREE;
9179
9180 tree fndecl = TREE_OPERAND (fn, 0);
9181 if (TREE_CODE (fndecl) == FUNCTION_DECL
9182 && DECL_BUILT_IN (fndecl))
9183 {
9184 /* If last argument is __builtin_va_arg_pack (), arguments to this
9185 function are not finalized yet. Defer folding until they are. */
9186 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9187 {
9188 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9189 if (fndecl2
9190 && TREE_CODE (fndecl2) == FUNCTION_DECL
9191 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9192 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9193 return NULL_TREE;
9194 }
9195 if (avoid_folding_inline_builtin (fndecl))
9196 return NULL_TREE;
9197 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9198 return targetm.fold_builtin (fndecl, n, argarray, false);
9199 else
9200 return fold_builtin_n (loc, fndecl, argarray, n, false);
9201 }
9202
9203 return NULL_TREE;
9204 }
9205
9206 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9207 along with N new arguments specified as the "..." parameters. SKIP
9208 is the number of arguments in EXP to be omitted. This function is used
9209 to do varargs-to-varargs transformations. */
9210
9211 static tree
9212 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9213 {
9214 va_list ap;
9215 tree t;
9216
9217 va_start (ap, n);
9218 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9219 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9220 va_end (ap);
9221
9222 return t;
9223 }
9224
9225 /* Validate a single argument ARG against a tree code CODE representing
9226 a type. Return true when argument is valid. */
9227
9228 static bool
9229 validate_arg (const_tree arg, enum tree_code code)
9230 {
9231 if (!arg)
9232 return false;
9233 else if (code == POINTER_TYPE)
9234 return POINTER_TYPE_P (TREE_TYPE (arg));
9235 else if (code == INTEGER_TYPE)
9236 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9237 return code == TREE_CODE (TREE_TYPE (arg));
9238 }
9239
9240 /* This function validates the types of a function call argument list
9241 against a specified list of tree_codes. If the last specifier is a 0,
9242 that represents an ellipsis, otherwise the last specifier must be a
9243 VOID_TYPE.
9244
9245 This is the GIMPLE version of validate_arglist. Eventually we want to
9246 completely convert builtins.c to work from GIMPLEs and the tree based
9247 validate_arglist will then be removed. */
9248
9249 bool
9250 validate_gimple_arglist (const gcall *call, ...)
9251 {
9252 enum tree_code code;
9253 bool res = false;
9254 va_list ap;
9255 const_tree arg;
9256 size_t i;
9257
9258 va_start (ap, call);
9259 i = 0;
9260
9261 do
9262 {
9263 code = (enum tree_code) va_arg (ap, int);
9264 switch (code)
9265 {
9266 case 0:
9267 /* This signifies an ellipsis; any further arguments are all ok. */
9268 res = true;
9269 goto end;
9270 case VOID_TYPE:
9271 /* This signifies an endlink, if no arguments remain, return
9272 true, otherwise return false. */
9273 res = (i == gimple_call_num_args (call));
9274 goto end;
9275 default:
9276 /* If no parameters remain or the parameter's code does not
9277 match the specified code, return false. Otherwise continue
9278 checking any remaining arguments. */
9279 arg = gimple_call_arg (call, i++);
9280 if (!validate_arg (arg, code))
9281 goto end;
9282 break;
9283 }
9284 }
9285 while (1);
9286
9287 /* We need gotos here since we can only have one VA_CLOSE in a
9288 function. */
9289 end: ;
9290 va_end (ap);
9291
9292 return res;
9293 }
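
/* For illustration, a typical use of the convention above:

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE))
       return false;

   accepts exactly one floating-point argument followed by one pointer
   argument; ending the list with 0 instead of VOID_TYPE would allow any
   number of further arguments.  */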
9294
9295 /* Default target-specific builtin expander that does nothing. */
9296
9297 rtx
9298 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9299 rtx target ATTRIBUTE_UNUSED,
9300 rtx subtarget ATTRIBUTE_UNUSED,
9301 machine_mode mode ATTRIBUTE_UNUSED,
9302 int ignore ATTRIBUTE_UNUSED)
9303 {
9304 return NULL_RTX;
9305 }
9306
9307 /* Return true if EXP represents data that would potentially reside
9308 in a readonly section. */
9309
9310 bool
9311 readonly_data_expr (tree exp)
9312 {
9313 STRIP_NOPS (exp);
9314
9315 if (TREE_CODE (exp) != ADDR_EXPR)
9316 return false;
9317
9318 exp = get_base_address (TREE_OPERAND (exp, 0));
9319 if (!exp)
9320 return false;
9321
9322 /* Make sure we call decl_readonly_section only for trees it
9323 can handle (since it returns true for everything it doesn't
9324 understand). */
9325 if (TREE_CODE (exp) == STRING_CST
9326 || TREE_CODE (exp) == CONSTRUCTOR
9327 || (VAR_P (exp) && TREE_STATIC (exp)))
9328 return decl_readonly_section (exp, 0);
9329 else
9330 return false;
9331 }
9332
9333 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9334 to the call, and TYPE is its return type.
9335
9336 Return NULL_TREE if no simplification was possible, otherwise return the
9337 simplified form of the call as a tree.
9338
9339 The simplified form may be a constant or other expression which
9340 computes the same value, but in a more efficient manner (including
9341 calls to other builtin functions).
9342
9343 The call may contain arguments which need to be evaluated, but
9344 which are not useful to determine the result of the call. In
9345 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9346 COMPOUND_EXPR will be an argument which must be evaluated.
9347 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9348 COMPOUND_EXPR in the chain will contain the tree for the simplified
9349 form of the builtin function call. */
9350
9351 static tree
9352 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9353 {
9354 if (!validate_arg (s1, POINTER_TYPE)
9355 || !validate_arg (s2, POINTER_TYPE))
9356 return NULL_TREE;
9357 else
9358 {
9359 tree fn;
9360 const char *p1, *p2;
9361
9362 p2 = c_getstr (s2);
9363 if (p2 == NULL)
9364 return NULL_TREE;
9365
9366 p1 = c_getstr (s1);
9367 if (p1 != NULL)
9368 {
9369 const char *r = strpbrk (p1, p2);
9370 tree tem;
9371
9372 if (r == NULL)
9373 return build_int_cst (TREE_TYPE (s1), 0);
9374
9375 /* Return an offset into the constant string argument. */
9376 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9377 return fold_convert_loc (loc, type, tem);
9378 }
9379
9380 if (p2[0] == '\0')
9381 /* strpbrk(x, "") == NULL.
9382 Evaluate and ignore s1 in case it had side-effects. */
9383 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9384
9385 if (p2[1] != '\0')
9386 return NULL_TREE; /* Really call strpbrk. */
9387
9388 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9389 if (!fn)
9390 return NULL_TREE;
9391
9392 /* New argument list transforming strpbrk(s1, s2) to
9393 strchr(s1, s2[0]). */
9394 return build_call_expr_loc (loc, fn, 2, s1,
9395 build_int_cst (integer_type_node, p2[0]));
9396 }
9397 }
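
/* For illustration, the transformations above give:

     strpbrk (s, "")       => (void) s, (char *) 0
     strpbrk (s, "c")      => strchr (s, 'c')
     strpbrk ("ab", "xb")  => offset 1 into the constant "ab"

   and any other form is left for the library call.  */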
9398
9399 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9400 to the call.
9401
9402 Return NULL_TREE if no simplification was possible, otherwise return the
9403 simplified form of the call as a tree.
9404
9405 The simplified form may be a constant or other expression which
9406 computes the same value, but in a more efficient manner (including
9407 calls to other builtin functions).
9408
9409 The call may contain arguments which need to be evaluated, but
9410 which are not useful to determine the result of the call. In
9411 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9412 COMPOUND_EXPR will be an argument which must be evaluated.
9413 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9414 COMPOUND_EXPR in the chain will contain the tree for the simplified
9415 form of the builtin function call. */
9416
9417 static tree
9418 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9419 {
9420 if (!validate_arg (s1, POINTER_TYPE)
9421 || !validate_arg (s2, POINTER_TYPE))
9422 return NULL_TREE;
9423 else
9424 {
9425 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9426
9427 /* If either argument is "", the result is 0. */
9428 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9429 /* Evaluate and ignore both arguments in case either one has
9430 side-effects. */
9431 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9432 s1, s2);
9433 return NULL_TREE;
9434 }
9435 }
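
/* For illustration: strspn ("", s2) and strspn (s1, "") both fold to
   (size_t) 0 above, with the other argument still evaluated for possible
   side effects; every other form is left to the library.  */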
9436
9437 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9438 to the call.
9439
9440 Return NULL_TREE if no simplification was possible, otherwise return the
9441 simplified form of the call as a tree.
9442
9443 The simplified form may be a constant or other expression which
9444 computes the same value, but in a more efficient manner (including
9445 calls to other builtin functions).
9446
9447 The call may contain arguments which need to be evaluated, but
9448 which are not useful to determine the result of the call. In
9449 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9450 COMPOUND_EXPR will be an argument which must be evaluated.
9451 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9452 COMPOUND_EXPR in the chain will contain the tree for the simplified
9453 form of the builtin function call. */
9454
9455 static tree
9456 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9457 {
9458 if (!validate_arg (s1, POINTER_TYPE)
9459 || !validate_arg (s2, POINTER_TYPE))
9460 return NULL_TREE;
9461 else
9462 {
9463 /* If the first argument is "", the result is 0. */
9464 const char *p1 = c_getstr (s1);
9465 if (p1 && *p1 == '\0')
9466 {
9467 /* Evaluate and ignore argument s2 in case it has
9468 side-effects. */
9469 return omit_one_operand_loc (loc, size_type_node,
9470 size_zero_node, s2);
9471 }
9472
9473 /* If the second argument is "", return __builtin_strlen(s1). */
9474 const char *p2 = c_getstr (s2);
9475 if (p2 && *p2 == '\0')
9476 {
9477 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9478
9479 /* If the replacement _DECL isn't initialized, don't do the
9480 transformation. */
9481 if (!fn)
9482 return NULL_TREE;
9483
9484 return build_call_expr_loc (loc, fn, 1, s1);
9485 }
9486 return NULL_TREE;
9487 }
9488 }
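
/* For illustration, the two cases handled above are:

     strcspn ("", s2)  => (void) s2, (size_t) 0
     strcspn (s1, "")  => strlen (s1)

   anything else falls back to the library call.  */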
9489
9490 /* Fold the next_arg or va_start call EXP. Return true if an error was
9491 produced, false otherwise. This is done so that we don't output the error
9492 or warning twice or three times. */
9493
9494 bool
9495 fold_builtin_next_arg (tree exp, bool va_start_p)
9496 {
9497 tree fntype = TREE_TYPE (current_function_decl);
9498 int nargs = call_expr_nargs (exp);
9499 tree arg;
9500 /* There is a good chance the current input_location points inside the
9501 definition of the va_start macro (perhaps on the token for
9502 builtin) in a system header, so warnings will not be emitted.
9503 Use the location in real source code. */
9504 source_location current_location =
9505 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9506 NULL);
9507
9508 if (!stdarg_p (fntype))
9509 {
9510 error ("%<va_start%> used in function with fixed args");
9511 return true;
9512 }
9513
9514 if (va_start_p)
9515 {
9516 if (nargs != 2)
9517 {
9518 error ("wrong number of arguments to function %<va_start%>");
9519 return true;
9520 }
9521 arg = CALL_EXPR_ARG (exp, 1);
9522 }
9523 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
9524 when we checked the arguments and if needed issued a warning. */
9525 else
9526 {
9527 if (nargs == 0)
9528 {
9529 /* Evidently an out of date version of <stdarg.h>; can't validate
9530 va_start's second argument, but can still work as intended. */
9531 warning_at (current_location,
9532 OPT_Wvarargs,
9533 "%<__builtin_next_arg%> called without an argument");
9534 return true;
9535 }
9536 else if (nargs > 1)
9537 {
9538 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9539 return true;
9540 }
9541 arg = CALL_EXPR_ARG (exp, 0);
9542 }
9543
9544 if (TREE_CODE (arg) == SSA_NAME)
9545 arg = SSA_NAME_VAR (arg);
9546
9547 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9548 or __builtin_next_arg (0) the first time we see it, after checking
9549 the arguments and if needed issuing a warning. */
9550 if (!integer_zerop (arg))
9551 {
9552 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9553
9554 /* Strip off all nops for the sake of the comparison. This
9555 is not quite the same as STRIP_NOPS. It does more.
9556 We must also strip off INDIRECT_EXPR for C++ reference
9557 parameters. */
9558 while (CONVERT_EXPR_P (arg)
9559 || TREE_CODE (arg) == INDIRECT_REF)
9560 arg = TREE_OPERAND (arg, 0);
9561 if (arg != last_parm)
9562 {
9563 /* FIXME: Sometimes with the tree optimizers we can get an
9564 argument that is not the last one even though the user used the
9565 last argument. We just warn and set the arg to be the last
9566 argument so that we will get wrong code because of
9567 it. */
9568 warning_at (current_location,
9569 OPT_Wvarargs,
9570 "second parameter of %<va_start%> not last named argument");
9571 }
9572
9573 /* Undefined by C99 7.15.1.4p4 (va_start):
9574 "If the parameter parmN is declared with the register storage
9575 class, with a function or array type, or with a type that is
9576 not compatible with the type that results after application of
9577 the default argument promotions, the behavior is undefined."
9578 */
9579 else if (DECL_REGISTER (arg))
9580 {
9581 warning_at (current_location,
9582 OPT_Wvarargs,
9583 "undefined behavior when second parameter of "
9584 "%<va_start%> is declared with %<register%> storage");
9585 }
9586
9587 /* We want to verify the second parameter just once before the tree
9588 optimizers are run and then avoid keeping it in the tree,
9589 as otherwise we could warn even for correct code like:
9590 void foo (int i, ...)
9591 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9592 if (va_start_p)
9593 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9594 else
9595 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9596 }
9597 return false;
9598 }
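
/* For illustration: once the checks above pass, a call such as

     va_start (ap, last);     i.e. __builtin_va_start (ap, last)

   is rewritten in place to __builtin_va_start (ap, 0), so later passes do
   not re-check or re-warn about the second argument.  */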
9599
9600
9601 /* Expand a call EXP to __builtin_object_size. */
9602
9603 static rtx
9604 expand_builtin_object_size (tree exp)
9605 {
9606 tree ost;
9607 int object_size_type;
9608 tree fndecl = get_callee_fndecl (exp);
9609
9610 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9611 {
9612 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9613 exp, fndecl);
9614 expand_builtin_trap ();
9615 return const0_rtx;
9616 }
9617
9618 ost = CALL_EXPR_ARG (exp, 1);
9619 STRIP_NOPS (ost);
9620
9621 if (TREE_CODE (ost) != INTEGER_CST
9622 || tree_int_cst_sgn (ost) < 0
9623 || compare_tree_int (ost, 3) > 0)
9624 {
9625 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9626 exp, fndecl);
9627 expand_builtin_trap ();
9628 return const0_rtx;
9629 }
9630
9631 object_size_type = tree_to_shwi (ost);
9632
9633 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9634 }
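
/* For illustration, a __builtin_object_size call that survives to
   expansion gets the documented fallback values:

     __builtin_object_size (p, 0)  => (size_t) -1
     __builtin_object_size (p, 2)  => (size_t) 0

   i.e. the maximum estimate for types 0 and 1 and the minimum estimate
   for types 2 and 3.  */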
9635
9636 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9637 FCODE is the BUILT_IN_* to use.
9638 Return NULL_RTX if we failed; the caller should emit a normal call,
9639 otherwise try to get the result in TARGET, if convenient (and in
9640 mode MODE if that's convenient). */
9641
9642 static rtx
9643 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9644 enum built_in_function fcode)
9645 {
9646 tree dest, src, len, size;
9647
9648 if (!validate_arglist (exp,
9649 POINTER_TYPE,
9650 fcode == BUILT_IN_MEMSET_CHK
9651 ? INTEGER_TYPE : POINTER_TYPE,
9652 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9653 return NULL_RTX;
9654
9655 dest = CALL_EXPR_ARG (exp, 0);
9656 src = CALL_EXPR_ARG (exp, 1);
9657 len = CALL_EXPR_ARG (exp, 2);
9658 size = CALL_EXPR_ARG (exp, 3);
9659
9660 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9661 exp, len, /*maxlen=*/NULL_TREE,
9662 /*str=*/NULL_TREE, size);
9663
9664 if (!tree_fits_uhwi_p (size))
9665 return NULL_RTX;
9666
9667 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9668 {
9669 /* Avoid transforming the checking call to an ordinary one when
9670 an overflow has been detected or when the call couldn't be
9671 validated because the size is not constant. */
9672 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9673 return NULL_RTX;
9674
9675 tree fn = NULL_TREE;
9676 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9677 mem{cpy,pcpy,move,set} is available. */
9678 switch (fcode)
9679 {
9680 case BUILT_IN_MEMCPY_CHK:
9681 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9682 break;
9683 case BUILT_IN_MEMPCPY_CHK:
9684 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9685 break;
9686 case BUILT_IN_MEMMOVE_CHK:
9687 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9688 break;
9689 case BUILT_IN_MEMSET_CHK:
9690 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9691 break;
9692 default:
9693 break;
9694 }
9695
9696 if (! fn)
9697 return NULL_RTX;
9698
9699 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9700 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9701 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9702 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9703 }
9704 else if (fcode == BUILT_IN_MEMSET_CHK)
9705 return NULL_RTX;
9706 else
9707 {
9708 unsigned int dest_align = get_pointer_alignment (dest);
9709
9710 /* If DEST is not a pointer type, call the normal function. */
9711 if (dest_align == 0)
9712 return NULL_RTX;
9713
9714 /* If SRC and DEST are the same (and not volatile), do nothing. */
9715 if (operand_equal_p (src, dest, 0))
9716 {
9717 tree expr;
9718
9719 if (fcode != BUILT_IN_MEMPCPY_CHK)
9720 {
9721 /* Evaluate and ignore LEN in case it has side-effects. */
9722 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9723 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9724 }
9725
9726 expr = fold_build_pointer_plus (dest, len);
9727 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9728 }
9729
9730 /* __memmove_chk special case. */
9731 if (fcode == BUILT_IN_MEMMOVE_CHK)
9732 {
9733 unsigned int src_align = get_pointer_alignment (src);
9734
9735 if (src_align == 0)
9736 return NULL_RTX;
9737
9738 /* If src is categorized for a readonly section we can use
9739 normal __memcpy_chk. */
9740 if (readonly_data_expr (src))
9741 {
9742 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9743 if (!fn)
9744 return NULL_RTX;
9745 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9746 dest, src, len, size);
9747 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9748 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9749 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9750 }
9751 }
9752 return NULL_RTX;
9753 }
9754 }
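
/* For illustration: __builtin___memcpy_chk (d, s, 16, 32), where the
   length is a known constant that does not exceed the object size, is
   expanded as a plain memcpy (d, s, 16); with a non-constant length the
   call is usually left for the library, except for the special cases
   above (d == s, or __memmove_chk from read-only data, which is retried
   as __memcpy_chk).  */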
9755
9756 /* Emit warning if a buffer overflow is detected at compile time. */
9757
9758 static void
9759 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9760 {
9761 /* The source string. */
9762 tree srcstr = NULL_TREE;
9763 /* The size of the destination object. */
9764 tree objsize = NULL_TREE;
9765 /* The string that is being concatenated with (as in __strcat_chk)
9766 or null if it isn't. */
9767 tree catstr = NULL_TREE;
9768 /* The maximum length of the source sequence in a bounded operation
9769 (such as __strncat_chk) or null if the operation isn't bounded
9770 (such as __strcat_chk). */
9771 tree maxlen = NULL_TREE;
9772
9773 switch (fcode)
9774 {
9775 case BUILT_IN_STRCPY_CHK:
9776 case BUILT_IN_STPCPY_CHK:
9777 srcstr = CALL_EXPR_ARG (exp, 1);
9778 objsize = CALL_EXPR_ARG (exp, 2);
9779 break;
9780
9781 case BUILT_IN_STRCAT_CHK:
9782 /* For __strcat_chk the warning will be emitted only if overflowing
9783 by at least strlen (dest) + 1 bytes. */
9784 catstr = CALL_EXPR_ARG (exp, 0);
9785 srcstr = CALL_EXPR_ARG (exp, 1);
9786 objsize = CALL_EXPR_ARG (exp, 2);
9787 break;
9788
9789 case BUILT_IN_STRNCAT_CHK:
9790 catstr = CALL_EXPR_ARG (exp, 0);
9791 srcstr = CALL_EXPR_ARG (exp, 1);
9792 maxlen = CALL_EXPR_ARG (exp, 2);
9793 objsize = CALL_EXPR_ARG (exp, 3);
9794 break;
9795
9796 case BUILT_IN_STRNCPY_CHK:
9797 case BUILT_IN_STPNCPY_CHK:
9798 srcstr = CALL_EXPR_ARG (exp, 1);
9799 maxlen = CALL_EXPR_ARG (exp, 2);
9800 objsize = CALL_EXPR_ARG (exp, 3);
9801 break;
9802
9803 case BUILT_IN_SNPRINTF_CHK:
9804 case BUILT_IN_VSNPRINTF_CHK:
9805 maxlen = CALL_EXPR_ARG (exp, 1);
9806 objsize = CALL_EXPR_ARG (exp, 3);
9807 break;
9808 default:
9809 gcc_unreachable ();
9810 }
9811
9812 if (catstr && maxlen)
9813 {
9814 /* Check __strncat_chk. There is no way to determine the length
9815 of the string to which the source string is being appended so
9816 just warn when the length of the source string is not known. */
9817 check_strncat_sizes (exp, objsize);
9818 return;
9819 }
9820
9821 check_sizes (OPT_Wstringop_overflow_, exp,
9822 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9823 }
9824
9825 /* Emit warning if a buffer overflow is detected at compile time
9826 in __sprintf_chk/__vsprintf_chk calls. */
9827
9828 static void
9829 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9830 {
9831 tree size, len, fmt;
9832 const char *fmt_str;
9833 int nargs = call_expr_nargs (exp);
9834
9835 /* Verify the required arguments in the original call. */
9836
9837 if (nargs < 4)
9838 return;
9839 size = CALL_EXPR_ARG (exp, 2);
9840 fmt = CALL_EXPR_ARG (exp, 3);
9841
9842 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9843 return;
9844
9845 /* Check whether the format is a literal string constant. */
9846 fmt_str = c_getstr (fmt);
9847 if (fmt_str == NULL)
9848 return;
9849
9850 if (!init_target_chars ())
9851 return;
9852
9853 /* If the format doesn't contain % args or %%, we know its size. */
9854 if (strchr (fmt_str, target_percent) == 0)
9855 len = build_int_cstu (size_type_node, strlen (fmt_str));
9856 /* If the format is "%s" and the first ... argument is a string literal,
9857 we know it too. */
9858 else if (fcode == BUILT_IN_SPRINTF_CHK
9859 && strcmp (fmt_str, target_percent_s) == 0)
9860 {
9861 tree arg;
9862
9863 if (nargs < 5)
9864 return;
9865 arg = CALL_EXPR_ARG (exp, 4);
9866 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9867 return;
9868
9869 len = c_strlen (arg, 1);
9870 if (!len || ! tree_fits_uhwi_p (len))
9871 return;
9872 }
9873 else
9874 return;
9875
9876 /* Add one for the terminating nul. */
9877 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9878 check_sizes (OPT_Wstringop_overflow_,
9879 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
9880 }
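
/* For illustration (hypothetical code):

     char buf[4];
     __builtin___sprintf_chk (buf, 0, 4, "abcde");

   the format contains no '%', so LEN is strlen ("abcde") + 1 == 6, and
   check_sizes reports writing 6 bytes into a 4-byte object.  */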
9881
9882 /* Emit warning if a free is called with address of a variable. */
9883
9884 static void
9885 maybe_emit_free_warning (tree exp)
9886 {
9887 tree arg = CALL_EXPR_ARG (exp, 0);
9888
9889 STRIP_NOPS (arg);
9890 if (TREE_CODE (arg) != ADDR_EXPR)
9891 return;
9892
9893 arg = get_base_address (TREE_OPERAND (arg, 0));
9894 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9895 return;
9896
9897 if (SSA_VAR_P (arg))
9898 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9899 "%Kattempt to free a non-heap object %qD", exp, arg);
9900 else
9901 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9902 "%Kattempt to free a non-heap object", exp);
9903 }
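
/* For illustration, the warning above fires for code like

     int i;
     free (&i);    -Wfree-nonheap-object: attempt to free a non-heap object 'i'

   but stays quiet when the base is reached through an INDIRECT_REF or
   MEM_REF, since such a pointer may legitimately refer to heap memory.  */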
9904
9905 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9906 if possible. */
9907
9908 static tree
9909 fold_builtin_object_size (tree ptr, tree ost)
9910 {
9911 unsigned HOST_WIDE_INT bytes;
9912 int object_size_type;
9913
9914 if (!validate_arg (ptr, POINTER_TYPE)
9915 || !validate_arg (ost, INTEGER_TYPE))
9916 return NULL_TREE;
9917
9918 STRIP_NOPS (ost);
9919
9920 if (TREE_CODE (ost) != INTEGER_CST
9921 || tree_int_cst_sgn (ost) < 0
9922 || compare_tree_int (ost, 3) > 0)
9923 return NULL_TREE;
9924
9925 object_size_type = tree_to_shwi (ost);
9926
9927 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9928 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9929 and (size_t) 0 for types 2 and 3. */
9930 if (TREE_SIDE_EFFECTS (ptr))
9931 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9932
9933 if (TREE_CODE (ptr) == ADDR_EXPR)
9934 {
9935 compute_builtin_object_size (ptr, object_size_type, &bytes);
9936 if (wi::fits_to_tree_p (bytes, size_type_node))
9937 return build_int_cstu (size_type_node, bytes);
9938 }
9939 else if (TREE_CODE (ptr) == SSA_NAME)
9940 {
9941 /* If object size is not known yet, delay folding until
9942 later. Maybe subsequent passes will help determining
9943 it. */
9944 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9945 && wi::fits_to_tree_p (bytes, size_type_node))
9946 return build_int_cstu (size_type_node, bytes);
9947 }
9948
9949 return NULL_TREE;
9950 }
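
/* For illustration (hypothetical declarations):

     char buf[64];
     __builtin_object_size (buf, 0)   => 64
     __builtin_object_size (f (), 0)  => (size_t) -1   (side effects)
     __builtin_object_size (f (), 2)  => (size_t) 0

   pointers that are still SSA_NAMEs with unknown sizes are left unfolded
   so later passes can retry.  */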
9951
9952 /* Builtins with folding operations that operate on "..." arguments
9953 need special handling; we need to store the arguments in a convenient
9954 data structure before attempting any folding. Fortunately there are
9955 only a few builtins that fall into this category. FNDECL is the
9956 function, EXP is the CALL_EXPR for the call. */
9957
9958 static tree
9959 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9960 {
9961 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9962 tree ret = NULL_TREE;
9963
9964 switch (fcode)
9965 {
9966 case BUILT_IN_FPCLASSIFY:
9967 ret = fold_builtin_fpclassify (loc, args, nargs);
9968 break;
9969
9970 default:
9971 break;
9972 }
9973 if (ret)
9974 {
9975 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9976 SET_EXPR_LOCATION (ret, loc);
9977 TREE_NO_WARNING (ret) = 1;
9978 return ret;
9979 }
9980 return NULL_TREE;
9981 }
9982
9983 /* Initialize format string characters in the target charset. */
9984
9985 bool
9986 init_target_chars (void)
9987 {
9988 static bool init;
9989 if (!init)
9990 {
9991 target_newline = lang_hooks.to_target_charset ('\n');
9992 target_percent = lang_hooks.to_target_charset ('%');
9993 target_c = lang_hooks.to_target_charset ('c');
9994 target_s = lang_hooks.to_target_charset ('s');
9995 if (target_newline == 0 || target_percent == 0 || target_c == 0
9996 || target_s == 0)
9997 return false;
9998
9999 target_percent_c[0] = target_percent;
10000 target_percent_c[1] = target_c;
10001 target_percent_c[2] = '\0';
10002
10003 target_percent_s[0] = target_percent;
10004 target_percent_s[1] = target_s;
10005 target_percent_s[2] = '\0';
10006
10007 target_percent_s_newline[0] = target_percent;
10008 target_percent_s_newline[1] = target_s;
10009 target_percent_s_newline[2] = target_newline;
10010 target_percent_s_newline[3] = '\0';
10011
10012 init = true;
10013 }
10014 return true;
10015 }
10016
10017 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10018 and no overflow/underflow occurred. INEXACT is true if M was not
10019 exactly calculated. TYPE is the tree type for the result. This
10020 function assumes that you cleared the MPFR flags and then
10021 calculated M to see if anything subsequently set a flag prior to
10022 entering this function. Return NULL_TREE if any checks fail. */
10023
10024 static tree
10025 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10026 {
10027 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10028 overflow/underflow occurred. If -frounding-math, proceed iff the
10029 result of calling FUNC was exact. */
10030 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10031 && (!flag_rounding_math || !inexact))
10032 {
10033 REAL_VALUE_TYPE rr;
10034
10035 real_from_mpfr (&rr, m, type, GMP_RNDN);
10036 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10037 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10038 but the mpfr_t is not, then we underflowed in the
10039 conversion. */
10040 if (real_isfinite (&rr)
10041 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10042 {
10043 REAL_VALUE_TYPE rmode;
10044
10045 real_convert (&rmode, TYPE_MODE (type), &rr);
10046 /* Proceed iff the specified mode can hold the value. */
10047 if (real_identical (&rmode, &rr))
10048 return build_real (type, rmode);
10049 }
10050 }
10051 return NULL_TREE;
10052 }
10053
10054 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10055 number and no overflow/underflow occurred. INEXACT is true if M
10056 was not exactly calculated. TYPE is the tree type for the result.
10057 This function assumes that you cleared the MPFR flags and then
10058 calculated M to see if anything subsequently set a flag prior to
10059 entering this function. Return NULL_TREE if any checks fail, if
10060 FORCE_CONVERT is true, then bypass the checks. */
10061
10062 static tree
10063 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10064 {
10065 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10066 overflow/underflow occurred. If -frounding-math, proceed iff the
10067 result of calling FUNC was exact. */
10068 if (force_convert
10069 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10070 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10071 && (!flag_rounding_math || !inexact)))
10072 {
10073 REAL_VALUE_TYPE re, im;
10074
10075 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10076 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10077 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10078 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10079 but the mpfr_t is not, then we underflowed in the
10080 conversion. */
10081 if (force_convert
10082 || (real_isfinite (&re) && real_isfinite (&im)
10083 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10084 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10085 {
10086 REAL_VALUE_TYPE re_mode, im_mode;
10087
10088 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10089 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10090 /* Proceed iff the specified mode can hold the value. */
10091 if (force_convert
10092 || (real_identical (&re_mode, &re)
10093 && real_identical (&im_mode, &im)))
10094 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10095 build_real (TREE_TYPE (type), im_mode));
10096 }
10097 }
10098 return NULL_TREE;
10099 }
10100
10101 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10102 the pointer *(ARG_QUO) and return the result. The type is taken
10103 from the type of ARG0 and is used for setting the precision of the
10104 calculation and results. */
10105
10106 static tree
10107 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10108 {
10109 tree const type = TREE_TYPE (arg0);
10110 tree result = NULL_TREE;
10111
10112 STRIP_NOPS (arg0);
10113 STRIP_NOPS (arg1);
10114
10115 /* To proceed, MPFR must exactly represent the target floating point
10116 format, which only happens when the target base equals two. */
10117 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10118 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10119 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10120 {
10121 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10122 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10123
10124 if (real_isfinite (ra0) && real_isfinite (ra1))
10125 {
10126 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10127 const int prec = fmt->p;
10128 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10129 tree result_rem;
10130 long integer_quo;
10131 mpfr_t m0, m1;
10132
10133 mpfr_inits2 (prec, m0, m1, NULL);
10134 mpfr_from_real (m0, ra0, GMP_RNDN);
10135 mpfr_from_real (m1, ra1, GMP_RNDN);
10136 mpfr_clear_flags ();
10137 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10138 /* Remquo is independent of the rounding mode, so pass
10139 inexact=0 to do_mpfr_ckconv(). */
10140 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10141 mpfr_clears (m0, m1, NULL);
10142 if (result_rem)
10143 {
10144 /* MPFR calculates quo in the host's long so it may
10145 return more bits in quo than the target int can hold
10146 if sizeof(host long) > sizeof(target int). This can
10147 happen even for native compilers in LP64 mode. In
10148 these cases, modulo the quo value with the largest
10149 number that the target int can hold while leaving one
10150 bit for the sign. */
10151 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10152 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10153
10154 /* Dereference the quo pointer argument. */
10155 arg_quo = build_fold_indirect_ref (arg_quo);
10156 /* Proceed iff a valid pointer type was passed in. */
10157 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10158 {
10159 /* Set the value. */
10160 tree result_quo
10161 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10162 build_int_cst (TREE_TYPE (arg_quo),
10163 integer_quo));
10164 TREE_SIDE_EFFECTS (result_quo) = 1;
10165 /* Combine the quo assignment with the rem. */
10166 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10167 result_quo, result_rem));
10168 }
10169 }
10170 }
10171 }
10172 return result;
10173 }
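
/* For illustration, a constant call such as remquo (5.0, 2.0, &q) folds
   via MPFR to the compound expression (q = 2, 1.0): the quotient 5/2 is
   rounded to the nearest integer, ties to even, giving 2, and the
   remainder 5 - 2*2 = 1.0 is the value of the expression.  */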
10174
10175 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10176 resulting value as a tree with type TYPE. The mpfr precision is
10177 set to the precision of TYPE. We assume that this mpfr function
10178 returns zero if the result could be calculated exactly within the
10179 requested precision. In addition, the integer pointer represented
10180 by ARG_SG will be dereferenced and set to the appropriate signgam
10181 (-1,1) value. */
10182
10183 static tree
10184 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10185 {
10186 tree result = NULL_TREE;
10187
10188 STRIP_NOPS (arg);
10189
10190 /* To proceed, MPFR must exactly represent the target floating point
10191 format, which only happens when the target base equals two. Also
10192 verify ARG is a constant and that ARG_SG is an int pointer. */
10193 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10194 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10195 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10196 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10197 {
10198 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10199
10200 /* In addition to NaN and Inf, the argument cannot be zero or a
10201 negative integer. */
10202 if (real_isfinite (ra)
10203 && ra->cl != rvc_zero
10204 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10205 {
10206 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10207 const int prec = fmt->p;
10208 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10209 int inexact, sg;
10210 mpfr_t m;
10211 tree result_lg;
10212
10213 mpfr_init2 (m, prec);
10214 mpfr_from_real (m, ra, GMP_RNDN);
10215 mpfr_clear_flags ();
10216 inexact = mpfr_lgamma (m, &sg, m, rnd);
10217 result_lg = do_mpfr_ckconv (m, type, inexact);
10218 mpfr_clear (m);
10219 if (result_lg)
10220 {
10221 tree result_sg;
10222
10223 /* Dereference the arg_sg pointer argument. */
10224 arg_sg = build_fold_indirect_ref (arg_sg);
10225 /* Assign the signgam value into *arg_sg. */
10226 result_sg = fold_build2 (MODIFY_EXPR,
10227 TREE_TYPE (arg_sg), arg_sg,
10228 build_int_cst (TREE_TYPE (arg_sg), sg));
10229 TREE_SIDE_EFFECTS (result_sg) = 1;
10230 /* Combine the signgam assignment with the lgamma result. */
10231 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10232 result_sg, result_lg));
10233 }
10234 }
10235 }
10236
10237 return result;
10238 }
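
/* For illustration, a constant call such as lgamma_r (3.0, &sg) folds to
   the compound expression (sg = 1, log (2.0)), since gamma (3) == 2 is
   positive; the log value (about 0.693147) is computed by MPFR to the
   precision of the argument type, subject to the checks in
   do_mpfr_ckconv.  */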
10239
10240 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10241 mpc function FUNC on it and return the resulting value as a tree
10242 with type TYPE. The mpfr precision is set to the precision of
10243 TYPE. We assume that function FUNC returns zero if the result
10244 could be calculated exactly within the requested precision. If
10245 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10246 in the arguments and/or results. */
10247
10248 tree
10249 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10250 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10251 {
10252 tree result = NULL_TREE;
10253
10254 STRIP_NOPS (arg0);
10255 STRIP_NOPS (arg1);
10256
10257 /* To proceed, MPFR must exactly represent the target floating point
10258 format, which only happens when the target base equals two. */
10259 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10260 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10261 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10262 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10263 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10264 {
10265 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10266 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10267 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10268 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10269
10270 if (do_nonfinite
10271 || (real_isfinite (re0) && real_isfinite (im0)
10272 && real_isfinite (re1) && real_isfinite (im1)))
10273 {
10274 const struct real_format *const fmt =
10275 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10276 const int prec = fmt->p;
10277 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10278 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10279 int inexact;
10280 mpc_t m0, m1;
10281
10282 mpc_init2 (m0, prec);
10283 mpc_init2 (m1, prec);
10284 mpfr_from_real (mpc_realref (m0), re0, rnd);
10285 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10286 mpfr_from_real (mpc_realref (m1), re1, rnd);
10287 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10288 mpfr_clear_flags ();
10289 inexact = func (m0, m0, m1, crnd);
10290 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10291 mpc_clear (m0);
10292 mpc_clear (m1);
10293 }
10294 }
10295
10296 return result;
10297 }
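/* A sketch of a typical use, assuming complex constant operands ARG0 and
   ARG1 of complex type TYPE: a two-argument complex builtin such as cpow
   can be handed to this routine together with the matching mpc entry
   point, e.g.

       result = do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);

   where the fourth argument is DO_NONFINITE; passing a nonzero value
   there additionally permits folding when the operands or the result
   contain Inf or NaN.  */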
10298
10299 /* A wrapper function for builtin folding that prevents warnings for
10300 "statement without effect" and the like, caused by removing the
10301 call node before the warning is generated. */
10302
10303 tree
10304 fold_call_stmt (gcall *stmt, bool ignore)
10305 {
10306 tree ret = NULL_TREE;
10307 tree fndecl = gimple_call_fndecl (stmt);
10308 location_t loc = gimple_location (stmt);
10309 if (fndecl
10310 && TREE_CODE (fndecl) == FUNCTION_DECL
10311 && DECL_BUILT_IN (fndecl)
10312 && !gimple_call_va_arg_pack_p (stmt))
10313 {
10314 int nargs = gimple_call_num_args (stmt);
10315 tree *args = (nargs > 0
10316 ? gimple_call_arg_ptr (stmt, 0)
10317 : &error_mark_node);
10318
10319 if (avoid_folding_inline_builtin (fndecl))
10320 return NULL_TREE;
10321 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10322 {
10323 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10324 }
10325 else
10326 {
10327 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10328 if (ret)
10329 {
10330 /* Propagate location information from original call to
10331 expansion of builtin. Otherwise things like
10332 maybe_emit_chk_warning, which operate on the expansion
10333 of a builtin, will use the wrong location information. */
10334 if (gimple_has_location (stmt))
10335 {
10336 tree realret = ret;
10337 if (TREE_CODE (ret) == NOP_EXPR)
10338 realret = TREE_OPERAND (ret, 0);
10339 if (CAN_HAVE_LOCATION_P (realret)
10340 && !EXPR_HAS_LOCATION (realret))
10341 SET_EXPR_LOCATION (realret, loc);
10342 return realret;
10343 }
10344 return ret;
10345 }
10346 }
10347 }
10348 return NULL_TREE;
10349 }
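/* A sketch of a hypothetical caller (the surrounding statement iteration
   and replacement step are assumptions, not code from this file):

       gcall *call = as_a <gcall *> (gsi_stmt (gsi));
       tree folded = fold_call_stmt (call, gimple_call_lhs (call) == NULL_TREE);
       if (folded)
         ...replace the call with FOLDED...

   Passing true for IGNORE tells the folder that the call's value is
   unused, so simplifications that discard the result are acceptable.  */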
10350
10351 /* Look up the builtin declaration that corresponds to DECL and set
10352 ASMSPEC as its user assembler name. DECL must be a
10353 function decl that declares a builtin. */
10354
10355 void
10356 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10357 {
10358 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10359 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10360 && asmspec != 0);
10361
10362 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10363 set_user_assembler_name (builtin, asmspec);
10364
10365 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10366 && INT_TYPE_SIZE < BITS_PER_WORD)
10367 {
10368 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10369 set_user_assembler_libfunc ("ffs", asmspec);
10370 set_optab_libfunc (ffs_optab, mode, "ffs");
10371 }
10372 }
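/* For example (an illustration of the user-level trigger rather than
   code from a front end), a declaration such as

       extern int ffs (int) __asm__ ("my_ffs");

   gives the ffs builtin the assembler name "my_ffs"; on a target whose
   int is narrower than a word, the ffs optab libfunc is redirected as
   well, so library calls emitted for __builtin_ffs use the renamed
   symbol consistently.  */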
10373
10374 /* Return true if DECL is a builtin that expands to a constant or similarly
10375 simple code. */
10376 bool
10377 is_simple_builtin (tree decl)
10378 {
10379 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10380 switch (DECL_FUNCTION_CODE (decl))
10381 {
10382 /* Builtins that expand to constants. */
10383 case BUILT_IN_CONSTANT_P:
10384 case BUILT_IN_EXPECT:
10385 case BUILT_IN_OBJECT_SIZE:
10386 case BUILT_IN_UNREACHABLE:
10387 /* Simple register moves or loads from stack. */
10388 case BUILT_IN_ASSUME_ALIGNED:
10389 case BUILT_IN_RETURN_ADDRESS:
10390 case BUILT_IN_EXTRACT_RETURN_ADDR:
10391 case BUILT_IN_FROB_RETURN_ADDR:
10392 case BUILT_IN_RETURN:
10393 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10394 case BUILT_IN_FRAME_ADDRESS:
10395 case BUILT_IN_VA_END:
10396 case BUILT_IN_STACK_SAVE:
10397 case BUILT_IN_STACK_RESTORE:
10398 /* Exception state returns or moves registers around. */
10399 case BUILT_IN_EH_FILTER:
10400 case BUILT_IN_EH_POINTER:
10401 case BUILT_IN_EH_COPY_VALUES:
10402 return true;
10403
10404 default:
10405 return false;
10406 }
10407
10408 return false;
10409 }
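/* For instance (illustrative calls, not taken from this file), both

       if (__builtin_expect (x > 0, 1))
         do_something ();
       sz = __builtin_object_size (p, 0);

   are classified as simple by the switch above, so cost heuristics that
   consult this predicate can treat such calls as essentially free.  */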
10410
10411 /* Return true if DECL is a builtin that is not expensive, i.e., one that is
10412 most probably expanded inline into reasonably simple code. This is a
10413 superset of is_simple_builtin. */
10414 bool
10415 is_inexpensive_builtin (tree decl)
10416 {
10417 if (!decl)
10418 return false;
10419 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10420 return true;
10421 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10422 switch (DECL_FUNCTION_CODE (decl))
10423 {
10424 case BUILT_IN_ABS:
10425 case BUILT_IN_ALLOCA:
10426 case BUILT_IN_ALLOCA_WITH_ALIGN:
10427 case BUILT_IN_BSWAP16:
10428 case BUILT_IN_BSWAP32:
10429 case BUILT_IN_BSWAP64:
10430 case BUILT_IN_CLZ:
10431 case BUILT_IN_CLZIMAX:
10432 case BUILT_IN_CLZL:
10433 case BUILT_IN_CLZLL:
10434 case BUILT_IN_CTZ:
10435 case BUILT_IN_CTZIMAX:
10436 case BUILT_IN_CTZL:
10437 case BUILT_IN_CTZLL:
10438 case BUILT_IN_FFS:
10439 case BUILT_IN_FFSIMAX:
10440 case BUILT_IN_FFSL:
10441 case BUILT_IN_FFSLL:
10442 case BUILT_IN_IMAXABS:
10443 case BUILT_IN_FINITE:
10444 case BUILT_IN_FINITEF:
10445 case BUILT_IN_FINITEL:
10446 case BUILT_IN_FINITED32:
10447 case BUILT_IN_FINITED64:
10448 case BUILT_IN_FINITED128:
10449 case BUILT_IN_FPCLASSIFY:
10450 case BUILT_IN_ISFINITE:
10451 case BUILT_IN_ISINF_SIGN:
10452 case BUILT_IN_ISINF:
10453 case BUILT_IN_ISINFF:
10454 case BUILT_IN_ISINFL:
10455 case BUILT_IN_ISINFD32:
10456 case BUILT_IN_ISINFD64:
10457 case BUILT_IN_ISINFD128:
10458 case BUILT_IN_ISNAN:
10459 case BUILT_IN_ISNANF:
10460 case BUILT_IN_ISNANL:
10461 case BUILT_IN_ISNAND32:
10462 case BUILT_IN_ISNAND64:
10463 case BUILT_IN_ISNAND128:
10464 case BUILT_IN_ISNORMAL:
10465 case BUILT_IN_ISGREATER:
10466 case BUILT_IN_ISGREATEREQUAL:
10467 case BUILT_IN_ISLESS:
10468 case BUILT_IN_ISLESSEQUAL:
10469 case BUILT_IN_ISLESSGREATER:
10470 case BUILT_IN_ISUNORDERED:
10471 case BUILT_IN_VA_ARG_PACK:
10472 case BUILT_IN_VA_ARG_PACK_LEN:
10473 case BUILT_IN_VA_COPY:
10474 case BUILT_IN_TRAP:
10475 case BUILT_IN_SAVEREGS:
10476 case BUILT_IN_POPCOUNTL:
10477 case BUILT_IN_POPCOUNTLL:
10478 case BUILT_IN_POPCOUNTIMAX:
10479 case BUILT_IN_POPCOUNT:
10480 case BUILT_IN_PARITYL:
10481 case BUILT_IN_PARITYLL:
10482 case BUILT_IN_PARITYIMAX:
10483 case BUILT_IN_PARITY:
10484 case BUILT_IN_LABS:
10485 case BUILT_IN_LLABS:
10486 case BUILT_IN_PREFETCH:
10487 case BUILT_IN_ACC_ON_DEVICE:
10488 return true;
10489
10490 default:
10491 return is_simple_builtin (decl);
10492 }
10493
10494 return false;
10495 }
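/* A sketch of how a size or cost heuristic might consult this predicate
   (the surrounding cost model is an assumption, not code from this file):

       if (is_gimple_call (stmt)
           && is_inexpensive_builtin (gimple_call_fndecl (stmt)))
         cost = 1;

   i.e. calls such as __builtin_popcount or __builtin_clz are charged as
   a single cheap instruction rather than as a full call.  */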
10496
10497 /* Return true if T is a constant and the value cast to a target char
10498 can be represented by a host char.
10499 Store the cast char constant in *P if so. */
10500
10501 bool
10502 target_char_cst_p (tree t, char *p)
10503 {
10504 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10505 return false;
10506
10507 *p = (char)tree_to_uhwi (t);
10508 return true;
10509 }
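/* For example (an illustrative use, with ARG and C chosen here rather
   than taken from a caller), a folder for strchr could fetch the
   character argument with

       char c;
       if (target_char_cst_p (arg, &c))
         ...search the constant string for C on the host...

   The CHAR_TYPE_SIZE check above guarantees that C holds the target
   character value exactly.  */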