[thirdparty/gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "params.h"
35 #include "tm_p.h"
36 #include "stringpool.h"
37 #include "tree-vrp.h"
38 #include "tree-ssanames.h"
39 #include "expmed.h"
40 #include "optabs.h"
41 #include "emit-rtl.h"
42 #include "recog.h"
43 #include "diagnostic-core.h"
44 #include "alias.h"
45 #include "fold-const.h"
46 #include "fold-const-call.h"
47 #include "gimple-ssa-warn-restrict.h"
48 #include "stor-layout.h"
49 #include "calls.h"
50 #include "varasm.h"
51 #include "tree-object-size.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76
77 struct target_builtins default_target_builtins;
78 #if SWITCHABLE_TARGET
79 struct target_builtins *this_target_builtins = &default_target_builtins;
80 #endif
81
82 /* Define the names of the builtin function types and codes. */
83 const char *const built_in_class_names[BUILT_IN_LAST]
84 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
85
86 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
87 const char * built_in_names[(int) END_BUILTINS] =
88 {
89 #include "builtins.def"
90 };
91
92 /* Set up an array of builtin_info_type, making sure each element's decl is
93 initialized to NULL_TREE. */
94 builtin_info_type builtin_info[(int)END_BUILTINS];
95
96 /* Non-zero if __builtin_constant_p should be folded right away. */
97 bool force_folding_builtin_constant_p;
98
99 static int target_char_cast (tree, char *);
100 static rtx get_memory_rtx (tree, tree);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx result_vector (int, rtx);
104 static void expand_builtin_prefetch (tree);
105 static rtx expand_builtin_apply_args (void);
106 static rtx expand_builtin_apply_args_1 (void);
107 static rtx expand_builtin_apply (rtx, rtx, rtx);
108 static void expand_builtin_return (rtx);
109 static enum type_class type_to_class (tree);
110 static rtx expand_builtin_classify_type (tree);
111 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
112 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
113 static rtx expand_builtin_interclass_mathfn (tree, rtx);
114 static rtx expand_builtin_sincos (tree);
115 static rtx expand_builtin_cexpi (tree, rtx);
116 static rtx expand_builtin_int_roundingfn (tree, rtx);
117 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
118 static rtx expand_builtin_next_arg (void);
119 static rtx expand_builtin_va_start (tree);
120 static rtx expand_builtin_va_end (tree);
121 static rtx expand_builtin_va_copy (tree);
122 static rtx inline_expand_builtin_string_cmp (tree, rtx);
123 static rtx expand_builtin_strcmp (tree, rtx);
124 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
125 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
126 static rtx expand_builtin_memchr (tree, rtx);
127 static rtx expand_builtin_memcpy (tree, rtx);
128 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
129 rtx target, tree exp,
130 memop_ret retmode);
131 static rtx expand_builtin_memmove (tree, rtx);
132 static rtx expand_builtin_mempcpy (tree, rtx);
133 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
134 static rtx expand_builtin_strcat (tree, rtx);
135 static rtx expand_builtin_strcpy (tree, rtx);
136 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
137 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
138 static rtx expand_builtin_stpncpy (tree, rtx);
139 static rtx expand_builtin_strncat (tree, rtx);
140 static rtx expand_builtin_strncpy (tree, rtx);
141 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
142 static rtx expand_builtin_memset (tree, rtx, machine_mode);
143 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
144 static rtx expand_builtin_bzero (tree);
145 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
146 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
147 static rtx expand_builtin_alloca (tree);
148 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
149 static rtx expand_builtin_frame_address (tree, tree);
150 static tree stabilize_va_list_loc (location_t, tree, int);
151 static rtx expand_builtin_expect (tree, rtx);
152 static rtx expand_builtin_expect_with_probability (tree, rtx);
153 static tree fold_builtin_constant_p (tree);
154 static tree fold_builtin_classify_type (tree);
155 static tree fold_builtin_strlen (location_t, tree, tree);
156 static tree fold_builtin_inf (location_t, tree, int);
157 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
158 static bool validate_arg (const_tree, enum tree_code code);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
162 static tree fold_builtin_isascii (location_t, tree);
163 static tree fold_builtin_toascii (location_t, tree);
164 static tree fold_builtin_isdigit (location_t, tree);
165 static tree fold_builtin_fabs (location_t, tree, tree);
166 static tree fold_builtin_abs (location_t, tree, tree);
167 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
168 enum tree_code);
169 static tree fold_builtin_0 (location_t, tree);
170 static tree fold_builtin_1 (location_t, tree, tree);
171 static tree fold_builtin_2 (location_t, tree, tree, tree);
172 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
173 static tree fold_builtin_varargs (location_t, tree, tree*, int);
174
175 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
176 static tree fold_builtin_strspn (location_t, tree, tree);
177 static tree fold_builtin_strcspn (location_t, tree, tree);
178
179 static rtx expand_builtin_object_size (tree);
180 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
181 enum built_in_function);
182 static void maybe_emit_chk_warning (tree, enum built_in_function);
183 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
184 static void maybe_emit_free_warning (tree);
185 static tree fold_builtin_object_size (tree, tree);
186
187 unsigned HOST_WIDE_INT target_newline;
188 unsigned HOST_WIDE_INT target_percent;
189 static unsigned HOST_WIDE_INT target_c;
190 static unsigned HOST_WIDE_INT target_s;
191 char target_percent_c[3];
192 char target_percent_s[3];
193 char target_percent_s_newline[4];
194 static tree do_mpfr_remquo (tree, tree, tree);
195 static tree do_mpfr_lgamma_r (tree, tree, tree);
196 static void expand_builtin_sync_synchronize (void);
197
198 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
199
200 static bool
201 is_builtin_name (const char *name)
202 {
203 if (strncmp (name, "__builtin_", 10) == 0)
204 return true;
205 if (strncmp (name, "__sync_", 7) == 0)
206 return true;
207 if (strncmp (name, "__atomic_", 9) == 0)
208 return true;
209 return false;
210 }
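
/* Illustrative note (editorial addition, not part of the GCC source): with
   the prefixes checked above, is_builtin_name ("__builtin_memcpy"),
   is_builtin_name ("__sync_fetch_and_add_4") and
   is_builtin_name ("__atomic_load_8") all return true, while
   is_builtin_name ("memcpy") returns false.  */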
211
212 /* Return true if NODE should be considered for inline expansion regardless
213 of the optimization level. This is the case whenever a function is invoked
214 with its "internal" name, which normally begins with the prefix "__builtin". */
215
216 bool
217 called_as_built_in (tree node)
218 {
219 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since
220 we want the name used to call the function, not the name it
221 will have. */
222 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
223 return is_builtin_name (name);
224 }
225
226 /* Compute values M and N such that M divides (address of EXP - N) and such
227 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
228 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
229 *ALIGNP and any bit-offset to *BITPOSP.
230
231 Note that the address (and thus the alignment) computed here is based
232 on the address to which a symbol resolves, whereas DECL_ALIGN is based
233 on the address at which an object is actually located. These two
234 addresses are not always the same. For example, on ARM targets,
235 the address &foo of a Thumb function foo() has the lowest bit set,
236 whereas foo() itself starts on an even address.
237
238 If ADDR_P is true we are taking the address of the memory reference EXP
239 and thus cannot rely on the access taking place. */
240
241 static bool
242 get_object_alignment_2 (tree exp, unsigned int *alignp,
243 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
244 {
245 poly_int64 bitsize, bitpos;
246 tree offset;
247 machine_mode mode;
248 int unsignedp, reversep, volatilep;
249 unsigned int align = BITS_PER_UNIT;
250 bool known_alignment = false;
251
252 /* Get the innermost object and the constant (bitpos) and possibly
253 variable (offset) offset of the access. */
254 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
255 &unsignedp, &reversep, &volatilep);
256
257 /* Extract alignment information from the innermost object and
258 possibly adjust bitpos and offset. */
259 if (TREE_CODE (exp) == FUNCTION_DECL)
260 {
261 /* Function addresses can encode extra information besides their
262 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
263 allows the low bit to be used as a virtual bit, we know
264 that the address itself must be at least 2-byte aligned. */
265 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
266 align = 2 * BITS_PER_UNIT;
267 }
268 else if (TREE_CODE (exp) == LABEL_DECL)
269 ;
270 else if (TREE_CODE (exp) == CONST_DECL)
271 {
272 /* The alignment of a CONST_DECL is determined by its initializer. */
273 exp = DECL_INITIAL (exp);
274 align = TYPE_ALIGN (TREE_TYPE (exp));
275 if (CONSTANT_CLASS_P (exp))
276 align = targetm.constant_alignment (exp, align);
277
278 known_alignment = true;
279 }
280 else if (DECL_P (exp))
281 {
282 align = DECL_ALIGN (exp);
283 known_alignment = true;
284 }
285 else if (TREE_CODE (exp) == INDIRECT_REF
286 || TREE_CODE (exp) == MEM_REF
287 || TREE_CODE (exp) == TARGET_MEM_REF)
288 {
289 tree addr = TREE_OPERAND (exp, 0);
290 unsigned ptr_align;
291 unsigned HOST_WIDE_INT ptr_bitpos;
292 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
293
294 /* If the address is explicitly aligned, handle that. */
295 if (TREE_CODE (addr) == BIT_AND_EXPR
296 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
297 {
298 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
299 ptr_bitmask *= BITS_PER_UNIT;
300 align = least_bit_hwi (ptr_bitmask);
301 addr = TREE_OPERAND (addr, 0);
302 }
303
304 known_alignment
305 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
306 align = MAX (ptr_align, align);
307
308 /* Re-apply explicit alignment to the bitpos. */
309 ptr_bitpos &= ptr_bitmask;
310
311 /* The alignment of the pointer operand in a TARGET_MEM_REF
312 has to take the variable offset parts into account. */
313 if (TREE_CODE (exp) == TARGET_MEM_REF)
314 {
315 if (TMR_INDEX (exp))
316 {
317 unsigned HOST_WIDE_INT step = 1;
318 if (TMR_STEP (exp))
319 step = TREE_INT_CST_LOW (TMR_STEP (exp));
320 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
321 }
322 if (TMR_INDEX2 (exp))
323 align = BITS_PER_UNIT;
324 known_alignment = false;
325 }
326
327 /* When EXP is an actual memory reference then we can use
328 TYPE_ALIGN of a pointer indirection to derive alignment.
329 Do so only if get_pointer_alignment_1 did not reveal absolute
330 alignment knowledge and if using that alignment would
331 improve the situation. */
332 unsigned int talign;
333 if (!addr_p && !known_alignment
334 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
335 && talign > align)
336 align = talign;
337 else
338 {
339 /* Else adjust bitpos accordingly. */
340 bitpos += ptr_bitpos;
341 if (TREE_CODE (exp) == MEM_REF
342 || TREE_CODE (exp) == TARGET_MEM_REF)
343 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
344 }
345 }
346 else if (TREE_CODE (exp) == STRING_CST)
347 {
348 /* STRING_CSTs are the only constant objects we allow not to be
349 wrapped inside a CONST_DECL. */
350 align = TYPE_ALIGN (TREE_TYPE (exp));
351 if (CONSTANT_CLASS_P (exp))
352 align = targetm.constant_alignment (exp, align);
353
354 known_alignment = true;
355 }
356
357 /* If there is a non-constant offset part extract the maximum
358 alignment that can prevail. */
359 if (offset)
360 {
361 unsigned int trailing_zeros = tree_ctz (offset);
362 if (trailing_zeros < HOST_BITS_PER_INT)
363 {
364 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
365 if (inner)
366 align = MIN (align, inner);
367 }
368 }
369
370 /* Account for the alignment of runtime coefficients, so that the constant
371 bitpos is guaranteed to be accurate. */
372 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
373 if (alt_align != 0 && alt_align < align)
374 {
375 align = alt_align;
376 known_alignment = false;
377 }
378
379 *alignp = align;
380 *bitposp = bitpos.coeffs[0] & (align - 1);
381 return known_alignment;
382 }
383
384 /* For a memory reference expression EXP compute values M and N such that M
385 divides (&EXP - N) and such that N < M. If these numbers can be determined,
386 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
387 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
388
389 bool
390 get_object_alignment_1 (tree exp, unsigned int *alignp,
391 unsigned HOST_WIDE_INT *bitposp)
392 {
393 return get_object_alignment_2 (exp, alignp, bitposp, false);
394 }
395
396 /* Return the alignment in bits of EXP, an object. */
397
398 unsigned int
399 get_object_alignment (tree exp)
400 {
401 unsigned HOST_WIDE_INT bitpos = 0;
402 unsigned int align;
403
404 get_object_alignment_1 (exp, &align, &bitpos);
405
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
408
409 if (bitpos != 0)
410 align = least_bit_hwi (bitpos);
411 return align;
412 }
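
/* Worked example (editorial addition): if get_object_alignment_1 above
   reports align == 128 and bitpos == 32, then the address satisfies
   ptr & 127 == 32, i.e. it is only guaranteed to be 32-bit aligned, and
   least_bit_hwi (32) == 32 is what get_object_alignment returns.  When
   bitpos == 0 the full reported alignment is returned unchanged.  */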
413
414 /* For a pointer valued expression EXP compute values M and N such that M
415 divides (EXP - N) and such that N < M. If these numbers can be determined,
416 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
417 the results are just a conservative approximation.
418
419 If EXP is not a pointer, false is returned too. */
420
421 bool
422 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
423 unsigned HOST_WIDE_INT *bitposp)
424 {
425 STRIP_NOPS (exp);
426
427 if (TREE_CODE (exp) == ADDR_EXPR)
428 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
429 alignp, bitposp, true);
430 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
431 {
432 unsigned int align;
433 unsigned HOST_WIDE_INT bitpos;
434 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
435 &align, &bitpos);
436 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
437 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
438 else
439 {
440 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
441 if (trailing_zeros < HOST_BITS_PER_INT)
442 {
443 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
444 if (inner)
445 align = MIN (align, inner);
446 }
447 }
448 *alignp = align;
449 *bitposp = bitpos & (align - 1);
450 return res;
451 }
452 else if (TREE_CODE (exp) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp)))
454 {
455 unsigned int ptr_align, ptr_misalign;
456 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
457
458 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
459 {
460 *bitposp = ptr_misalign * BITS_PER_UNIT;
461 *alignp = ptr_align * BITS_PER_UNIT;
462 /* Make sure to return a sensible alignment when the multiplication
463 by BITS_PER_UNIT overflowed. */
464 if (*alignp == 0)
465 *alignp = 1u << (HOST_BITS_PER_INT - 1);
466 /* We cannot really tell whether this result is an approximation. */
467 return false;
468 }
469 else
470 {
471 *bitposp = 0;
472 *alignp = BITS_PER_UNIT;
473 return false;
474 }
475 }
476 else if (TREE_CODE (exp) == INTEGER_CST)
477 {
478 *alignp = BIGGEST_ALIGNMENT;
479 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
480 & (BIGGEST_ALIGNMENT - 1));
481 return true;
482 }
483
484 *bitposp = 0;
485 *alignp = BITS_PER_UNIT;
486 return false;
487 }
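
/* Worked example (editorial addition, assuming BITS_PER_UNIT == 8): for a
   POINTER_PLUS_EXPR whose base pointer is known to be 16-byte aligned
   (align == 128, bitpos == 0) and whose offset is the constant 4, the code
   above adds 4 * BITS_PER_UNIT to bitpos, so the result is *alignp == 128
   and *bitposp == 32.  */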
488
489 /* Return the alignment in bits of EXP, a pointer valued expression.
490 The alignment returned is, by default, the alignment of the thing that
491 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
492
493 Otherwise, look at the expression to see if we can do better, i.e., if the
494 expression is actually pointing at an object whose alignment is tighter. */
495
496 unsigned int
497 get_pointer_alignment (tree exp)
498 {
499 unsigned HOST_WIDE_INT bitpos = 0;
500 unsigned int align;
501
502 get_pointer_alignment_1 (exp, &align, &bitpos);
503
504 /* align and bitpos now specify known low bits of the pointer.
505 ptr & (align - 1) == bitpos. */
506
507 if (bitpos != 0)
508 align = least_bit_hwi (bitpos);
509
510 return align;
511 }
512
513 /* Return the number of leading non-zero elements in the sequence
514 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
515 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
516
517 unsigned
518 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
519 {
520 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
521
522 unsigned n;
523
524 if (eltsize == 1)
525 {
526 /* Optimize the common case of plain char. */
527 for (n = 0; n < maxelts; n++)
528 {
529 const char *elt = (const char*) ptr + n;
530 if (!*elt)
531 break;
532 }
533 }
534 else
535 {
536 for (n = 0; n < maxelts; n++)
537 {
538 const char *elt = (const char*) ptr + n * eltsize;
539 if (!memcmp (elt, "\0\0\0\0", eltsize))
540 break;
541 }
542 }
543 return n;
544 }
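
/* Illustrative example (editorial addition): with the memory bytes
   "ab\0cd" and ELTSIZE == 1, string_length (ptr, 1, 5) returns 2, stopping
   at the embedded NUL.  With ELTSIZE == 4, scanning stops at the first
   element whose four bytes are all zero, so a buffer holding the wide
   character 'a' followed by a zero element yields 1.  */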
545
546 /* For a call at LOC to a function FN that expects a string in the argument
547 ARG, issue a diagnostic due to it being called with an argument
548 declared at DECL that is a character array with no terminating NUL. */
549
550 void
551 warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
552 {
553 if (TREE_NO_WARNING (arg))
554 return;
555
556 loc = expansion_point_location_if_in_system_header (loc);
557
558 if (warning_at (loc, OPT_Wstringop_overflow_,
559 "%qs argument missing terminating nul", fn))
560 {
561 inform (DECL_SOURCE_LOCATION (decl),
562 "referenced argument declared here");
563 TREE_NO_WARNING (arg) = 1;
564 }
565 }
566
567 /* If EXP refers to an unterminated constant character array, return
568 the declaration of the object of which the array is a member or
569 element; if SIZE is not null, set *SIZE to the size of the
570 unterminated array, and set *EXACT if that size is exact or
571 clear it otherwise. Otherwise return null. */
572
573 tree
574 unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
575 {
576 /* C_STRLEN will return NULL and set DECL in the info
577 structure if EXP references an unterminated array. */
578 c_strlen_data lendata = { };
579 tree len = c_strlen (exp, 1, &lendata);
580 if (len == NULL_TREE && lendata.minlen && lendata.decl)
581 {
582 if (size)
583 {
584 len = lendata.minlen;
585 if (lendata.off)
586 {
587 /* Constant offsets are already accounted for in LENDATA.MINLEN,
588 but not in a SSA_NAME + CST expression. */
589 if (TREE_CODE (lendata.off) == INTEGER_CST)
590 *exact = true;
591 else if (TREE_CODE (lendata.off) == PLUS_EXPR
592 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
593 {
594 /* Subtract the offset from the size of the array. */
595 *exact = false;
596 tree temp = TREE_OPERAND (lendata.off, 1);
597 temp = fold_convert (ssizetype, temp);
598 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
599 }
600 else
601 *exact = false;
602 }
603 else
604 *exact = true;
605
606 *size = len;
607 }
608 return lendata.decl;
609 }
610
611 return NULL_TREE;
612 }
613
614 /* Compute the length of a null-terminated character string or wide
615 character string handling character sizes of 1, 2, and 4 bytes.
616 TREE_STRING_LENGTH is not the right way because it evaluates to
617 the size of the character array in bytes (as opposed to characters)
618 and because it can contain a zero byte in the middle.
619
620 ONLY_VALUE should be nonzero if the result is not going to be emitted
621 into the instruction stream and zero if it is going to be expanded.
622 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
623 is returned, otherwise NULL, since
624 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
625 evaluate the side-effects.
626
627 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
628 accesses. Note that this implies the result is not going to be emitted
629 into the instruction stream.
630
631 Additional information about the string accessed may be recorded
632 in DATA. For example, if ARG references an unterminated string,
633 then the declaration will be stored in the DECL field. If the
634 length of the unterminated string can be determined, it'll be
635 stored in the LEN field. Note this length could well be different
636 than what a C strlen call would return.
637
638 ELTSIZE is 1 for normal single byte character strings, and 2 or
639 4 for wide character strings. ELTSIZE is by default 1.
640
641 The value returned is of type `ssizetype'. */
642
643 tree
644 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
645 {
646 /* If we were not passed a DATA pointer, then get one to a local
647 structure. That avoids having to check DATA for NULL before
648 each time we want to use it. */
649 c_strlen_data local_strlen_data = { };
650 if (!data)
651 data = &local_strlen_data;
652
653 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
654
655 tree src = STRIP_NOPS (arg);
656 if (TREE_CODE (src) == COND_EXPR
657 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
658 {
659 tree len1, len2;
660
661 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
662 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
663 if (tree_int_cst_equal (len1, len2))
664 return len1;
665 }
666
667 if (TREE_CODE (src) == COMPOUND_EXPR
668 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
669 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
670
671 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
672
673 /* Offset from the beginning of the string in bytes. */
674 tree byteoff;
675 tree memsize;
676 tree decl;
677 src = string_constant (src, &byteoff, &memsize, &decl);
678 if (src == 0)
679 return NULL_TREE;
680
681 /* Determine the size of the string element. */
682 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
683 return NULL_TREE;
684
685 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
686 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
687 in case the latter is less than the size of the array, such as when
688 SRC refers to a short string literal used to initialize a large array.
689 In that case, the elements of the array after the terminating NUL are
690 all NUL. */
691 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
692 strelts = strelts / eltsize;
693
694 if (!tree_fits_uhwi_p (memsize))
695 return NULL_TREE;
696
697 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
698
699 /* PTR can point to the byte representation of any string type, including
700 char* and wchar_t*. */
701 const char *ptr = TREE_STRING_POINTER (src);
702
703 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
704 {
705 /* The code below works only for single byte character types. */
706 if (eltsize != 1)
707 return NULL_TREE;
708
709 /* If the string has an internal NUL character followed by any
710 non-NUL characters (e.g., "foo\0bar"), we can't compute
711 the offset to the following NUL if we don't know where to
712 start searching for it. */
713 unsigned len = string_length (ptr, eltsize, strelts);
714
715 /* Return when an embedded null character is found or none at all.
716 In the latter case, set the DECL/LEN field in the DATA structure
717 so that callers may examine them. */
718 if (len + 1 < strelts)
719 return NULL_TREE;
720 else if (len >= maxelts)
721 {
722 data->decl = decl;
723 data->off = byteoff;
724 data->minlen = ssize_int (len);
725 return NULL_TREE;
726 }
727
728 /* For empty strings the result should be zero. */
729 if (len == 0)
730 return ssize_int (0);
731
732 /* We don't know the starting offset, but we do know that the string
733 has no internal zero bytes. If the offset falls within the bounds
734 of the string subtract the offset from the length of the string,
735 and return that. Otherwise the length is zero. Take care to
736 use SAVE_EXPR in case the OFFSET has side-effects. */
737 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
738 : byteoff;
739 offsave = fold_convert_loc (loc, sizetype, offsave);
740 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
741 size_int (len));
742 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
743 offsave);
744 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
745 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
746 build_zero_cst (ssizetype));
747 }
748
749 /* Offset from the beginning of the string in elements. */
750 HOST_WIDE_INT eltoff;
751
752 /* We have a known offset into the string. Start searching there for
753 a null character if we can represent it as a single HOST_WIDE_INT. */
754 if (byteoff == 0)
755 eltoff = 0;
756 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
757 eltoff = -1;
758 else
759 eltoff = tree_to_uhwi (byteoff) / eltsize;
760
761 /* If the offset is known to be out of bounds, warn, and call strlen at
762 runtime. */
763 if (eltoff < 0 || eltoff >= maxelts)
764 {
765 /* Suppress multiple warnings for propagated constant strings. */
766 if (only_value != 2
767 && !TREE_NO_WARNING (arg)
768 && warning_at (loc, OPT_Warray_bounds,
769 "offset %qwi outside bounds of constant string",
770 eltoff))
771 {
772 if (decl)
773 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
774 TREE_NO_WARNING (arg) = 1;
775 }
776 return NULL_TREE;
777 }
778
779 /* If eltoff is larger than strelts but less than maxelts the
780 string length is zero, since the excess memory will be zero. */
781 if (eltoff > strelts)
782 return ssize_int (0);
783
784 /* Use strlen to search for the first zero byte. Since any strings
785 constructed with build_string will have nulls appended, we win even
786 if we get handed something like (char[4])"abcd".
787
788 Since ELTOFF is our starting index into the string, no further
789 calculation is needed. */
790 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
791 strelts - eltoff);
792
793 /* Don't know what to return if there was no zero termination.
794 Ideally this would turn into a gcc_checking_assert over time.
795 Set DECL/LEN so callers can examine them. */
796 if (len >= maxelts - eltoff)
797 {
798 data->decl = decl;
799 data->off = byteoff;
800 data->minlen = ssize_int (len);
801 return NULL_TREE;
802 }
803
804 return ssize_int (len);
805 }
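
/* Illustrative examples (editorial addition): for ARG pointing to the
   string constant "hello" at a constant element offset of 2, c_strlen
   returns ssize_int (3).  For a string with an embedded NUL such as
   "foo\0bar" accessed at a non-constant offset, NULL_TREE is returned
   because the distance to the terminating NUL cannot be determined.  */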
806
807 /* Return a constant integer corresponding to the target reading
808 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
809 NULL_TERMINATED_P, reading stops after the '\0' character and all further
810 ones are assumed to be zero; otherwise it reads as many characters
811 as needed. */
812
813 rtx
814 c_readstr (const char *str, scalar_int_mode mode,
815 bool null_terminated_p/*=true*/)
816 {
817 HOST_WIDE_INT ch;
818 unsigned int i, j;
819 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
820
821 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
822 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
823 / HOST_BITS_PER_WIDE_INT;
824
825 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
826 for (i = 0; i < len; i++)
827 tmp[i] = 0;
828
829 ch = 1;
830 for (i = 0; i < GET_MODE_SIZE (mode); i++)
831 {
832 j = i;
833 if (WORDS_BIG_ENDIAN)
834 j = GET_MODE_SIZE (mode) - i - 1;
835 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
836 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
837 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
838 j *= BITS_PER_UNIT;
839
840 if (ch || !null_terminated_p)
841 ch = (unsigned char) str[i];
842 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
843 }
844
845 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
846 return immed_wide_int_const (c, mode);
847 }
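
/* Illustrative example (editorial addition, assuming 8-bit bytes): the
   constant produced mirrors the target memory image of STR, so
   c_readstr ("abcd", SImode) yields 0x64636261 on a little-endian target
   and 0x61626364 on a big-endian one.  With NULL_TERMINATED_P, bytes past
   the first '\0' are taken to be zero rather than read from STR.  */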
848
849 /* Cast a target constant CST to target CHAR and if that value fits into
850 the host char type, return zero and put that value into the variable
851 pointed to by P. */
852
853 static int
854 target_char_cast (tree cst, char *p)
855 {
856 unsigned HOST_WIDE_INT val, hostval;
857
858 if (TREE_CODE (cst) != INTEGER_CST
859 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
860 return 1;
861
862 /* Do not care if it fits or not right here. */
863 val = TREE_INT_CST_LOW (cst);
864
865 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
866 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
867
868 hostval = val;
869 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
870 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
871
872 if (val != hostval)
873 return 1;
874
875 *p = hostval;
876 return 0;
877 }
878
879 /* Similar to save_expr, but assumes that arbitrary code is not executed
880 in between the multiple evaluations. In particular, we assume that a
881 non-addressable local variable will not be modified. */
882
883 static tree
884 builtin_save_expr (tree exp)
885 {
886 if (TREE_CODE (exp) == SSA_NAME
887 || (TREE_ADDRESSABLE (exp) == 0
888 && (TREE_CODE (exp) == PARM_DECL
889 || (VAR_P (exp) && !TREE_STATIC (exp)))))
890 return exp;
891
892 return save_expr (exp);
893 }
894
895 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
896 times to get the address of either a higher stack frame, or a return
897 address located within it (depending on FNDECL_CODE). */
898
899 static rtx
900 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
901 {
902 int i;
903 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
904 if (tem == NULL_RTX)
905 {
906 /* For a zero count with __builtin_return_address, we don't care what
907 frame address we return, because target-specific definitions will
908 override us. Therefore frame pointer elimination is OK, and using
909 the soft frame pointer is OK.
910
911 For a nonzero count, or a zero count with __builtin_frame_address,
912 we require a stable offset from the current frame pointer to the
913 previous one, so we must use the hard frame pointer, and
914 we must disable frame pointer elimination. */
915 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
916 tem = frame_pointer_rtx;
917 else
918 {
919 tem = hard_frame_pointer_rtx;
920
921 /* Tell reload not to eliminate the frame pointer. */
922 crtl->accesses_prior_frames = 1;
923 }
924 }
925
926 if (count > 0)
927 SETUP_FRAME_ADDRESSES ();
928
929 /* On the SPARC, the return address is not in the frame, it is in a
930 register. There is no way to access it off of the current frame
931 pointer, but it can be accessed off the previous frame pointer by
932 reading the value from the register window save area. */
933 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
934 count--;
935
936 /* Scan back COUNT frames to the specified frame. */
937 for (i = 0; i < count; i++)
938 {
939 /* Assume the dynamic chain pointer is in the word that the
940 frame address points to, unless otherwise specified. */
941 tem = DYNAMIC_CHAIN_ADDRESS (tem);
942 tem = memory_address (Pmode, tem);
943 tem = gen_frame_mem (Pmode, tem);
944 tem = copy_to_reg (tem);
945 }
946
947 /* For __builtin_frame_address, return what we've got. But, on
948 the SPARC for example, we may have to add a bias. */
949 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
950 return FRAME_ADDR_RTX (tem);
951
952 /* For __builtin_return_address, get the return address from that frame. */
953 #ifdef RETURN_ADDR_RTX
954 tem = RETURN_ADDR_RTX (count, tem);
955 #else
956 tem = memory_address (Pmode,
957 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
958 tem = gen_frame_mem (Pmode, tem);
959 #endif
960 return tem;
961 }
962
963 /* Alias set used for setjmp buffer. */
964 static alias_set_type setjmp_alias_set = -1;
965
966 /* Construct the leading half of a __builtin_setjmp call. Control will
967 return to RECEIVER_LABEL. This is also called directly by the SJLJ
968 exception handling code. */
969
970 void
971 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
972 {
973 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
974 rtx stack_save;
975 rtx mem;
976
977 if (setjmp_alias_set == -1)
978 setjmp_alias_set = new_alias_set ();
979
980 buf_addr = convert_memory_address (Pmode, buf_addr);
981
982 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
983
984 /* We store the frame pointer and the address of receiver_label in
985 the buffer and use the rest of it for the stack save area, which
986 is machine-dependent. */
987
988 mem = gen_rtx_MEM (Pmode, buf_addr);
989 set_mem_alias_set (mem, setjmp_alias_set);
990 emit_move_insn (mem, hard_frame_pointer_rtx);
991
992 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
993 GET_MODE_SIZE (Pmode))),
994 set_mem_alias_set (mem, setjmp_alias_set);
995
996 emit_move_insn (validize_mem (mem),
997 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
998
999 stack_save = gen_rtx_MEM (sa_mode,
1000 plus_constant (Pmode, buf_addr,
1001 2 * GET_MODE_SIZE (Pmode)));
1002 set_mem_alias_set (stack_save, setjmp_alias_set);
1003 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1004
1005 /* If there is further processing to do, do it. */
1006 if (targetm.have_builtin_setjmp_setup ())
1007 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1008
1009 /* We have a nonlocal label. */
1010 cfun->has_nonlocal_label = 1;
1011 }
1012
1013 /* Construct the trailing part of a __builtin_setjmp call. This is
1014 also called directly by the SJLJ exception handling code.
1015 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
1016
1017 void
1018 expand_builtin_setjmp_receiver (rtx receiver_label)
1019 {
1020 rtx chain;
1021
1022 /* Mark the FP as used when we get here, so we have to make sure it's
1023 marked as used by this function. */
1024 emit_use (hard_frame_pointer_rtx);
1025
1026 /* Mark the static chain as clobbered here so life information
1027 doesn't get messed up for it. */
1028 chain = rtx_for_static_chain (current_function_decl, true);
1029 if (chain && REG_P (chain))
1030 emit_clobber (chain);
1031
1032 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1033 {
1034 /* If the argument pointer can be eliminated in favor of the
1035 frame pointer, we don't need to restore it. We assume here
1036 that if such an elimination is present, it can always be used.
1037 This is the case on all known machines; if we don't make this
1038 assumption, we do unnecessary saving on many machines. */
1039 size_t i;
1040 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1041
1042 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1043 if (elim_regs[i].from == ARG_POINTER_REGNUM
1044 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1045 break;
1046
1047 if (i == ARRAY_SIZE (elim_regs))
1048 {
1049 /* Now restore our arg pointer from the address at which it
1050 was saved in our stack frame. */
1051 emit_move_insn (crtl->args.internal_arg_pointer,
1052 copy_to_reg (get_arg_pointer_save_area ()));
1053 }
1054 }
1055
1056 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1057 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1058 else if (targetm.have_nonlocal_goto_receiver ())
1059 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1060 else
1061 { /* Nothing */ }
1062
1063 /* We must not allow the code we just generated to be reordered by
1064 scheduling. Specifically, the update of the frame pointer must
1065 happen immediately, not later. */
1066 emit_insn (gen_blockage ());
1067 }
1068
1069 /* __builtin_longjmp is passed a pointer to an array of five words (not
1070 all will be used on all machines). It operates similarly to the C
1071 library function of the same name, but is more efficient. Much of
1072 the code below is copied from the handling of non-local gotos. */
1073
1074 static void
1075 expand_builtin_longjmp (rtx buf_addr, rtx value)
1076 {
1077 rtx fp, lab, stack;
1078 rtx_insn *insn, *last;
1079 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1080
1081 /* DRAP is needed for stack realign if longjmp is expanded to current
1082 function */
1083 if (SUPPORTS_STACK_ALIGNMENT)
1084 crtl->need_drap = true;
1085
1086 if (setjmp_alias_set == -1)
1087 setjmp_alias_set = new_alias_set ();
1088
1089 buf_addr = convert_memory_address (Pmode, buf_addr);
1090
1091 buf_addr = force_reg (Pmode, buf_addr);
1092
1093 /* We require that the user pass a second argument of 1, because
1094 that is what builtin_setjmp will return. */
1095 gcc_assert (value == const1_rtx);
1096
1097 last = get_last_insn ();
1098 if (targetm.have_builtin_longjmp ())
1099 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1100 else
1101 {
1102 fp = gen_rtx_MEM (Pmode, buf_addr);
1103 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1104 GET_MODE_SIZE (Pmode)));
1105
1106 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1107 2 * GET_MODE_SIZE (Pmode)));
1108 set_mem_alias_set (fp, setjmp_alias_set);
1109 set_mem_alias_set (lab, setjmp_alias_set);
1110 set_mem_alias_set (stack, setjmp_alias_set);
1111
1112 /* Pick up FP, label, and SP from the block and jump. This code is
1113 from expand_goto in stmt.c; see there for detailed comments. */
1114 if (targetm.have_nonlocal_goto ())
1115 /* We have to pass a value to the nonlocal_goto pattern that will
1116 get copied into the static_chain pointer, but it does not matter
1117 what that value is, because builtin_setjmp does not use it. */
1118 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1119 else
1120 {
1121 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1122 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1123
1124 lab = copy_to_reg (lab);
1125
1126 /* Restore the frame pointer and stack pointer. We must use a
1127 temporary since the setjmp buffer may be a local. */
1128 fp = copy_to_reg (fp);
1129 emit_stack_restore (SAVE_NONLOCAL, stack);
1130
1131 /* Ensure the frame pointer move is not optimized. */
1132 emit_insn (gen_blockage ());
1133 emit_clobber (hard_frame_pointer_rtx);
1134 emit_clobber (frame_pointer_rtx);
1135 emit_move_insn (hard_frame_pointer_rtx, fp);
1136
1137 emit_use (hard_frame_pointer_rtx);
1138 emit_use (stack_pointer_rtx);
1139 emit_indirect_jump (lab);
1140 }
1141 }
1142
1143 /* Search backwards and mark the jump insn as a non-local goto.
1144 Note that this precludes the use of __builtin_longjmp to a
1145 __builtin_setjmp target in the same function. However, we've
1146 already cautioned the user that these functions are for
1147 internal exception handling use only. */
1148 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1149 {
1150 gcc_assert (insn != last);
1151
1152 if (JUMP_P (insn))
1153 {
1154 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1155 break;
1156 }
1157 else if (CALL_P (insn))
1158 break;
1159 }
1160 }
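
/* Usage sketch (editorial addition; 'jmpbuf', 'thrower' and 'catcher' are
   hypothetical names): the two builtins above are the low-level SJLJ
   primitives, taking a five-word buffer and, for __builtin_longjmp, a
   second argument that must be the constant 1:

     static void *jmpbuf[5];

     void thrower (void) { __builtin_longjmp (jmpbuf, 1); }

     int catcher (void)
     {
       if (__builtin_setjmp (jmpbuf))
         return 1;               /* reached via __builtin_longjmp  */
       thrower ();
       return 0;
     }

   As the comment above notes, these are meant for internal exception
   handling, and a longjmp to a setjmp target in the same function is not
   supported.  */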
1161
1162 static inline bool
1163 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1164 {
1165 return (iter->i < iter->n);
1166 }
1167
1168 /* This function validates the types of a function call argument list
1169 against a specified list of tree_codes. If the last specifier is a 0,
1170 that represents an ellipsis, otherwise the last specifier must be a
1171 VOID_TYPE. */
1172
1173 static bool
1174 validate_arglist (const_tree callexpr, ...)
1175 {
1176 enum tree_code code;
1177 bool res = 0;
1178 va_list ap;
1179 const_call_expr_arg_iterator iter;
1180 const_tree arg;
1181
1182 va_start (ap, callexpr);
1183 init_const_call_expr_arg_iterator (callexpr, &iter);
1184
1185 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1186 tree fn = CALL_EXPR_FN (callexpr);
1187 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1188
1189 for (unsigned argno = 1; ; ++argno)
1190 {
1191 code = (enum tree_code) va_arg (ap, int);
1192
1193 switch (code)
1194 {
1195 case 0:
1196 /* This signifies an ellipsis; any further arguments are all ok. */
1197 res = true;
1198 goto end;
1199 case VOID_TYPE:
1200 /* This signifies an endlink, if no arguments remain, return
1201 true, otherwise return false. */
1202 res = !more_const_call_expr_args_p (&iter);
1203 goto end;
1204 case POINTER_TYPE:
1205 /* The actual argument must be nonnull when either the whole
1206 called function has been declared nonnull, or when the formal
1207 argument corresponding to the actual argument has been. */
1208 if (argmap
1209 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1210 {
1211 arg = next_const_call_expr_arg (&iter);
1212 if (!validate_arg (arg, code) || integer_zerop (arg))
1213 goto end;
1214 break;
1215 }
1216 /* FALLTHRU */
1217 default:
1218 /* If no parameters remain or the parameter's code does not
1219 match the specified code, return false. Otherwise continue
1220 checking any remaining arguments. */
1221 arg = next_const_call_expr_arg (&iter);
1222 if (!validate_arg (arg, code))
1223 goto end;
1224 break;
1225 }
1226 }
1227
1228 /* We need gotos here since we can only have one VA_CLOSE in a
1229 function. */
1230 end: ;
1231 va_end (ap);
1232
1233 BITMAP_FREE (argmap);
1234
1235 return res;
1236 }
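
/* Illustrative calls (editorial addition), mirroring uses later in this
   file: validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two pointer arguments (rejecting literal nulls where
   attribute nonnull applies), while validate_arglist (exp, POINTER_TYPE, 0)
   accepts one pointer followed by any further arguments.  */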
1237
1238 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1239 and the address of the save area. */
1240
1241 static rtx
1242 expand_builtin_nonlocal_goto (tree exp)
1243 {
1244 tree t_label, t_save_area;
1245 rtx r_label, r_save_area, r_fp, r_sp;
1246 rtx_insn *insn;
1247
1248 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1249 return NULL_RTX;
1250
1251 t_label = CALL_EXPR_ARG (exp, 0);
1252 t_save_area = CALL_EXPR_ARG (exp, 1);
1253
1254 r_label = expand_normal (t_label);
1255 r_label = convert_memory_address (Pmode, r_label);
1256 r_save_area = expand_normal (t_save_area);
1257 r_save_area = convert_memory_address (Pmode, r_save_area);
1258 /* Copy the address of the save location to a register just in case it was
1259 based on the frame pointer. */
1260 r_save_area = copy_to_reg (r_save_area);
1261 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1262 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1263 plus_constant (Pmode, r_save_area,
1264 GET_MODE_SIZE (Pmode)));
1265
1266 crtl->has_nonlocal_goto = 1;
1267
1268 /* ??? We no longer need to pass the static chain value, afaik. */
1269 if (targetm.have_nonlocal_goto ())
1270 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1271 else
1272 {
1273 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1274 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1275
1276 r_label = copy_to_reg (r_label);
1277
1278 /* Restore the frame pointer and stack pointer. We must use a
1279 temporary since the setjmp buffer may be a local. */
1280 r_fp = copy_to_reg (r_fp);
1281 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1282
1283 /* Ensure the frame pointer move is not optimized. */
1284 emit_insn (gen_blockage ());
1285 emit_clobber (hard_frame_pointer_rtx);
1286 emit_clobber (frame_pointer_rtx);
1287 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1288
1289 /* USE of hard_frame_pointer_rtx added for consistency;
1290 not clear if really needed. */
1291 emit_use (hard_frame_pointer_rtx);
1292 emit_use (stack_pointer_rtx);
1293
1294 /* If the architecture is using a GP register, we must
1295 conservatively assume that the target function makes use of it.
1296 The prologue of functions with nonlocal gotos must therefore
1297 initialize the GP register to the appropriate value, and we
1298 must then make sure that this value is live at the point
1299 of the jump. (Note that this doesn't necessarily apply
1300 to targets with a nonlocal_goto pattern; they are free
1301 to implement it in their own way. Note also that this is
1302 a no-op if the GP register is a global invariant.) */
1303 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1304 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1305 emit_use (pic_offset_table_rtx);
1306
1307 emit_indirect_jump (r_label);
1308 }
1309
1310 /* Search backwards to the jump insn and mark it as a
1311 non-local goto. */
1312 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1313 {
1314 if (JUMP_P (insn))
1315 {
1316 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1317 break;
1318 }
1319 else if (CALL_P (insn))
1320 break;
1321 }
1322
1323 return const0_rtx;
1324 }
1325
1326 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1327 (not all will be used on all machines) that was passed to __builtin_setjmp.
1328 It updates the stack pointer in that block to the current value. This is
1329 also called directly by the SJLJ exception handling code. */
1330
1331 void
1332 expand_builtin_update_setjmp_buf (rtx buf_addr)
1333 {
1334 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1335 buf_addr = convert_memory_address (Pmode, buf_addr);
1336 rtx stack_save
1337 = gen_rtx_MEM (sa_mode,
1338 memory_address
1339 (sa_mode,
1340 plus_constant (Pmode, buf_addr,
1341 2 * GET_MODE_SIZE (Pmode))));
1342
1343 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1344 }
1345
1346 /* Expand a call to __builtin_prefetch. For a target that does not support
1347 data prefetch, evaluate the memory address argument in case it has side
1348 effects. */
1349
1350 static void
1351 expand_builtin_prefetch (tree exp)
1352 {
1353 tree arg0, arg1, arg2;
1354 int nargs;
1355 rtx op0, op1, op2;
1356
1357 if (!validate_arglist (exp, POINTER_TYPE, 0))
1358 return;
1359
1360 arg0 = CALL_EXPR_ARG (exp, 0);
1361
1362 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1363 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1364 locality). */
1365 nargs = call_expr_nargs (exp);
1366 if (nargs > 1)
1367 arg1 = CALL_EXPR_ARG (exp, 1);
1368 else
1369 arg1 = integer_zero_node;
1370 if (nargs > 2)
1371 arg2 = CALL_EXPR_ARG (exp, 2);
1372 else
1373 arg2 = integer_three_node;
1374
1375 /* Argument 0 is an address. */
1376 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1377
1378 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1379 if (TREE_CODE (arg1) != INTEGER_CST)
1380 {
1381 error ("second argument to %<__builtin_prefetch%> must be a constant");
1382 arg1 = integer_zero_node;
1383 }
1384 op1 = expand_normal (arg1);
1385 /* Argument 1 must be either zero or one. */
1386 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1387 {
1388 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1389 " using zero");
1390 op1 = const0_rtx;
1391 }
1392
1393 /* Argument 2 (locality) must be a compile-time constant int. */
1394 if (TREE_CODE (arg2) != INTEGER_CST)
1395 {
1396 error ("third argument to %<__builtin_prefetch%> must be a constant");
1397 arg2 = integer_zero_node;
1398 }
1399 op2 = expand_normal (arg2);
1400 /* Argument 2 must be 0, 1, 2, or 3. */
1401 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1402 {
1403 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1404 op2 = const0_rtx;
1405 }
1406
1407 if (targetm.have_prefetch ())
1408 {
1409 class expand_operand ops[3];
1410
1411 create_address_operand (&ops[0], op0);
1412 create_integer_operand (&ops[1], INTVAL (op1));
1413 create_integer_operand (&ops[2], INTVAL (op2));
1414 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1415 return;
1416 }
1417
1418 /* Don't do anything with direct references to volatile memory, but
1419 generate code to handle other side effects. */
1420 if (!MEM_P (op0) && side_effects_p (op0))
1421 emit_insn (op0);
1422 }
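
/* Source-level example (editorial addition): a call such as
   __builtin_prefetch (p, 1, 3) expands through the code above; the second
   argument (read/write) must be the constant 0 or 1 and the third
   (locality) a constant 0..3, with omitted arguments defaulting to 0 and 3
   respectively.  */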
1423
1424 /* Get a MEM rtx for expression EXP which is the address of an operand
1425 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1426 the maximum length of the block of memory that might be accessed or
1427 NULL if unknown. */
1428
1429 static rtx
1430 get_memory_rtx (tree exp, tree len)
1431 {
1432 tree orig_exp = exp;
1433 rtx addr, mem;
1434
1435 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1436 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1437 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1438 exp = TREE_OPERAND (exp, 0);
1439
1440 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1441 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1442
1443 /* Get an expression we can use to find the attributes to assign to MEM.
1444 First remove any nops. */
1445 while (CONVERT_EXPR_P (exp)
1446 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1447 exp = TREE_OPERAND (exp, 0);
1448
1449 /* Build a MEM_REF representing the whole accessed area as a byte blob
1450 (as builtin stringops may alias with anything). */
1451 exp = fold_build2 (MEM_REF,
1452 build_array_type (char_type_node,
1453 build_range_type (sizetype,
1454 size_one_node, len)),
1455 exp, build_int_cst (ptr_type_node, 0));
1456
1457 /* If the MEM_REF has no acceptable address, try to get the base object
1458 from the original address we got, and build an all-aliasing
1459 unknown-sized access to that one. */
1460 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1461 set_mem_attributes (mem, exp, 0);
1462 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1463 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1464 0))))
1465 {
1466 exp = build_fold_addr_expr (exp);
1467 exp = fold_build2 (MEM_REF,
1468 build_array_type (char_type_node,
1469 build_range_type (sizetype,
1470 size_zero_node,
1471 NULL)),
1472 exp, build_int_cst (ptr_type_node, 0));
1473 set_mem_attributes (mem, exp, 0);
1474 }
1475 set_mem_alias_set (mem, 0);
1476 return mem;
1477 }
1478 \f
1479 /* Built-in functions to perform an untyped call and return. */
1480
1481 #define apply_args_mode \
1482 (this_target_builtins->x_apply_args_mode)
1483 #define apply_result_mode \
1484 (this_target_builtins->x_apply_result_mode)
1485
1486 /* Return the size required for the block returned by __builtin_apply_args,
1487 and initialize apply_args_mode. */
1488
1489 static int
1490 apply_args_size (void)
1491 {
1492 static int size = -1;
1493 int align;
1494 unsigned int regno;
1495
1496 /* The values computed by this function never change. */
1497 if (size < 0)
1498 {
1499 /* The first value is the incoming arg-pointer. */
1500 size = GET_MODE_SIZE (Pmode);
1501
1502 /* The second value is the structure value address unless this is
1503 passed as an "invisible" first argument. */
1504 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1505 size += GET_MODE_SIZE (Pmode);
1506
1507 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1508 if (FUNCTION_ARG_REGNO_P (regno))
1509 {
1510 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1511
1512 gcc_assert (mode != VOIDmode);
1513
1514 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1515 if (size % align != 0)
1516 size = CEIL (size, align) * align;
1517 size += GET_MODE_SIZE (mode);
1518 apply_args_mode[regno] = mode;
1519 }
1520 else
1521 {
1522 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1523 }
1524 }
1525 return size;
1526 }
1527
1528 /* Return the size required for the block returned by __builtin_apply,
1529 and initialize apply_result_mode. */
1530
1531 static int
1532 apply_result_size (void)
1533 {
1534 static int size = -1;
1535 int align, regno;
1536
1537 /* The values computed by this function never change. */
1538 if (size < 0)
1539 {
1540 size = 0;
1541
1542 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1543 if (targetm.calls.function_value_regno_p (regno))
1544 {
1545 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1546
1547 gcc_assert (mode != VOIDmode);
1548
1549 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1550 if (size % align != 0)
1551 size = CEIL (size, align) * align;
1552 size += GET_MODE_SIZE (mode);
1553 apply_result_mode[regno] = mode;
1554 }
1555 else
1556 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1557
1558 /* Allow targets that use untyped_call and untyped_return to override
1559 the size so that machine-specific information can be stored here. */
1560 #ifdef APPLY_RESULT_SIZE
1561 size = APPLY_RESULT_SIZE;
1562 #endif
1563 }
1564 return size;
1565 }
1566
1567 /* Create a vector describing the result block RESULT. If SAVEP is true,
1568 the result block is used to save the values; otherwise it is used to
1569 restore the values. */
1570
1571 static rtx
1572 result_vector (int savep, rtx result)
1573 {
1574 int regno, size, align, nelts;
1575 fixed_size_mode mode;
1576 rtx reg, mem;
1577 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1578
1579 size = nelts = 0;
1580 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1581 if ((mode = apply_result_mode[regno]) != VOIDmode)
1582 {
1583 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1584 if (size % align != 0)
1585 size = CEIL (size, align) * align;
1586 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1587 mem = adjust_address (result, mode, size);
1588 savevec[nelts++] = (savep
1589 ? gen_rtx_SET (mem, reg)
1590 : gen_rtx_SET (reg, mem));
1591 size += GET_MODE_SIZE (mode);
1592 }
1593 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1594 }
1595
1596 /* Save the state required to perform an untyped call with the same
1597 arguments as were passed to the current function. */
1598
1599 static rtx
1600 expand_builtin_apply_args_1 (void)
1601 {
1602 rtx registers, tem;
1603 int size, align, regno;
1604 fixed_size_mode mode;
1605 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1606
1607 /* Create a block where the arg-pointer, structure value address,
1608 and argument registers can be saved. */
1609 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1610
1611 /* Walk past the arg-pointer and structure value address. */
1612 size = GET_MODE_SIZE (Pmode);
1613 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1614 size += GET_MODE_SIZE (Pmode);
1615
1616 /* Save each register used in calling a function to the block. */
1617 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1618 if ((mode = apply_args_mode[regno]) != VOIDmode)
1619 {
1620 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1621 if (size % align != 0)
1622 size = CEIL (size, align) * align;
1623
1624 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1625
1626 emit_move_insn (adjust_address (registers, mode, size), tem);
1627 size += GET_MODE_SIZE (mode);
1628 }
1629
1630 /* Save the arg pointer to the block. */
1631 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1632 /* We need the pointer as the caller actually passed the arguments to us,
1633 not as we might have pretended they were passed. Make sure it's a valid
1634 operand, as emit_move_insn isn't expected to handle a PLUS. */
1635 if (STACK_GROWS_DOWNWARD)
1636 tem
1637 = force_operand (plus_constant (Pmode, tem,
1638 crtl->args.pretend_args_size),
1639 NULL_RTX);
1640 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1641
1642 size = GET_MODE_SIZE (Pmode);
1643
1644 /* Save the structure value address unless this is passed as an
1645 "invisible" first argument. */
1646 if (struct_incoming_value)
1647 emit_move_insn (adjust_address (registers, Pmode, size),
1648 copy_to_reg (struct_incoming_value));
1649
1650 /* Return the address of the block. */
1651 return copy_addr_to_reg (XEXP (registers, 0));
1652 }
1653
1654 /* __builtin_apply_args returns block of memory allocated on
1655 the stack into which is stored the arg pointer, structure
1656 value address, static chain, and all the registers that might
1657 possibly be used in performing a function call. The code is
1658 moved to the start of the function so the incoming values are
1659 saved. */
1660
1661 static rtx
1662 expand_builtin_apply_args (void)
1663 {
1664 /* Don't do __builtin_apply_args more than once in a function.
1665 Save the result of the first call and reuse it. */
1666 if (apply_args_value != 0)
1667 return apply_args_value;
1668 {
1669 /* When this function is called, it means that registers must be
1670 saved on entry to this function. So we migrate the
1671 call to the first insn of this function. */
1672 rtx temp;
1673
1674 start_sequence ();
1675 temp = expand_builtin_apply_args_1 ();
1676 rtx_insn *seq = get_insns ();
1677 end_sequence ();
1678
1679 apply_args_value = temp;
1680
1681 /* Put the insns after the NOTE that starts the function.
1682 If this is inside a start_sequence, make the outer-level insn
1683 chain current, so the code is placed at the start of the
1684 function. If internal_arg_pointer is a non-virtual pseudo,
1685 it needs to be placed after the function that initializes
1686 that pseudo. */
1687 push_topmost_sequence ();
1688 if (REG_P (crtl->args.internal_arg_pointer)
1689 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1690 emit_insn_before (seq, parm_birth_insn);
1691 else
1692 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1693 pop_topmost_sequence ();
1694 return temp;
1695 }
1696 }
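
/* Illustrative source-level usage of the untyped-call builtins expanded
   here and below (a sketch, not part of GCC itself):

     double target (int a, double b) { return a + b; }

     double forward (int a, double b)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target, args, 64);
       __builtin_return (ret);
     }

   The last argument to __builtin_apply (64 here) is a caller-chosen
   upper bound on the size of the stack argument data to copy and is
   purely illustrative.  */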
1697
1698 /* Perform an untyped call and save the state required to perform an
1699 untyped return of whatever value was returned by the given function. */
1700
1701 static rtx
1702 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1703 {
1704 int size, align, regno;
1705 fixed_size_mode mode;
1706 rtx incoming_args, result, reg, dest, src;
1707 rtx_call_insn *call_insn;
1708 rtx old_stack_level = 0;
1709 rtx call_fusage = 0;
1710 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1711
1712 arguments = convert_memory_address (Pmode, arguments);
1713
1714 /* Create a block where the return registers can be saved. */
1715 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1716
1717 /* Fetch the arg pointer from the ARGUMENTS block. */
1718 incoming_args = gen_reg_rtx (Pmode);
1719 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1720 if (!STACK_GROWS_DOWNWARD)
1721 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1722 incoming_args, 0, OPTAB_LIB_WIDEN);
1723
1724 /* Push a new argument block and copy the arguments. Do not allow
1725 the (potential) memcpy call below to interfere with our stack
1726 manipulations. */
1727 do_pending_stack_adjust ();
1728 NO_DEFER_POP;
1729
1730 /* Save the stack with nonlocal if available. */
1731 if (targetm.have_save_stack_nonlocal ())
1732 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1733 else
1734 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1735
1736 /* Allocate a block of memory onto the stack and copy the memory
1737 arguments to the outgoing arguments address. We can pass TRUE
1738 as the 4th argument because we just saved the stack pointer
1739 and will restore it right after the call. */
1740 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1741
1742 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1743 may have already set current_function_calls_alloca to true.
1744 current_function_calls_alloca won't be set if argsize is zero,
1745 so we have to guarantee need_drap is true here. */
1746 if (SUPPORTS_STACK_ALIGNMENT)
1747 crtl->need_drap = true;
1748
1749 dest = virtual_outgoing_args_rtx;
1750 if (!STACK_GROWS_DOWNWARD)
1751 {
1752 if (CONST_INT_P (argsize))
1753 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1754 else
1755 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1756 }
1757 dest = gen_rtx_MEM (BLKmode, dest);
1758 set_mem_align (dest, PARM_BOUNDARY);
1759 src = gen_rtx_MEM (BLKmode, incoming_args);
1760 set_mem_align (src, PARM_BOUNDARY);
1761 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1762
1763 /* Refer to the argument block. */
1764 apply_args_size ();
1765 arguments = gen_rtx_MEM (BLKmode, arguments);
1766 set_mem_align (arguments, PARM_BOUNDARY);
1767
1768 /* Walk past the arg-pointer and structure value address. */
1769 size = GET_MODE_SIZE (Pmode);
1770 if (struct_value)
1771 size += GET_MODE_SIZE (Pmode);
1772
1773 /* Restore each of the registers previously saved. Make USE insns
1774 for each of these registers for use in making the call. */
1775 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1776 if ((mode = apply_args_mode[regno]) != VOIDmode)
1777 {
1778 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1779 if (size % align != 0)
1780 size = CEIL (size, align) * align;
1781 reg = gen_rtx_REG (mode, regno);
1782 emit_move_insn (reg, adjust_address (arguments, mode, size));
1783 use_reg (&call_fusage, reg);
1784 size += GET_MODE_SIZE (mode);
1785 }
1786
1787 /* Restore the structure value address unless this is passed as an
1788 "invisible" first argument. */
1789 size = GET_MODE_SIZE (Pmode);
1790 if (struct_value)
1791 {
1792 rtx value = gen_reg_rtx (Pmode);
1793 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1794 emit_move_insn (struct_value, value);
1795 if (REG_P (struct_value))
1796 use_reg (&call_fusage, struct_value);
1797 }
1798
1799 /* All arguments and registers used for the call are set up by now! */
1800 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1801
1802 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1803 needs to be done, and we don't want to load it into a register as an
1804 optimization, because prepare_call_address already did that if needed. */
1805 if (GET_CODE (function) != SYMBOL_REF)
1806 function = memory_address (FUNCTION_MODE, function);
1807
1808 /* Generate the actual call instruction and save the return value. */
1809 if (targetm.have_untyped_call ())
1810 {
1811 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1812 emit_call_insn (targetm.gen_untyped_call (mem, result,
1813 result_vector (1, result)));
1814 }
1815 else if (targetm.have_call_value ())
1816 {
1817 rtx valreg = 0;
1818
1819 /* Locate the unique return register. It is not possible to
1820 express a call that sets more than one return register using
1821 call_value; use untyped_call for that. In fact, untyped_call
1822 only needs to save the return registers in the given block. */
1823 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1824 if ((mode = apply_result_mode[regno]) != VOIDmode)
1825 {
1826 gcc_assert (!valreg); /* have_untyped_call required. */
1827
1828 valreg = gen_rtx_REG (mode, regno);
1829 }
1830
1831 emit_insn (targetm.gen_call_value (valreg,
1832 gen_rtx_MEM (FUNCTION_MODE, function),
1833 const0_rtx, NULL_RTX, const0_rtx));
1834
1835 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1836 }
1837 else
1838 gcc_unreachable ();
1839
1840 /* Find the CALL insn we just emitted, and attach the register usage
1841 information. */
1842 call_insn = last_call_insn ();
1843 add_function_usage_to (call_insn, call_fusage);
1844
1845 /* Restore the stack. */
1846 if (targetm.have_save_stack_nonlocal ())
1847 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1848 else
1849 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1850 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1851
1852 OK_DEFER_POP;
1853
1854 /* Return the address of the result block. */
1855 result = copy_addr_to_reg (XEXP (result, 0));
1856 return convert_memory_address (ptr_mode, result);
1857 }
1858
1859 /* Perform an untyped return. */
1860
1861 static void
1862 expand_builtin_return (rtx result)
1863 {
1864 int size, align, regno;
1865 fixed_size_mode mode;
1866 rtx reg;
1867 rtx_insn *call_fusage = 0;
1868
1869 result = convert_memory_address (Pmode, result);
1870
1871 apply_result_size ();
1872 result = gen_rtx_MEM (BLKmode, result);
1873
1874 if (targetm.have_untyped_return ())
1875 {
1876 rtx vector = result_vector (0, result);
1877 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1878 emit_barrier ();
1879 return;
1880 }
1881
1882 /* Restore the return value and note that each value is used. */
1883 size = 0;
1884 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1885 if ((mode = apply_result_mode[regno]) != VOIDmode)
1886 {
1887 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1888 if (size % align != 0)
1889 size = CEIL (size, align) * align;
1890 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1891 emit_move_insn (reg, adjust_address (result, mode, size));
1892
1893 push_to_sequence (call_fusage);
1894 emit_use (reg);
1895 call_fusage = get_insns ();
1896 end_sequence ();
1897 size += GET_MODE_SIZE (mode);
1898 }
1899
1900 /* Put the USE insns before the return. */
1901 emit_insn (call_fusage);
1902
1903 /* Return whatever values were restored by jumping directly to the end
1904 of the function. */
1905 expand_naked_return ();
1906 }
1907
1908 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1909
1910 static enum type_class
1911 type_to_class (tree type)
1912 {
1913 switch (TREE_CODE (type))
1914 {
1915 case VOID_TYPE: return void_type_class;
1916 case INTEGER_TYPE: return integer_type_class;
1917 case ENUMERAL_TYPE: return enumeral_type_class;
1918 case BOOLEAN_TYPE: return boolean_type_class;
1919 case POINTER_TYPE: return pointer_type_class;
1920 case REFERENCE_TYPE: return reference_type_class;
1921 case OFFSET_TYPE: return offset_type_class;
1922 case REAL_TYPE: return real_type_class;
1923 case COMPLEX_TYPE: return complex_type_class;
1924 case FUNCTION_TYPE: return function_type_class;
1925 case METHOD_TYPE: return method_type_class;
1926 case RECORD_TYPE: return record_type_class;
1927 case UNION_TYPE:
1928 case QUAL_UNION_TYPE: return union_type_class;
1929 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1930 ? string_type_class : array_type_class);
1931 case LANG_TYPE: return lang_type_class;
1932 default: return no_type_class;
1933 }
1934 }
1935
1936 /* Expand a call EXP to __builtin_classify_type. */
1937
1938 static rtx
1939 expand_builtin_classify_type (tree exp)
1940 {
1941 if (call_expr_nargs (exp))
1942 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1943 return GEN_INT (no_type_class);
1944 }
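
/* Illustrative only (not part of GCC): at the source level the builtin
   evaluates to a constant drawn from enum type_class in typeclass.h, e.g.

     int i;
     int ti = __builtin_classify_type (i);    -- integer_type_class
     int tp = __builtin_classify_type (&i);   -- pointer_type_class  */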
1945
1946 /* This helper macro, meant to be used in mathfn_built_in below, determines
1947 which among a set of builtin math functions is appropriate for a given type
1948 mode. The `F' (float) and `L' (long double) are automatically generated
1949 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1950 types, there are additional types that are considered with 'F32', 'F64',
1951 'F128', etc. suffixes. */
1952 #define CASE_MATHFN(MATHFN) \
1953 CASE_CFN_##MATHFN: \
1954 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1955 fcodel = BUILT_IN_##MATHFN##L ; break;
1956 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1957 types. */
1958 #define CASE_MATHFN_FLOATN(MATHFN) \
1959 CASE_CFN_##MATHFN: \
1960 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1961 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1962 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1963 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1964 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1965 break;
1966 /* Similar to above, but appends _R after any F/L suffix. */
1967 #define CASE_MATHFN_REENT(MATHFN) \
1968 case CFN_BUILT_IN_##MATHFN##_R: \
1969 case CFN_BUILT_IN_##MATHFN##F_R: \
1970 case CFN_BUILT_IN_##MATHFN##L_R: \
1971 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1972 fcodel = BUILT_IN_##MATHFN##L_R ; break;
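
/* For instance, CASE_MATHFN (SQRT) expands to

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   where CASE_CFN_SQRT is itself a case-label macro provided by
   case-cfn-macros.h.  */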
1973
1974 /* Return a function equivalent to FN but operating on floating-point
1975 values of type TYPE, or END_BUILTINS if no such function exists.
1976 This is purely an operation on function codes; it does not guarantee
1977 that the target actually has an implementation of the function. */
1978
1979 static built_in_function
1980 mathfn_built_in_2 (tree type, combined_fn fn)
1981 {
1982 tree mtype;
1983 built_in_function fcode, fcodef, fcodel;
1984 built_in_function fcodef16 = END_BUILTINS;
1985 built_in_function fcodef32 = END_BUILTINS;
1986 built_in_function fcodef64 = END_BUILTINS;
1987 built_in_function fcodef128 = END_BUILTINS;
1988 built_in_function fcodef32x = END_BUILTINS;
1989 built_in_function fcodef64x = END_BUILTINS;
1990 built_in_function fcodef128x = END_BUILTINS;
1991
1992 switch (fn)
1993 {
1994 CASE_MATHFN (ACOS)
1995 CASE_MATHFN (ACOSH)
1996 CASE_MATHFN (ASIN)
1997 CASE_MATHFN (ASINH)
1998 CASE_MATHFN (ATAN)
1999 CASE_MATHFN (ATAN2)
2000 CASE_MATHFN (ATANH)
2001 CASE_MATHFN (CBRT)
2002 CASE_MATHFN_FLOATN (CEIL)
2003 CASE_MATHFN (CEXPI)
2004 CASE_MATHFN_FLOATN (COPYSIGN)
2005 CASE_MATHFN (COS)
2006 CASE_MATHFN (COSH)
2007 CASE_MATHFN (DREM)
2008 CASE_MATHFN (ERF)
2009 CASE_MATHFN (ERFC)
2010 CASE_MATHFN (EXP)
2011 CASE_MATHFN (EXP10)
2012 CASE_MATHFN (EXP2)
2013 CASE_MATHFN (EXPM1)
2014 CASE_MATHFN (FABS)
2015 CASE_MATHFN (FDIM)
2016 CASE_MATHFN_FLOATN (FLOOR)
2017 CASE_MATHFN_FLOATN (FMA)
2018 CASE_MATHFN_FLOATN (FMAX)
2019 CASE_MATHFN_FLOATN (FMIN)
2020 CASE_MATHFN (FMOD)
2021 CASE_MATHFN (FREXP)
2022 CASE_MATHFN (GAMMA)
2023 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2024 CASE_MATHFN (HUGE_VAL)
2025 CASE_MATHFN (HYPOT)
2026 CASE_MATHFN (ILOGB)
2027 CASE_MATHFN (ICEIL)
2028 CASE_MATHFN (IFLOOR)
2029 CASE_MATHFN (INF)
2030 CASE_MATHFN (IRINT)
2031 CASE_MATHFN (IROUND)
2032 CASE_MATHFN (ISINF)
2033 CASE_MATHFN (J0)
2034 CASE_MATHFN (J1)
2035 CASE_MATHFN (JN)
2036 CASE_MATHFN (LCEIL)
2037 CASE_MATHFN (LDEXP)
2038 CASE_MATHFN (LFLOOR)
2039 CASE_MATHFN (LGAMMA)
2040 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2041 CASE_MATHFN (LLCEIL)
2042 CASE_MATHFN (LLFLOOR)
2043 CASE_MATHFN (LLRINT)
2044 CASE_MATHFN (LLROUND)
2045 CASE_MATHFN (LOG)
2046 CASE_MATHFN (LOG10)
2047 CASE_MATHFN (LOG1P)
2048 CASE_MATHFN (LOG2)
2049 CASE_MATHFN (LOGB)
2050 CASE_MATHFN (LRINT)
2051 CASE_MATHFN (LROUND)
2052 CASE_MATHFN (MODF)
2053 CASE_MATHFN (NAN)
2054 CASE_MATHFN (NANS)
2055 CASE_MATHFN_FLOATN (NEARBYINT)
2056 CASE_MATHFN (NEXTAFTER)
2057 CASE_MATHFN (NEXTTOWARD)
2058 CASE_MATHFN (POW)
2059 CASE_MATHFN (POWI)
2060 CASE_MATHFN (POW10)
2061 CASE_MATHFN (REMAINDER)
2062 CASE_MATHFN (REMQUO)
2063 CASE_MATHFN_FLOATN (RINT)
2064 CASE_MATHFN_FLOATN (ROUND)
2065 CASE_MATHFN_FLOATN (ROUNDEVEN)
2066 CASE_MATHFN (SCALB)
2067 CASE_MATHFN (SCALBLN)
2068 CASE_MATHFN (SCALBN)
2069 CASE_MATHFN (SIGNBIT)
2070 CASE_MATHFN (SIGNIFICAND)
2071 CASE_MATHFN (SIN)
2072 CASE_MATHFN (SINCOS)
2073 CASE_MATHFN (SINH)
2074 CASE_MATHFN_FLOATN (SQRT)
2075 CASE_MATHFN (TAN)
2076 CASE_MATHFN (TANH)
2077 CASE_MATHFN (TGAMMA)
2078 CASE_MATHFN_FLOATN (TRUNC)
2079 CASE_MATHFN (Y0)
2080 CASE_MATHFN (Y1)
2081 CASE_MATHFN (YN)
2082
2083 default:
2084 return END_BUILTINS;
2085 }
2086
2087 mtype = TYPE_MAIN_VARIANT (type);
2088 if (mtype == double_type_node)
2089 return fcode;
2090 else if (mtype == float_type_node)
2091 return fcodef;
2092 else if (mtype == long_double_type_node)
2093 return fcodel;
2094 else if (mtype == float16_type_node)
2095 return fcodef16;
2096 else if (mtype == float32_type_node)
2097 return fcodef32;
2098 else if (mtype == float64_type_node)
2099 return fcodef64;
2100 else if (mtype == float128_type_node)
2101 return fcodef128;
2102 else if (mtype == float32x_type_node)
2103 return fcodef32x;
2104 else if (mtype == float64x_type_node)
2105 return fcodef64x;
2106 else if (mtype == float128x_type_node)
2107 return fcodef128x;
2108 else
2109 return END_BUILTINS;
2110 }
2111
2112 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2113 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2114 otherwise use the explicit declaration. If we can't do the conversion,
2115 return null. */
2116
2117 static tree
2118 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2119 {
2120 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2121 if (fcode2 == END_BUILTINS)
2122 return NULL_TREE;
2123
2124 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2125 return NULL_TREE;
2126
2127 return builtin_decl_explicit (fcode2);
2128 }
2129
2130 /* Like mathfn_built_in_1, but always use the implicit array. */
2131
2132 tree
2133 mathfn_built_in (tree type, combined_fn fn)
2134 {
2135 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2136 }
2137
2138 /* Like mathfn_built_in_1, but take a built_in_function and
2139 always use the implicit array. */
2140
2141 tree
2142 mathfn_built_in (tree type, enum built_in_function fn)
2143 {
2144 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2145 }
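
/* For example (illustrative): mathfn_built_in (float_type_node,
   BUILT_IN_SQRT) yields the declaration of the sqrtf built-in, and
   mathfn_built_in (long_double_type_node, BUILT_IN_SQRT) that of
   sqrtl, or NULL_TREE when the declaration is not implicitly
   available.  */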
2146
2147 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2148 return its code, otherwise return IFN_LAST. Note that this function
2149 only tests whether the function is defined in internal-fn.def, not whether
2150 it is actually available on the target. */
2151
2152 internal_fn
2153 associated_internal_fn (tree fndecl)
2154 {
2155 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2156 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2157 switch (DECL_FUNCTION_CODE (fndecl))
2158 {
2159 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2160 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2161 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2162 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2163 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2164 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2165 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2166 #include "internal-fn.def"
2167
2168 CASE_FLT_FN (BUILT_IN_POW10):
2169 return IFN_EXP10;
2170
2171 CASE_FLT_FN (BUILT_IN_DREM):
2172 return IFN_REMAINDER;
2173
2174 CASE_FLT_FN (BUILT_IN_SCALBN):
2175 CASE_FLT_FN (BUILT_IN_SCALBLN):
2176 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2177 return IFN_LDEXP;
2178 return IFN_LAST;
2179
2180 default:
2181 return IFN_LAST;
2182 }
2183 }
2184
2185 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2186 on the current target by a call to an internal function, return the
2187 code of that internal function, otherwise return IFN_LAST. The caller
2188 is responsible for ensuring that any side-effects of the built-in
2189 call are dealt with correctly. E.g. if CALL sets errno, the caller
2190 must decide that the errno result isn't needed or make it available
2191 in some other way. */
2192
2193 internal_fn
2194 replacement_internal_fn (gcall *call)
2195 {
2196 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2197 {
2198 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2199 if (ifn != IFN_LAST)
2200 {
2201 tree_pair types = direct_internal_fn_types (ifn, call);
2202 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2203 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2204 return ifn;
2205 }
2206 }
2207 return IFN_LAST;
2208 }
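
/* For example (a sketch): given a GIMPLE call to __builtin_sqrt,
   replacement_internal_fn returns IFN_SQRT when the target supports a
   direct square-root operation in the call's mode (per internal-fn.def);
   the caller then has to deal with the errno semantics as noted
   above.  */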
2209
2210 /* Expand a call to the builtin ternary math functions (fma).
2211 Return NULL_RTX if a normal call should be emitted rather than expanding the
2212 function in-line. EXP is the expression that is a call to the builtin
2213 function; if convenient, the result should be placed in TARGET.
2214 SUBTARGET may be used as the target for computing one of EXP's
2215 operands. */
2216
2217 static rtx
2218 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2219 {
2220 optab builtin_optab;
2221 rtx op0, op1, op2, result;
2222 rtx_insn *insns;
2223 tree fndecl = get_callee_fndecl (exp);
2224 tree arg0, arg1, arg2;
2225 machine_mode mode;
2226
2227 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2228 return NULL_RTX;
2229
2230 arg0 = CALL_EXPR_ARG (exp, 0);
2231 arg1 = CALL_EXPR_ARG (exp, 1);
2232 arg2 = CALL_EXPR_ARG (exp, 2);
2233
2234 switch (DECL_FUNCTION_CODE (fndecl))
2235 {
2236 CASE_FLT_FN (BUILT_IN_FMA):
2237 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2238 builtin_optab = fma_optab; break;
2239 default:
2240 gcc_unreachable ();
2241 }
2242
2243 /* Make a suitable register to place result in. */
2244 mode = TYPE_MODE (TREE_TYPE (exp));
2245
2246 /* Before working hard, check whether the instruction is available. */
2247 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2248 return NULL_RTX;
2249
2250 result = gen_reg_rtx (mode);
2251
2252 /* Always stabilize the argument list. */
2253 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2254 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2255 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2256
2257 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2258 op1 = expand_normal (arg1);
2259 op2 = expand_normal (arg2);
2260
2261 start_sequence ();
2262
2263 /* Compute into RESULT.
2264 Set RESULT to wherever the result comes back. */
2265 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2266 result, 0);
2267
2268 /* If we were unable to expand via the builtin, stop the sequence
2269 (without outputting the insns) and call to the library function
2270 with the stabilized argument list. */
2271 if (result == 0)
2272 {
2273 end_sequence ();
2274 return expand_call (exp, target, target == const0_rtx);
2275 }
2276
2277 /* Output the entire sequence. */
2278 insns = get_insns ();
2279 end_sequence ();
2280 emit_insn (insns);
2281
2282 return result;
2283 }
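
/* For instance (illustrative), a call such as

     double d = __builtin_fma (a, b, c);

   is expanded through fma_optab into a single fused multiply-add insn
   when the target provides one; otherwise the code above falls back to
   a library call to fma.  */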
2284
2285 /* Expand a call to the builtin sin and cos math functions.
2286 Return NULL_RTX if a normal call should be emitted rather than expanding the
2287 function in-line. EXP is the expression that is a call to the builtin
2288 function; if convenient, the result should be placed in TARGET.
2289 SUBTARGET may be used as the target for computing one of EXP's
2290 operands. */
2291
2292 static rtx
2293 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2294 {
2295 optab builtin_optab;
2296 rtx op0;
2297 rtx_insn *insns;
2298 tree fndecl = get_callee_fndecl (exp);
2299 machine_mode mode;
2300 tree arg;
2301
2302 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2303 return NULL_RTX;
2304
2305 arg = CALL_EXPR_ARG (exp, 0);
2306
2307 switch (DECL_FUNCTION_CODE (fndecl))
2308 {
2309 CASE_FLT_FN (BUILT_IN_SIN):
2310 CASE_FLT_FN (BUILT_IN_COS):
2311 builtin_optab = sincos_optab; break;
2312 default:
2313 gcc_unreachable ();
2314 }
2315
2316 /* Make a suitable register to place result in. */
2317 mode = TYPE_MODE (TREE_TYPE (exp));
2318
2319 /* Check if the sincos insn is available, otherwise fall back
2320 to the sin or cos insn. */
2321 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2322 switch (DECL_FUNCTION_CODE (fndecl))
2323 {
2324 CASE_FLT_FN (BUILT_IN_SIN):
2325 builtin_optab = sin_optab; break;
2326 CASE_FLT_FN (BUILT_IN_COS):
2327 builtin_optab = cos_optab; break;
2328 default:
2329 gcc_unreachable ();
2330 }
2331
2332 /* Before working hard, check whether the instruction is available. */
2333 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2334 {
2335 rtx result = gen_reg_rtx (mode);
2336
2337 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2338 need to expand the argument again. This way, we will not perform
2339 side-effects more than once. */
2340 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2341
2342 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2343
2344 start_sequence ();
2345
2346 /* Compute into RESULT.
2347 Set RESULT to wherever the result comes back. */
2348 if (builtin_optab == sincos_optab)
2349 {
2350 int ok;
2351
2352 switch (DECL_FUNCTION_CODE (fndecl))
2353 {
2354 CASE_FLT_FN (BUILT_IN_SIN):
2355 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2356 break;
2357 CASE_FLT_FN (BUILT_IN_COS):
2358 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2359 break;
2360 default:
2361 gcc_unreachable ();
2362 }
2363 gcc_assert (ok);
2364 }
2365 else
2366 result = expand_unop (mode, builtin_optab, op0, result, 0);
2367
2368 if (result != 0)
2369 {
2370 /* Output the entire sequence. */
2371 insns = get_insns ();
2372 end_sequence ();
2373 emit_insn (insns);
2374 return result;
2375 }
2376
2377 /* If we were unable to expand via the builtin, stop the sequence
2378 (without outputting the insns) and call to the library function
2379 with the stabilized argument list. */
2380 end_sequence ();
2381 }
2382
2383 return expand_call (exp, target, target == const0_rtx);
2384 }
2385
2386 /* Given an interclass math builtin decl FNDECL and its argument ARG
2387 return an RTL instruction code that implements the functionality.
2388 If that isn't possible or available return CODE_FOR_nothing. */
2389
2390 static enum insn_code
2391 interclass_mathfn_icode (tree arg, tree fndecl)
2392 {
2393 bool errno_set = false;
2394 optab builtin_optab = unknown_optab;
2395 machine_mode mode;
2396
2397 switch (DECL_FUNCTION_CODE (fndecl))
2398 {
2399 CASE_FLT_FN (BUILT_IN_ILOGB):
2400 errno_set = true; builtin_optab = ilogb_optab; break;
2401 CASE_FLT_FN (BUILT_IN_ISINF):
2402 builtin_optab = isinf_optab; break;
2403 case BUILT_IN_ISNORMAL:
2404 case BUILT_IN_ISFINITE:
2405 CASE_FLT_FN (BUILT_IN_FINITE):
2406 case BUILT_IN_FINITED32:
2407 case BUILT_IN_FINITED64:
2408 case BUILT_IN_FINITED128:
2409 case BUILT_IN_ISINFD32:
2410 case BUILT_IN_ISINFD64:
2411 case BUILT_IN_ISINFD128:
2412 /* These builtins have no optabs (yet). */
2413 break;
2414 default:
2415 gcc_unreachable ();
2416 }
2417
2418 /* There's no easy way to detect the case we need to set EDOM. */
2419 if (flag_errno_math && errno_set)
2420 return CODE_FOR_nothing;
2421
2422 /* Optab mode depends on the mode of the input argument. */
2423 mode = TYPE_MODE (TREE_TYPE (arg));
2424
2425 if (builtin_optab)
2426 return optab_handler (builtin_optab, mode);
2427 return CODE_FOR_nothing;
2428 }
2429
2430 /* Expand a call to one of the builtin math functions that operate on
2431 a floating point argument and produce an integer result (ilogb, isinf,
2432 isnan, etc).
2433 Return 0 if a normal call should be emitted rather than expanding the
2434 function in-line. EXP is the expression that is a call to the builtin
2435 function; if convenient, the result should be placed in TARGET. */
2436
2437 static rtx
2438 expand_builtin_interclass_mathfn (tree exp, rtx target)
2439 {
2440 enum insn_code icode = CODE_FOR_nothing;
2441 rtx op0;
2442 tree fndecl = get_callee_fndecl (exp);
2443 machine_mode mode;
2444 tree arg;
2445
2446 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2447 return NULL_RTX;
2448
2449 arg = CALL_EXPR_ARG (exp, 0);
2450 icode = interclass_mathfn_icode (arg, fndecl);
2451 mode = TYPE_MODE (TREE_TYPE (arg));
2452
2453 if (icode != CODE_FOR_nothing)
2454 {
2455 class expand_operand ops[1];
2456 rtx_insn *last = get_last_insn ();
2457 tree orig_arg = arg;
2458
2459 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2460 need to expand the argument again. This way, we will not perform
2461 side-effects more than once. */
2462 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2463
2464 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2465
2466 if (mode != GET_MODE (op0))
2467 op0 = convert_to_mode (mode, op0, 0);
2468
2469 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2470 if (maybe_legitimize_operands (icode, 0, 1, ops)
2471 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2472 return ops[0].value;
2473
2474 delete_insns_since (last);
2475 CALL_EXPR_ARG (exp, 0) = orig_arg;
2476 }
2477
2478 return NULL_RTX;
2479 }
2480
2481 /* Expand a call to the builtin sincos math function.
2482 Return NULL_RTX if a normal call should be emitted rather than expanding the
2483 function in-line. EXP is the expression that is a call to the builtin
2484 function. */
2485
2486 static rtx
2487 expand_builtin_sincos (tree exp)
2488 {
2489 rtx op0, op1, op2, target1, target2;
2490 machine_mode mode;
2491 tree arg, sinp, cosp;
2492 int result;
2493 location_t loc = EXPR_LOCATION (exp);
2494 tree alias_type, alias_off;
2495
2496 if (!validate_arglist (exp, REAL_TYPE,
2497 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2498 return NULL_RTX;
2499
2500 arg = CALL_EXPR_ARG (exp, 0);
2501 sinp = CALL_EXPR_ARG (exp, 1);
2502 cosp = CALL_EXPR_ARG (exp, 2);
2503
2504 /* Make a suitable register to place result in. */
2505 mode = TYPE_MODE (TREE_TYPE (arg));
2506
2507 /* Check if sincos insn is available, otherwise emit the call. */
2508 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2509 return NULL_RTX;
2510
2511 target1 = gen_reg_rtx (mode);
2512 target2 = gen_reg_rtx (mode);
2513
2514 op0 = expand_normal (arg);
2515 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2516 alias_off = build_int_cst (alias_type, 0);
2517 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2518 sinp, alias_off));
2519 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2520 cosp, alias_off));
2521
2522 /* Compute into target1 and target2.
2523 Set TARGET to wherever the result comes back. */
2524 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2525 gcc_assert (result);
2526
2527 /* Move target1 and target2 to the memory locations indicated
2528 by op1 and op2. */
2529 emit_move_insn (op1, target1);
2530 emit_move_insn (op2, target2);
2531
2532 return const0_rtx;
2533 }
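
/* Illustrative source-level effect (a sketch, not a literal dump):

     double s, c;
     sincos (x, &s, &c);

   expands to a single sincos-pattern insn computing both results when
   sincos_optab has a handler for DFmode; otherwise NULL_RTX is returned
   above and the library call is emitted instead.  */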
2534
2535 /* Expand a call to the internal cexpi builtin to the sincos math function.
2536 EXP is the expression that is a call to the builtin function; if convenient,
2537 the result should be placed in TARGET. */
2538
2539 static rtx
2540 expand_builtin_cexpi (tree exp, rtx target)
2541 {
2542 tree fndecl = get_callee_fndecl (exp);
2543 tree arg, type;
2544 machine_mode mode;
2545 rtx op0, op1, op2;
2546 location_t loc = EXPR_LOCATION (exp);
2547
2548 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2549 return NULL_RTX;
2550
2551 arg = CALL_EXPR_ARG (exp, 0);
2552 type = TREE_TYPE (arg);
2553 mode = TYPE_MODE (TREE_TYPE (arg));
2554
2555 /* Try expanding via a sincos optab, fall back to emitting a libcall
2556 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2557 is only generated from sincos or cexp, or when either of them is available. */
2558 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2559 {
2560 op1 = gen_reg_rtx (mode);
2561 op2 = gen_reg_rtx (mode);
2562
2563 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2564
2565 /* Compute into op1 and op2. */
2566 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2567 }
2568 else if (targetm.libc_has_function (function_sincos))
2569 {
2570 tree call, fn = NULL_TREE;
2571 tree top1, top2;
2572 rtx op1a, op2a;
2573
2574 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2575 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2576 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2577 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2578 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2579 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2580 else
2581 gcc_unreachable ();
2582
2583 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2584 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2585 op1a = copy_addr_to_reg (XEXP (op1, 0));
2586 op2a = copy_addr_to_reg (XEXP (op2, 0));
2587 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2588 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2589
2590 /* Make sure not to fold the sincos call again. */
2591 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2592 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2593 call, 3, arg, top1, top2));
2594 }
2595 else
2596 {
2597 tree call, fn = NULL_TREE, narg;
2598 tree ctype = build_complex_type (type);
2599
2600 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2601 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2602 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2603 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2604 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2605 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2606 else
2607 gcc_unreachable ();
2608
2609 /* If we don't have a decl for cexp, create one. This is the
2610 friendliest fallback if the user calls __builtin_cexpi
2611 on a target without full C99 math function support. */
2612 if (fn == NULL_TREE)
2613 {
2614 tree fntype;
2615 const char *name = NULL;
2616
2617 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2618 name = "cexpf";
2619 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2620 name = "cexp";
2621 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2622 name = "cexpl";
2623
2624 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2625 fn = build_fn_decl (name, fntype);
2626 }
2627
2628 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2629 build_real (type, dconst0), arg);
2630
2631 /* Make sure not to fold the cexp call again. */
2632 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2633 return expand_expr (build_call_nary (ctype, call, 1, narg),
2634 target, VOIDmode, EXPAND_NORMAL);
2635 }
2636
2637 /* Now build the proper return type. */
2638 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2639 make_tree (TREE_TYPE (arg), op2),
2640 make_tree (TREE_TYPE (arg), op1)),
2641 target, VOIDmode, EXPAND_NORMAL);
2642 }
2643
2644 /* Conveniently construct a function call expression. FNDECL names the
2645 function to be called, N is the number of arguments, and the "..."
2646 parameters are the argument expressions. Unlike build_call_expr
2647 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2648
2649 static tree
2650 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2651 {
2652 va_list ap;
2653 tree fntype = TREE_TYPE (fndecl);
2654 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2655
2656 va_start (ap, n);
2657 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2658 va_end (ap);
2659 SET_EXPR_LOCATION (fn, loc);
2660 return fn;
2661 }
2662
2663 /* Expand a call to one of the builtin rounding functions gcc defines
2664 as an extension (lfloor and lceil). As these are gcc extensions we
2665 do not need to worry about setting errno to EDOM.
2666 If expanding via optab fails, lower expression to (int)(floor(x)).
2667 EXP is the expression that is a call to the builtin function;
2668 if convenient, the result should be placed in TARGET. */
2669
2670 static rtx
2671 expand_builtin_int_roundingfn (tree exp, rtx target)
2672 {
2673 convert_optab builtin_optab;
2674 rtx op0, tmp;
2675 rtx_insn *insns;
2676 tree fndecl = get_callee_fndecl (exp);
2677 enum built_in_function fallback_fn;
2678 tree fallback_fndecl;
2679 machine_mode mode;
2680 tree arg;
2681
2682 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2683 return NULL_RTX;
2684
2685 arg = CALL_EXPR_ARG (exp, 0);
2686
2687 switch (DECL_FUNCTION_CODE (fndecl))
2688 {
2689 CASE_FLT_FN (BUILT_IN_ICEIL):
2690 CASE_FLT_FN (BUILT_IN_LCEIL):
2691 CASE_FLT_FN (BUILT_IN_LLCEIL):
2692 builtin_optab = lceil_optab;
2693 fallback_fn = BUILT_IN_CEIL;
2694 break;
2695
2696 CASE_FLT_FN (BUILT_IN_IFLOOR):
2697 CASE_FLT_FN (BUILT_IN_LFLOOR):
2698 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2699 builtin_optab = lfloor_optab;
2700 fallback_fn = BUILT_IN_FLOOR;
2701 break;
2702
2703 default:
2704 gcc_unreachable ();
2705 }
2706
2707 /* Make a suitable register to place result in. */
2708 mode = TYPE_MODE (TREE_TYPE (exp));
2709
2710 target = gen_reg_rtx (mode);
2711
2712 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2713 need to expand the argument again. This way, we will not perform
2714 side-effects more than once. */
2715 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2716
2717 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2718
2719 start_sequence ();
2720
2721 /* Compute into TARGET. */
2722 if (expand_sfix_optab (target, op0, builtin_optab))
2723 {
2724 /* Output the entire sequence. */
2725 insns = get_insns ();
2726 end_sequence ();
2727 emit_insn (insns);
2728 return target;
2729 }
2730
2731 /* If we were unable to expand via the builtin, stop the sequence
2732 (without outputting the insns). */
2733 end_sequence ();
2734
2735 /* Fall back to floating point rounding optab. */
2736 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2737
2738 /* For non-C99 targets we may end up without a fallback fndecl here
2739 if the user called __builtin_lfloor directly. In this case emit
2740 a call to the floor/ceil variants nevertheless. This should result
2741 in the best user experience for targets without full C99 support. */
2742 if (fallback_fndecl == NULL_TREE)
2743 {
2744 tree fntype;
2745 const char *name = NULL;
2746
2747 switch (DECL_FUNCTION_CODE (fndecl))
2748 {
2749 case BUILT_IN_ICEIL:
2750 case BUILT_IN_LCEIL:
2751 case BUILT_IN_LLCEIL:
2752 name = "ceil";
2753 break;
2754 case BUILT_IN_ICEILF:
2755 case BUILT_IN_LCEILF:
2756 case BUILT_IN_LLCEILF:
2757 name = "ceilf";
2758 break;
2759 case BUILT_IN_ICEILL:
2760 case BUILT_IN_LCEILL:
2761 case BUILT_IN_LLCEILL:
2762 name = "ceill";
2763 break;
2764 case BUILT_IN_IFLOOR:
2765 case BUILT_IN_LFLOOR:
2766 case BUILT_IN_LLFLOOR:
2767 name = "floor";
2768 break;
2769 case BUILT_IN_IFLOORF:
2770 case BUILT_IN_LFLOORF:
2771 case BUILT_IN_LLFLOORF:
2772 name = "floorf";
2773 break;
2774 case BUILT_IN_IFLOORL:
2775 case BUILT_IN_LFLOORL:
2776 case BUILT_IN_LLFLOORL:
2777 name = "floorl";
2778 break;
2779 default:
2780 gcc_unreachable ();
2781 }
2782
2783 fntype = build_function_type_list (TREE_TYPE (arg),
2784 TREE_TYPE (arg), NULL_TREE);
2785 fallback_fndecl = build_fn_decl (name, fntype);
2786 }
2787
2788 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2789
2790 tmp = expand_normal (exp);
2791 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2792
2793 /* Truncate the result of floating point optab to integer
2794 via expand_fix (). */
2795 target = gen_reg_rtx (mode);
2796 expand_fix (target, tmp, 0);
2797
2798 return target;
2799 }
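
/* For example: when the target has no lceil/lfloor pattern,

     long l = __builtin_lfloor (x);

   is lowered by the code above to roughly

     long l = (long) floor (x);

   i.e. a call to the floating-point rounding function followed by a
   float-to-integer conversion via expand_fix.  */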
2800
2801 /* Expand a call to one of the builtin math functions doing integer
2802 conversion (lrint).
2803 Return 0 if a normal call should be emitted rather than expanding the
2804 function in-line. EXP is the expression that is a call to the builtin
2805 function; if convenient, the result should be placed in TARGET. */
2806
2807 static rtx
2808 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2809 {
2810 convert_optab builtin_optab;
2811 rtx op0;
2812 rtx_insn *insns;
2813 tree fndecl = get_callee_fndecl (exp);
2814 tree arg;
2815 machine_mode mode;
2816 enum built_in_function fallback_fn = BUILT_IN_NONE;
2817
2818 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2819 return NULL_RTX;
2820
2821 arg = CALL_EXPR_ARG (exp, 0);
2822
2823 switch (DECL_FUNCTION_CODE (fndecl))
2824 {
2825 CASE_FLT_FN (BUILT_IN_IRINT):
2826 fallback_fn = BUILT_IN_LRINT;
2827 gcc_fallthrough ();
2828 CASE_FLT_FN (BUILT_IN_LRINT):
2829 CASE_FLT_FN (BUILT_IN_LLRINT):
2830 builtin_optab = lrint_optab;
2831 break;
2832
2833 CASE_FLT_FN (BUILT_IN_IROUND):
2834 fallback_fn = BUILT_IN_LROUND;
2835 gcc_fallthrough ();
2836 CASE_FLT_FN (BUILT_IN_LROUND):
2837 CASE_FLT_FN (BUILT_IN_LLROUND):
2838 builtin_optab = lround_optab;
2839 break;
2840
2841 default:
2842 gcc_unreachable ();
2843 }
2844
2845 /* There's no easy way to detect the case we need to set EDOM. */
2846 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2847 return NULL_RTX;
2848
2849 /* Make a suitable register to place result in. */
2850 mode = TYPE_MODE (TREE_TYPE (exp));
2851
2852 /* Try expanding inline when errno does not need to be set. */
2853 if (!flag_errno_math)
2854 {
2855 rtx result = gen_reg_rtx (mode);
2856
2857 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2858 need to expand the argument again. This way, we will not perform
2859 side-effects more than once. */
2860 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2861
2862 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2863
2864 start_sequence ();
2865
2866 if (expand_sfix_optab (result, op0, builtin_optab))
2867 {
2868 /* Output the entire sequence. */
2869 insns = get_insns ();
2870 end_sequence ();
2871 emit_insn (insns);
2872 return result;
2873 }
2874
2875 /* If we were unable to expand via the builtin, stop the sequence
2876 (without outputting the insns) and call to the library function
2877 with the stabilized argument list. */
2878 end_sequence ();
2879 }
2880
2881 if (fallback_fn != BUILT_IN_NONE)
2882 {
2883 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2884 targets, (int) round (x) should never be transformed into
2885 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2886 a call to lround in the hope that the target provides at least some
2887 C99 functions. This should result in the best user experience for
2888 targets without full C99 support. */
2889 tree fallback_fndecl = mathfn_built_in_1
2890 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2891
2892 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2893 fallback_fndecl, 1, arg);
2894
2895 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2896 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2897 return convert_to_mode (mode, target, 0);
2898 }
2899
2900 return expand_call (exp, target, target == const0_rtx);
2901 }
2902
2903 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2904 a normal call should be emitted rather than expanding the function
2905 in-line. EXP is the expression that is a call to the builtin
2906 function; if convenient, the result should be placed in TARGET. */
2907
2908 static rtx
2909 expand_builtin_powi (tree exp, rtx target)
2910 {
2911 tree arg0, arg1;
2912 rtx op0, op1;
2913 machine_mode mode;
2914 machine_mode mode2;
2915
2916 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2917 return NULL_RTX;
2918
2919 arg0 = CALL_EXPR_ARG (exp, 0);
2920 arg1 = CALL_EXPR_ARG (exp, 1);
2921 mode = TYPE_MODE (TREE_TYPE (exp));
2922
2923 /* Emit a libcall to libgcc. */
2924
2925 /* Mode of the 2nd argument must match that of an int. */
2926 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2927
2928 if (target == NULL_RTX)
2929 target = gen_reg_rtx (mode);
2930
2931 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2932 if (GET_MODE (op0) != mode)
2933 op0 = convert_to_mode (mode, op0, 0);
2934 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2935 if (GET_MODE (op1) != mode2)
2936 op1 = convert_to_mode (mode2, op1, 0);
2937
2938 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2939 target, LCT_CONST, mode,
2940 op0, mode, op1, mode2);
2941
2942 return target;
2943 }
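
/* For example (illustrative): __builtin_powi (x, n) with double X is
   emitted as a libcall to the libgcc helper registered for powi_optab
   in DFmode (conventionally __powidf2), passing X in DFmode and N as
   an int.  */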
2944
2945 /* Expand expression EXP which is a call to the strlen builtin. Return
2946 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2947 try to get the result in TARGET, if convenient. */
2948
2949 static rtx
2950 expand_builtin_strlen (tree exp, rtx target,
2951 machine_mode target_mode)
2952 {
2953 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2954 return NULL_RTX;
2955
2956 class expand_operand ops[4];
2957 rtx pat;
2958 tree len;
2959 tree src = CALL_EXPR_ARG (exp, 0);
2960 rtx src_reg;
2961 rtx_insn *before_strlen;
2962 machine_mode insn_mode;
2963 enum insn_code icode = CODE_FOR_nothing;
2964 unsigned int align;
2965
2966 /* If the length can be computed at compile-time, return it. */
2967 len = c_strlen (src, 0);
2968 if (len)
2969 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2970
2971 /* If the length can be computed at compile-time and is a constant
2972 integer, but there are side-effects in src, evaluate
2973 src for side-effects, then return len.
2974 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2975 can be optimized into: i++; x = 3; */
2976 len = c_strlen (src, 1);
2977 if (len && TREE_CODE (len) == INTEGER_CST)
2978 {
2979 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2980 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2981 }
2982
2983 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2984
2985 /* If SRC is not a pointer type, don't do this operation inline. */
2986 if (align == 0)
2987 return NULL_RTX;
2988
2989 /* Bail out if we can't compute strlen in the right mode. */
2990 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2991 {
2992 icode = optab_handler (strlen_optab, insn_mode);
2993 if (icode != CODE_FOR_nothing)
2994 break;
2995 }
2996 if (insn_mode == VOIDmode)
2997 return NULL_RTX;
2998
2999 /* Make a place to hold the source address. We will not expand
3000 the actual source until we are sure that the expansion will
3001 not fail -- there are trees that cannot be expanded twice. */
3002 src_reg = gen_reg_rtx (Pmode);
3003
3004 /* Mark the beginning of the strlen sequence so we can emit the
3005 source operand later. */
3006 before_strlen = get_last_insn ();
3007
3008 create_output_operand (&ops[0], target, insn_mode);
3009 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3010 create_integer_operand (&ops[2], 0);
3011 create_integer_operand (&ops[3], align);
3012 if (!maybe_expand_insn (icode, 4, ops))
3013 return NULL_RTX;
3014
3015 /* Check to see if the argument was declared attribute nonstring
3016 and if so, issue a warning since at this point it's not known
3017 to be nul-terminated. */
3018 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3019
3020 /* Now that we are assured of success, expand the source. */
3021 start_sequence ();
3022 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3023 if (pat != src_reg)
3024 {
3025 #ifdef POINTERS_EXTEND_UNSIGNED
3026 if (GET_MODE (pat) != Pmode)
3027 pat = convert_to_mode (Pmode, pat,
3028 POINTERS_EXTEND_UNSIGNED);
3029 #endif
3030 emit_move_insn (src_reg, pat);
3031 }
3032 pat = get_insns ();
3033 end_sequence ();
3034
3035 if (before_strlen)
3036 emit_insn_after (pat, before_strlen);
3037 else
3038 emit_insn_before (pat, get_insns ());
3039
3040 /* Return the value in the proper mode for this function. */
3041 if (GET_MODE (ops[0].value) == target_mode)
3042 target = ops[0].value;
3043 else if (target != 0)
3044 convert_move (target, ops[0].value, 0);
3045 else
3046 target = convert_to_mode (target_mode, ops[0].value, 0);
3047
3048 return target;
3049 }
3050
3051 /* Expand call EXP to the strnlen built-in, returning the result
3052 and setting it in TARGET; return NULL_RTX on failure. */
3053
3054 static rtx
3055 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3056 {
3057 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3058 return NULL_RTX;
3059
3060 tree src = CALL_EXPR_ARG (exp, 0);
3061 tree bound = CALL_EXPR_ARG (exp, 1);
3062
3063 if (!bound)
3064 return NULL_RTX;
3065
3066 location_t loc = UNKNOWN_LOCATION;
3067 if (EXPR_HAS_LOCATION (exp))
3068 loc = EXPR_LOCATION (exp);
3069
3070 tree maxobjsize = max_object_size ();
3071 tree func = get_callee_fndecl (exp);
3072
3073 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3074 so these conversions aren't necessary. */
3075 c_strlen_data lendata = { };
3076 tree len = c_strlen (src, 0, &lendata, 1);
3077 if (len)
3078 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3079
3080 if (TREE_CODE (bound) == INTEGER_CST)
3081 {
3082 if (!TREE_NO_WARNING (exp)
3083 && tree_int_cst_lt (maxobjsize, bound)
3084 && warning_at (loc, OPT_Wstringop_overflow_,
3085 "%K%qD specified bound %E "
3086 "exceeds maximum object size %E",
3087 exp, func, bound, maxobjsize))
3088 TREE_NO_WARNING (exp) = true;
3089
3090 bool exact = true;
3091 if (!len || TREE_CODE (len) != INTEGER_CST)
3092 {
3093 /* Clear EXACT if LEN may be less than SRC suggests,
3094 such as in
3095 strnlen (&a[i], sizeof a)
3096 where the value of i is unknown. Unless i's value is
3097 zero, the call is unsafe because the bound is greater. */
3098 lendata.decl = unterminated_array (src, &len, &exact);
3099 if (!lendata.decl)
3100 return NULL_RTX;
3101 }
3102
3103 if (lendata.decl
3104 && !TREE_NO_WARNING (exp)
3105 && ((tree_int_cst_lt (len, bound))
3106 || !exact))
3107 {
3108 location_t warnloc
3109 = expansion_point_location_if_in_system_header (loc);
3110
3111 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3112 exact
3113 ? G_("%K%qD specified bound %E exceeds the size %E "
3114 "of unterminated array")
3115 : G_("%K%qD specified bound %E may exceed the size "
3116 "of at most %E of unterminated array"),
3117 exp, func, bound, len))
3118 {
3119 inform (DECL_SOURCE_LOCATION (lendata.decl),
3120 "referenced argument declared here");
3121 TREE_NO_WARNING (exp) = true;
3122 return NULL_RTX;
3123 }
3124 }
3125
3126 if (!len)
3127 return NULL_RTX;
3128
3129 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3130 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3131 }
3132
3133 if (TREE_CODE (bound) != SSA_NAME)
3134 return NULL_RTX;
3135
3136 wide_int min, max;
3137 enum value_range_kind rng = get_range_info (bound, &min, &max);
3138 if (rng != VR_RANGE)
3139 return NULL_RTX;
3140
3141 if (!TREE_NO_WARNING (exp)
3142 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3143 && warning_at (loc, OPT_Wstringop_overflow_,
3144 "%K%qD specified bound [%wu, %wu] "
3145 "exceeds maximum object size %E",
3146 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3147 TREE_NO_WARNING (exp) = true;
3148
3149 bool exact = true;
3150 if (!len || TREE_CODE (len) != INTEGER_CST)
3151 {
3152 lendata.decl = unterminated_array (src, &len, &exact);
3153 if (!lendata.decl)
3154 return NULL_RTX;
3155 }
3156
3157 if (lendata.decl
3158 && !TREE_NO_WARNING (exp)
3159 && (wi::ltu_p (wi::to_wide (len), min)
3160 || !exact))
3161 {
3162 location_t warnloc
3163 = expansion_point_location_if_in_system_header (loc);
3164
3165 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3166 exact
3167 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3168 "the size %E of unterminated array")
3169 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3170 "the size of at most %E of unterminated array"),
3171 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3172 {
3173 inform (DECL_SOURCE_LOCATION (lendata.decl),
3174 "referenced argument declared here");
3175 TREE_NO_WARNING (exp) = true;
3176 }
3177 }
3178
3179 if (lendata.decl)
3180 return NULL_RTX;
3181
3182 if (wi::gtu_p (min, wi::to_wide (len)))
3183 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3184
3185 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3186 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3187 }
3188
3189 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3190 bytes from constant string DATA + OFFSET and return it as target
3191 constant. */
3192
3193 static rtx
3194 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3195 scalar_int_mode mode)
3196 {
3197 const char *str = (const char *) data;
3198
3199 gcc_assert (offset >= 0
3200 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3201 <= strlen (str) + 1));
3202
3203 return c_readstr (str + offset, mode);
3204 }
3205
3206 /* LEN specifies the length of the block for a memcpy/memset operation.
3207 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3208 In some cases we can make a very likely guess at the maximum size,
3209 which we then store in PROBABLE_MAX_SIZE. */
3210
3211 static void
3212 determine_block_size (tree len, rtx len_rtx,
3213 unsigned HOST_WIDE_INT *min_size,
3214 unsigned HOST_WIDE_INT *max_size,
3215 unsigned HOST_WIDE_INT *probable_max_size)
3216 {
3217 if (CONST_INT_P (len_rtx))
3218 {
3219 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3220 return;
3221 }
3222 else
3223 {
3224 wide_int min, max;
3225 enum value_range_kind range_type = VR_UNDEFINED;
3226
3227 /* Determine bounds from the type. */
3228 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3229 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3230 else
3231 *min_size = 0;
3232 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3233 *probable_max_size = *max_size
3234 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3235 else
3236 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3237
3238 if (TREE_CODE (len) == SSA_NAME)
3239 range_type = get_range_info (len, &min, &max);
3240 if (range_type == VR_RANGE)
3241 {
3242 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3243 *min_size = min.to_uhwi ();
3244 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3245 *probable_max_size = *max_size = max.to_uhwi ();
3246 }
3247 else if (range_type == VR_ANTI_RANGE)
3248 {
3249 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
3250 if (min == 0)
3251 {
3252 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3253 *min_size = max.to_uhwi () + 1;
3254 }
3255 /* Code like
3256
3257 int n;
3258 if (n < 100)
3259 memcpy (a, b, n)
3260
3261 produces an anti-range allowing negative values of N. We can still
3262 use the information and make a guess that N is not negative.
3263 */
3264 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3265 *probable_max_size = min.to_uhwi () - 1;
3266 }
3267 }
3268 gcc_checking_assert (*max_size <=
3269 (unsigned HOST_WIDE_INT)
3270 GET_MODE_MASK (GET_MODE (len_rtx)));
3271 }
3272
3273 /* Try to verify that the sizes and lengths of the arguments to a string
3274 manipulation function given by EXP are within valid bounds and that
3275 the operation does not lead to buffer overflow or read past the end.
3276 Arguments other than EXP may be null. When non-null, the arguments
3277 have the following meaning:
3278 DST is the destination of a copy call or NULL otherwise.
3279 SRC is the source of a copy call or NULL otherwise.
3280 DSTWRITE is the number of bytes written into the destination obtained
3281 from the user-supplied size argument to the function (such as in
3282 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3283 MAXREAD is the user-supplied bound on the length of the source sequence
3284 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3285 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3286 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3287 expression EXP is a string function call (as opposed to a memory call
3288 like memcpy). As an exception, SRCSTR can also be an integer denoting
3289 the precomputed size of the source string or object (for functions like
3290 memcpy).
3291 DSTSIZE is the size of the destination object specified by the last
3292 argument to the _chk builtins, typically resulting from the expansion
3293 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3294 DSTSIZE)).
3295
3296 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3297 SIZE_MAX.
3298
3299 If the call is successfully verified as safe return true, otherwise
3300 return false. */
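
/* For instance (an illustrative sketch), for

     char d[4];
     ...
     __builtin___memcpy_chk (d, s, 8, __builtin_object_size (d, 0));

   DSTWRITE is 8 and DSTSIZE is 4, and the checks below diagnose the
   certain overflow with -Wstringop-overflow.  */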
3301
3302 static bool
3303 check_access (tree exp, tree, tree, tree dstwrite,
3304 tree maxread, tree srcstr, tree dstsize)
3305 {
3306 int opt = OPT_Wstringop_overflow_;
3307
3308 /* The size of the largest object is half the address space, or
3309 PTRDIFF_MAX. (This is way too permissive.) */
3310 tree maxobjsize = max_object_size ();
3311
3312 /* Either the length of the source string for string functions or
3313 the size of the source object for raw memory functions. */
3314 tree slen = NULL_TREE;
3315
3316 tree range[2] = { NULL_TREE, NULL_TREE };
3317
3318 /* Set to true when the exact number of bytes written by a string
3319 function like strcpy is not known and the only thing that is
3320 known is that it must be at least one (for the terminating nul). */
3321 bool at_least_one = false;
3322 if (srcstr)
3323 {
3324 /* SRCSTR is normally a pointer to string but as a special case
3325 it can be an integer denoting the length of a string. */
3326 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3327 {
3328 /* Try to determine the range of lengths the source string
3329 refers to. If it can be determined and is less than
3330 the upper bound given by MAXREAD add one to it for
3331 the terminating nul. Otherwise, set it to one for
3332 the same reason, or to MAXREAD as appropriate. */
3333 c_strlen_data lendata = { };
3334 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3335 range[0] = lendata.minlen;
3336 range[1] = lendata.maxbound;
3337 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3338 {
3339 if (maxread && tree_int_cst_le (maxread, range[0]))
3340 range[0] = range[1] = maxread;
3341 else
3342 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3343 range[0], size_one_node);
3344
3345 if (maxread && tree_int_cst_le (maxread, range[1]))
3346 range[1] = maxread;
3347 else if (!integer_all_onesp (range[1]))
3348 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3349 range[1], size_one_node);
3350
3351 slen = range[0];
3352 }
3353 else
3354 {
3355 at_least_one = true;
3356 slen = size_one_node;
3357 }
3358 }
3359 else
3360 slen = srcstr;
3361 }
3362
3363 if (!dstwrite && !maxread)
3364 {
3365 /* When the only available piece of data is the object size,
3366 there is nothing to do. */
3367 if (!slen)
3368 return true;
3369
3370 /* Otherwise, when the length of the source sequence is known
3371 (as with strlen), set DSTWRITE to it. */
3372 if (!range[0])
3373 dstwrite = slen;
3374 }
3375
3376 if (!dstsize)
3377 dstsize = maxobjsize;
3378
3379 if (dstwrite)
3380 get_size_range (dstwrite, range);
3381
3382 tree func = get_callee_fndecl (exp);
3383
3384 /* First check the number of bytes to be written against the maximum
3385 object size. */
3386 if (range[0]
3387 && TREE_CODE (range[0]) == INTEGER_CST
3388 && tree_int_cst_lt (maxobjsize, range[0]))
3389 {
3390 if (TREE_NO_WARNING (exp))
3391 return false;
3392
3393 location_t loc = tree_nonartificial_location (exp);
3394 loc = expansion_point_location_if_in_system_header (loc);
3395
3396 bool warned;
3397 if (range[0] == range[1])
3398 warned = warning_at (loc, opt,
3399 "%K%qD specified size %E "
3400 "exceeds maximum object size %E",
3401 exp, func, range[0], maxobjsize);
3402 else
3403 warned = warning_at (loc, opt,
3404 "%K%qD specified size between %E and %E "
3405 "exceeds maximum object size %E",
3406 exp, func,
3407 range[0], range[1], maxobjsize);
3408 if (warned)
3409 TREE_NO_WARNING (exp) = true;
3410
3411 return false;
3412 }
3413
3414 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3415 constant, and in range of unsigned HOST_WIDE_INT. */
3416 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3417
3418 /* Next check the number of bytes to be written against the destination
3419 object size. */
3420 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3421 {
3422 if (range[0]
3423 && TREE_CODE (range[0]) == INTEGER_CST
3424 && ((tree_fits_uhwi_p (dstsize)
3425 && tree_int_cst_lt (dstsize, range[0]))
3426 || (dstwrite
3427 && tree_fits_uhwi_p (dstwrite)
3428 && tree_int_cst_lt (dstwrite, range[0]))))
3429 {
3430 if (TREE_NO_WARNING (exp))
3431 return false;
3432
3433 location_t loc = tree_nonartificial_location (exp);
3434 loc = expansion_point_location_if_in_system_header (loc);
3435
3436 if (dstwrite == slen && at_least_one)
3437 {
3438 /* This is a call to strcpy with a destination of 0 size
3439 and a source of unknown length. The call will write
3440 at least one byte past the end of the destination. */
3441 warning_at (loc, opt,
3442 "%K%qD writing %E or more bytes into a region "
3443 "of size %E overflows the destination",
3444 exp, func, range[0], dstsize);
3445 }
3446 else if (tree_int_cst_equal (range[0], range[1]))
3447 warning_n (loc, opt, tree_to_uhwi (range[0]),
3448 "%K%qD writing %E byte into a region "
3449 "of size %E overflows the destination",
3450 "%K%qD writing %E bytes into a region "
3451 "of size %E overflows the destination",
3452 exp, func, range[0], dstsize);
3453 else if (tree_int_cst_sign_bit (range[1]))
3454 {
3455 /* Avoid printing the upper bound if it's invalid. */
3456 warning_at (loc, opt,
3457 "%K%qD writing %E or more bytes into a region "
3458 "of size %E overflows the destination",
3459 exp, func, range[0], dstsize);
3460 }
3461 else
3462 warning_at (loc, opt,
3463 "%K%qD writing between %E and %E bytes into "
3464 "a region of size %E overflows the destination",
3465 exp, func, range[0], range[1],
3466 dstsize);
3467
3468 /* Return error when an overflow has been detected. */
3469 return false;
3470 }
3471 }
3472
3473 /* Check the maximum length of the source sequence against the size
3474 of the destination object if known, or against the maximum size
3475 of an object. */
3476 if (maxread)
3477 {
3478 get_size_range (maxread, range);
3479 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3480 {
3481 location_t loc = tree_nonartificial_location (exp);
3482 loc = expansion_point_location_if_in_system_header (loc);
3483
3484 if (tree_int_cst_lt (maxobjsize, range[0]))
3485 {
3486 if (TREE_NO_WARNING (exp))
3487 return false;
3488
3489 /* Warn about exceedingly large sizes first since that's more
3490 likely to be meaningful than reporting that the bound
3491 exceeds the object size when both are large. */
3492 if (range[0] == range[1])
3493 warning_at (loc, opt,
3494 "%K%qD specified bound %E "
3495 "exceeds maximum object size %E",
3496 exp, func,
3497 range[0], maxobjsize);
3498 else
3499 warning_at (loc, opt,
3500 "%K%qD specified bound between %E and %E "
3501 "exceeds maximum object size %E",
3502 exp, func,
3503 range[0], range[1], maxobjsize);
3504
3505 return false;
3506 }
3507
3508 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3509 {
3510 if (TREE_NO_WARNING (exp))
3511 return false;
3512
3513 if (tree_int_cst_equal (range[0], range[1]))
3514 warning_at (loc, opt,
3515 "%K%qD specified bound %E "
3516 "exceeds destination size %E",
3517 exp, func,
3518 range[0], dstsize);
3519 else
3520 warning_at (loc, opt,
3521 "%K%qD specified bound between %E and %E "
3522 "exceeds destination size %E",
3523 exp, func,
3524 range[0], range[1], dstsize);
3525 return false;
3526 }
3527 }
3528 }
3529
3530 /* Check for reading past the end of SRC. */
3531 if (slen
3532 && slen == srcstr
3533 && dstwrite && range[0]
3534 && tree_int_cst_lt (slen, range[0]))
3535 {
3536 if (TREE_NO_WARNING (exp))
3537 return false;
3538
3539 location_t loc = tree_nonartificial_location (exp);
3540
3541 if (tree_int_cst_equal (range[0], range[1]))
3542 warning_n (loc, opt, tree_to_uhwi (range[0]),
3543 "%K%qD reading %E byte from a region of size %E",
3544 "%K%qD reading %E bytes from a region of size %E",
3545 exp, func, range[0], slen);
3546 else if (tree_int_cst_sign_bit (range[1]))
3547 {
3548 /* Avoid printing the upper bound if it's invalid. */
3549 warning_at (loc, opt,
3550 "%K%qD reading %E or more bytes from a region "
3551 "of size %E",
3552 exp, func, range[0], slen);
3553 }
3554 else
3555 warning_at (loc, opt,
3556 "%K%qD reading between %E and %E bytes from a region "
3557 "of size %E",
3558 exp, func, range[0], range[1], slen);
3559 return false;
3560 }
3561
3562 return true;
3563 }
3564
3565 /* Helper to compute the size of the object referenced by the DEST
3566 expression which must have pointer type, using Object Size type
3567 OSTYPE (only the least significant 2 bits are used). Return
3568 an estimate of the size of the object if successful or NULL when
3569 the size cannot be determined. When the referenced object involves
3570 a non-constant offset in some range the returned value represents
3571 the largest size given the smallest non-negative offset in the
3572 range. If nonnull, set *PDECL to the decl of the referenced
3573 subobject if it can be determined, or to null otherwise.
3574 The function is intended for diagnostics and should not be used
3575 to influence code generation or optimization. */
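/* A sketch of the intended result (illustrative values, assuming a
   complete declared object):

     char a[7];
     compute_objsize (&a[2], 1)   -> 5   (bytes remaining past the offset)

   When the offset is a variable with a known range, the smallest
   non-negative offset is used, as described above.  */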
3576
3577 tree
3578 compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */)
3579 {
3580 tree dummy = NULL_TREE;
3581 if (!pdecl)
3582 pdecl = &dummy;
3583
3584 unsigned HOST_WIDE_INT size;
3585
3586 /* Only the two least significant bits are meaningful. */
3587 ostype &= 3;
3588
3589 if (compute_builtin_object_size (dest, ostype, &size))
3590 return build_int_cst (sizetype, size);
3591
3592 if (TREE_CODE (dest) == SSA_NAME)
3593 {
3594 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3595 if (!is_gimple_assign (stmt))
3596 return NULL_TREE;
3597
3598 dest = gimple_assign_rhs1 (stmt);
3599
3600 tree_code code = gimple_assign_rhs_code (stmt);
3601 if (code == POINTER_PLUS_EXPR)
3602 {
3603 /* compute_builtin_object_size fails for addresses with
3604 non-constant offsets. Try to determine the range of
3605 such an offset here and use it to adjust the constant
3606 size. */
3607 tree off = gimple_assign_rhs2 (stmt);
3608 if (TREE_CODE (off) == INTEGER_CST)
3609 {
3610 if (tree size = compute_objsize (dest, ostype, pdecl))
3611 {
3612 wide_int wioff = wi::to_wide (off);
3613 wide_int wisiz = wi::to_wide (size);
3614
3615 /* Ignore negative offsets for now. For others,
3616 use the lower bound as the most optimistic
3617 estimate of the (remaining) size. */
3618 if (wi::sign_mask (wioff))
3619 ;
3620 else if (wi::ltu_p (wioff, wisiz))
3621 return wide_int_to_tree (TREE_TYPE (size),
3622 wi::sub (wisiz, wioff));
3623 else
3624 return size_zero_node;
3625 }
3626 }
3627 else if (TREE_CODE (off) == SSA_NAME
3628 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3629 {
3630 wide_int min, max;
3631 enum value_range_kind rng = get_range_info (off, &min, &max);
3632
3633 if (rng == VR_RANGE)
3634 {
3635 if (tree size = compute_objsize (dest, ostype, pdecl))
3636 {
3637 wide_int wisiz = wi::to_wide (size);
3638
3639 /* Ignore negative offsets for now. For others,
3640 use the lower bound as the most optimistic
3641 estimate of the (remaining) size. */
3642 if (wi::sign_mask (min)
3643 || wi::sign_mask (max))
3644 ;
3645 else if (wi::ltu_p (min, wisiz))
3646 return wide_int_to_tree (TREE_TYPE (size),
3647 wi::sub (wisiz, min));
3648 else
3649 return size_zero_node;
3650 }
3651 }
3652 }
3653 }
3654 else if (code != ADDR_EXPR)
3655 return NULL_TREE;
3656 }
3657
3658 /* Unless computing the largest size (for memcpy and other raw memory
3659 functions), try to determine the size of the object from its type. */
3660 if (!ostype)
3661 return NULL_TREE;
3662
3663 if (TREE_CODE (dest) == ARRAY_REF
3664 || TREE_CODE (dest) == MEM_REF)
3665 {
3666 tree ref = TREE_OPERAND (dest, 0);
3667 tree off = TREE_OPERAND (dest, 1);
3668 if (tree size = compute_objsize (ref, ostype, pdecl))
3669 {
3670 /* If the declaration of the destination object is known
3671 to have zero size, return zero. */
3672 if (integer_zerop (size))
3673 return integer_zero_node;
3674
3675 if (TREE_CODE (off) != INTEGER_CST
3676 || TREE_CODE (size) != INTEGER_CST)
3677 return NULL_TREE;
3678
3679 if (TREE_CODE (dest) == ARRAY_REF)
3680 {
3681 tree eltype = TREE_TYPE (dest);
3682 if (tree tpsize = TYPE_SIZE_UNIT (eltype))
3683 off = fold_build2 (MULT_EXPR, size_type_node, off, tpsize);
3684 else
3685 return NULL_TREE;
3686 }
3687
3688 if (tree_int_cst_lt (off, size))
3689 return fold_build2 (MINUS_EXPR, size_type_node, size, off);
3690 return integer_zero_node;
3691 }
3692
3693 return NULL_TREE;
3694 }
3695
3696 if (TREE_CODE (dest) == COMPONENT_REF)
3697 {
3698 *pdecl = TREE_OPERAND (dest, 1);
3699 return component_ref_size (dest);
3700 }
3701
3702 if (TREE_CODE (dest) != ADDR_EXPR)
3703 return NULL_TREE;
3704
3705 tree ref = TREE_OPERAND (dest, 0);
3706 if (DECL_P (ref))
3707 {
3708 *pdecl = ref;
3709 return DECL_SIZE_UNIT (ref);
3710 }
3711
3712 tree type = TREE_TYPE (dest);
3713 if (TREE_CODE (type) == POINTER_TYPE)
3714 type = TREE_TYPE (type);
3715
3716 type = TYPE_MAIN_VARIANT (type);
3717
3718 if (TREE_CODE (type) == ARRAY_TYPE
3719 && !array_at_struct_end_p (ref))
3720 {
3721 if (tree size = TYPE_SIZE_UNIT (type))
3722 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3723 }
3724
3725 return NULL_TREE;
3726 }
3727
3728 /* Helper to determine and check the sizes of the source and the destination
3729 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3730 call expression, DEST is the destination argument, SRC is the source
3731 argument or null, and SIZE is the number of bytes. Use Object Size type-0
3732 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3733 (no overflow or invalid sizes), false otherwise. */
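/* For example (hypothetical user code), given

     char d[4];
     memcpy (d, s, 8);

   the type-0 size of the destination is 4, which is smaller than the
   8 bytes to be written, so check_access warns and this helper
   returns false.  */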
3734
3735 static bool
3736 check_memop_access (tree exp, tree dest, tree src, tree size)
3737 {
3738 /* For functions like memset and memcpy that operate on raw memory
3739 try to determine the size of the largest source and destination
3740 object using type-0 Object Size regardless of the object size
3741 type specified by the option. */
3742 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3743 tree dstsize = compute_objsize (dest, 0);
3744
3745 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3746 srcsize, dstsize);
3747 }
3748
3749 /* Validate memchr arguments without performing any expansion.
3750 Return NULL_RTX. */
3751
3752 static rtx
3753 expand_builtin_memchr (tree exp, rtx)
3754 {
3755 if (!validate_arglist (exp,
3756 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3757 return NULL_RTX;
3758
3759 tree arg1 = CALL_EXPR_ARG (exp, 0);
3760 tree len = CALL_EXPR_ARG (exp, 2);
3761
3762 /* Diagnose calls where the specified length exceeds the size
3763 of the object. */
3764 if (warn_stringop_overflow)
3765 {
3766 tree size = compute_objsize (arg1, 0);
3767 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3768 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3769 }
3770
3771 return NULL_RTX;
3772 }
3773
3774 /* Expand a call EXP to the memcpy builtin.
3775 Return NULL_RTX if we failed; the caller should emit a normal call,
3776 otherwise try to get the result in TARGET, if convenient (and in
3777 mode MODE if that's convenient). */
3778
3779 static rtx
3780 expand_builtin_memcpy (tree exp, rtx target)
3781 {
3782 if (!validate_arglist (exp,
3783 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3784 return NULL_RTX;
3785
3786 tree dest = CALL_EXPR_ARG (exp, 0);
3787 tree src = CALL_EXPR_ARG (exp, 1);
3788 tree len = CALL_EXPR_ARG (exp, 2);
3789
3790 check_memop_access (exp, dest, src, len);
3791
3792 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3793 /*retmode=*/ RETURN_BEGIN);
3794 }
3795
3796 /* Check a call EXP to the memmove built-in for validity.
3797 Return NULL_RTX on both success and failure. */
3798
3799 static rtx
3800 expand_builtin_memmove (tree exp, rtx)
3801 {
3802 if (!validate_arglist (exp,
3803 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3804 return NULL_RTX;
3805
3806 tree dest = CALL_EXPR_ARG (exp, 0);
3807 tree src = CALL_EXPR_ARG (exp, 1);
3808 tree len = CALL_EXPR_ARG (exp, 2);
3809
3810 check_memop_access (exp, dest, src, len);
3811
3812 return NULL_RTX;
3813 }
3814
3815 /* Expand a call EXP to the mempcpy builtin.
3816 Return NULL_RTX if we failed; the caller should emit a normal call,
3817 otherwise try to get the result in TARGET, if convenient (and in
3818 mode MODE if that's convenient). */
3819
3820 static rtx
3821 expand_builtin_mempcpy (tree exp, rtx target)
3822 {
3823 if (!validate_arglist (exp,
3824 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3825 return NULL_RTX;
3826
3827 tree dest = CALL_EXPR_ARG (exp, 0);
3828 tree src = CALL_EXPR_ARG (exp, 1);
3829 tree len = CALL_EXPR_ARG (exp, 2);
3830
3831 /* Policy does not generally allow using compute_objsize (which
3832 is used internally by check_memop_access) to change code generation
3833 or drive optimization decisions.
3834
3835 In this instance it is safe because the code we generate has
3836 the same semantics regardless of the return value of
3837 check_memop_access. Exactly the same amount of data is copied
3838 and the return value is exactly the same in both cases.
3839
3840 Furthermore, check_memop_access always uses mode 0 for the call to
3841 compute_objsize, so the imprecise nature of compute_objsize is
3842 avoided. */
3843
3844 /* Avoid expanding mempcpy into memcpy when the call is determined
3845 to overflow the buffer. This also prevents the same overflow
3846 from being diagnosed again when expanding memcpy. */
3847 if (!check_memop_access (exp, dest, src, len))
3848 return NULL_RTX;
3849
3850 return expand_builtin_mempcpy_args (dest, src, len,
3851 target, exp, /*retmode=*/ RETURN_END);
3852 }
3853
3854 /* Helper function to do the actual work for expand of memory copy family
3855 functions (memcpy, mempcpy, stpcpy). The expansion should assign LEN bytes
3856 of memory from SRC to DEST and assign to TARGET if convenient. Return
3857 value is based on RETMODE argument. */
3858
3859 static rtx
3860 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3861 rtx target, tree exp, memop_ret retmode)
3862 {
3863 const char *src_str;
3864 unsigned int src_align = get_pointer_alignment (src);
3865 unsigned int dest_align = get_pointer_alignment (dest);
3866 rtx dest_mem, src_mem, dest_addr, len_rtx;
3867 HOST_WIDE_INT expected_size = -1;
3868 unsigned int expected_align = 0;
3869 unsigned HOST_WIDE_INT min_size;
3870 unsigned HOST_WIDE_INT max_size;
3871 unsigned HOST_WIDE_INT probable_max_size;
3872
3873 bool is_move_done;
3874
3875 /* If DEST is not a pointer type, call the normal function. */
3876 if (dest_align == 0)
3877 return NULL_RTX;
3878
3879 /* If SRC is not a pointer type, don't do this
3880 operation in-line. */
3881 if (src_align == 0)
3882 return NULL_RTX;
3883
3884 if (currently_expanding_gimple_stmt)
3885 stringop_block_profile (currently_expanding_gimple_stmt,
3886 &expected_align, &expected_size);
3887
3888 if (expected_align < dest_align)
3889 expected_align = dest_align;
3890 dest_mem = get_memory_rtx (dest, len);
3891 set_mem_align (dest_mem, dest_align);
3892 len_rtx = expand_normal (len);
3893 determine_block_size (len, len_rtx, &min_size, &max_size,
3894 &probable_max_size);
3895 src_str = c_getstr (src);
3896
3897 /* If SRC is a string constant and block move would be done
3898 by pieces, we can avoid loading the string from memory
3899 and instead store only the computed constants. */
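/* E.g. memcpy (buf, "hi", 3) can be emitted as direct stores of the
   bytes 'h', 'i', '\0' (illustrative; whether this happens depends on
   can_store_by_pieces for the target).  */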
3900 if (src_str
3901 && CONST_INT_P (len_rtx)
3902 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3903 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3904 CONST_CAST (char *, src_str),
3905 dest_align, false))
3906 {
3907 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3908 builtin_memcpy_read_str,
3909 CONST_CAST (char *, src_str),
3910 dest_align, false, retmode);
3911 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3912 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3913 return dest_mem;
3914 }
3915
3916 src_mem = get_memory_rtx (src, len);
3917 set_mem_align (src_mem, src_align);
3918
3919 /* Copy word part most expediently. */
3920 enum block_op_methods method = BLOCK_OP_NORMAL;
3921 if (CALL_EXPR_TAILCALL (exp)
3922 && (retmode == RETURN_BEGIN || target == const0_rtx))
3923 method = BLOCK_OP_TAILCALL;
3924 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3925 && retmode == RETURN_END
3926 && target != const0_rtx);
3927 if (use_mempcpy_call)
3928 method = BLOCK_OP_NO_LIBCALL_RET;
3929 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3930 expected_align, expected_size,
3931 min_size, max_size, probable_max_size,
3932 use_mempcpy_call, &is_move_done);
3933
3934 /* Bail out when a mempcpy call would be expanded as a libcall and
3935 when the target provides a fast implementation
3936 of the mempcpy routine. */
3937 if (!is_move_done)
3938 return NULL_RTX;
3939
3940 if (dest_addr == pc_rtx)
3941 return NULL_RTX;
3942
3943 if (dest_addr == 0)
3944 {
3945 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3946 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3947 }
3948
3949 if (retmode != RETURN_BEGIN && target != const0_rtx)
3950 {
3951 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3952 /* stpcpy returns a pointer to the last byte. */
3953 if (retmode == RETURN_END_MINUS_ONE)
3954 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3955 }
3956
3957 return dest_addr;
3958 }
3959
3960 static rtx
3961 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3962 rtx target, tree orig_exp, memop_ret retmode)
3963 {
3964 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3965 retmode);
3966 }
3967
3968 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3969 we failed, the caller should emit a normal call, otherwise try to
3970 get the result in TARGET, if convenient.
3971 Return value is based on RETMODE argument. */
3972
3973 static rtx
3974 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3975 {
3976 class expand_operand ops[3];
3977 rtx dest_mem;
3978 rtx src_mem;
3979
3980 if (!targetm.have_movstr ())
3981 return NULL_RTX;
3982
3983 dest_mem = get_memory_rtx (dest, NULL);
3984 src_mem = get_memory_rtx (src, NULL);
3985 if (retmode == RETURN_BEGIN)
3986 {
3987 target = force_reg (Pmode, XEXP (dest_mem, 0));
3988 dest_mem = replace_equiv_address (dest_mem, target);
3989 }
3990
3991 create_output_operand (&ops[0],
3992 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3993 create_fixed_operand (&ops[1], dest_mem);
3994 create_fixed_operand (&ops[2], src_mem);
3995 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3996 return NULL_RTX;
3997
3998 if (retmode != RETURN_BEGIN && target != const0_rtx)
3999 {
4000 target = ops[0].value;
4001 /* movstr is supposed to set end to the address of the NUL
4002 terminator. If the caller requested a mempcpy-like return value,
4003 adjust it. */
4004 if (retmode == RETURN_END)
4005 {
4006 rtx tem = plus_constant (GET_MODE (target),
4007 gen_lowpart (GET_MODE (target), target), 1);
4008 emit_move_insn (target, force_operand (tem, NULL_RTX));
4009 }
4010 }
4011 return target;
4012 }
4013
4014 /* Do some very basic size validation of a call to the strcat builtin
4015 given by EXP. Return NULL_RTX to have the built-in expand to a call
4016 to the library function. */
4017
4018 static rtx
4019 expand_builtin_strcat (tree exp, rtx)
4020 {
4021 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4022 || !warn_stringop_overflow)
4023 return NULL_RTX;
4024
4025 tree dest = CALL_EXPR_ARG (exp, 0);
4026 tree src = CALL_EXPR_ARG (exp, 1);
4027
4028 /* There is no way here to determine the length of the string in
4029 the destination to which the SRC string is being appended so
4030 just diagnose cases when the source string is longer than
4031 the destination object. */
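/* For example (hypothetical user code):

     char d[4];
     strcat (d, "abcdefgh");   // 9 bytes including the nul vs. a 4-byte destination

   is diagnosed below; whatever is already stored in D cannot be
   accounted for here.  */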
4032
4033 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4034
4035 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4036 destsize);
4037
4038 return NULL_RTX;
4039 }
4040
4041 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4042 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4043 try to get the result in TARGET, if convenient (and in mode MODE if that's
4044 convenient). */
4045
4046 static rtx
4047 expand_builtin_strcpy (tree exp, rtx target)
4048 {
4049 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4050 return NULL_RTX;
4051
4052 tree dest = CALL_EXPR_ARG (exp, 0);
4053 tree src = CALL_EXPR_ARG (exp, 1);
4054
4055 if (warn_stringop_overflow)
4056 {
4057 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4058 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4059 src, destsize);
4060 }
4061
4062 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4063 {
4064 /* Check to see if the argument was declared attribute nonstring
4065 and if so, issue a warning since at this point it's not known
4066 to be nul-terminated. */
4067 tree fndecl = get_callee_fndecl (exp);
4068 maybe_warn_nonstring_arg (fndecl, exp);
4069 return ret;
4070 }
4071
4072 return NULL_RTX;
4073 }
4074
4075 /* Helper function to do the actual work for expand_builtin_strcpy. The
4076 arguments to the builtin_strcpy call DEST and SRC are broken out
4077 so that this can also be called without constructing an actual CALL_EXPR.
4078 The other arguments and return value are the same as for
4079 expand_builtin_strcpy. */
4080
4081 static rtx
4082 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4083 {
4084 /* Detect strcpy calls with unterminated arrays. */
4085 if (tree nonstr = unterminated_array (src))
4086 {
4087 /* NONSTR refers to the non-nul terminated constant array. */
4088 if (!TREE_NO_WARNING (exp))
4089 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4090 return NULL_RTX;
4091 }
4092
4093 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4094 }
4095
4096 /* Expand a call EXP to the stpcpy builtin.
4097 Return NULL_RTX if we failed; the caller should emit a normal call,
4098 otherwise try to get the result in TARGET, if convenient (and in
4099 mode MODE if that's convenient). */
4100
4101 static rtx
4102 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4103 {
4104 tree dst, src;
4105 location_t loc = EXPR_LOCATION (exp);
4106
4107 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4108 return NULL_RTX;
4109
4110 dst = CALL_EXPR_ARG (exp, 0);
4111 src = CALL_EXPR_ARG (exp, 1);
4112
4113 if (warn_stringop_overflow)
4114 {
4115 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4116 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4117 src, destsize);
4118 }
4119
4120 /* If return value is ignored, transform stpcpy into strcpy. */
4121 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4122 {
4123 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4124 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4125 return expand_expr (result, target, mode, EXPAND_NORMAL);
4126 }
4127 else
4128 {
4129 tree len, lenp1;
4130 rtx ret;
4131
4132 /* Ensure we get an actual string whose length can be evaluated at
4133 compile-time, not an expression containing a string. This is
4134 because the latter will potentially produce pessimized code
4135 when used to produce the return value. */
4136 c_strlen_data lendata = { };
4137 if (!c_getstr (src, NULL)
4138 || !(len = c_strlen (src, 0, &lendata, 1)))
4139 return expand_movstr (dst, src, target,
4140 /*retmode=*/ RETURN_END_MINUS_ONE);
4141
4142 if (lendata.decl && !TREE_NO_WARNING (exp))
4143 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4144
4145 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4146 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4147 target, exp,
4148 /*retmode=*/ RETURN_END_MINUS_ONE);
4149
4150 if (ret)
4151 return ret;
4152
4153 if (TREE_CODE (len) == INTEGER_CST)
4154 {
4155 rtx len_rtx = expand_normal (len);
4156
4157 if (CONST_INT_P (len_rtx))
4158 {
4159 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4160
4161 if (ret)
4162 {
4163 if (! target)
4164 {
4165 if (mode != VOIDmode)
4166 target = gen_reg_rtx (mode);
4167 else
4168 target = gen_reg_rtx (GET_MODE (ret));
4169 }
4170 if (GET_MODE (target) != GET_MODE (ret))
4171 ret = gen_lowpart (GET_MODE (target), ret);
4172
4173 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4174 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4175 gcc_assert (ret);
4176
4177 return target;
4178 }
4179 }
4180 }
4181
4182 return expand_movstr (dst, src, target,
4183 /*retmode=*/ RETURN_END_MINUS_ONE);
4184 }
4185 }
4186
4187 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4188 arguments while being careful to avoid duplicate warnings (which could
4189 be issued if the expander were to expand the call, resulting in it
4190 being emitted in expand_call()). */
4191
4192 static rtx
4193 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4194 {
4195 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4196 {
4197 /* The call has been successfully expanded. Check for nonstring
4198 arguments and issue warnings as appropriate. */
4199 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4200 return ret;
4201 }
4202
4203 return NULL_RTX;
4204 }
4205
4206 /* Check a call EXP to the stpncpy built-in for validity.
4207 Return NULL_RTX on both success and failure. */
4208
4209 static rtx
4210 expand_builtin_stpncpy (tree exp, rtx)
4211 {
4212 if (!validate_arglist (exp,
4213 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4214 || !warn_stringop_overflow)
4215 return NULL_RTX;
4216
4217 /* The source and destination of the call. */
4218 tree dest = CALL_EXPR_ARG (exp, 0);
4219 tree src = CALL_EXPR_ARG (exp, 1);
4220
4221 /* The exact number of bytes to write (not the maximum). */
4222 tree len = CALL_EXPR_ARG (exp, 2);
4223
4224 /* The size of the destination object. */
4225 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4226
4227 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4228
4229 return NULL_RTX;
4230 }
4231
4232 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4233 bytes from constant string DATA + OFFSET and return it as target
4234 constant. */
4235
4236 rtx
4237 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4238 scalar_int_mode mode)
4239 {
4240 const char *str = (const char *) data;
4241
4242 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4243 return const0_rtx;
4244
4245 return c_readstr (str + offset, mode);
4246 }
4247
4248 /* Helper to check the sizes of sequences and the destination of calls
4249 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4250 success (no overflow or invalid sizes), false otherwise. */
4251
4252 static bool
4253 check_strncat_sizes (tree exp, tree objsize)
4254 {
4255 tree dest = CALL_EXPR_ARG (exp, 0);
4256 tree src = CALL_EXPR_ARG (exp, 1);
4257 tree maxread = CALL_EXPR_ARG (exp, 2);
4258
4259 /* Try to determine the range of lengths that the source expression
4260 refers to. */
4261 c_strlen_data lendata = { };
4262 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4263
4264 /* Try to verify that the destination is big enough for the shortest
4265 string. */
4266
4267 if (!objsize && warn_stringop_overflow)
4268 {
4269 /* If it hasn't been provided by __strncat_chk, try to determine
4270 the size of the destination object into which the source is
4271 being copied. */
4272 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4273 }
4274
4275 /* Add one for the terminating nul. */
4276 tree srclen = (lendata.minlen
4277 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4278 size_one_node)
4279 : NULL_TREE);
4280
4281 /* The strncat function copies at most MAXREAD bytes and always appends
4282 the terminating nul so the specified upper bound should never be equal
4283 to (or greater than) the size of the destination. */
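/* E.g. (hypothetical user code):

     char d[8];
     strncat (d, s, sizeof d);   // bound 8 equals the destination size

   is diagnosed; a safe bound leaves room for the nul, e.g.
   sizeof d - strlen (d) - 1.  */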
4284 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4285 && tree_int_cst_equal (objsize, maxread))
4286 {
4287 location_t loc = tree_nonartificial_location (exp);
4288 loc = expansion_point_location_if_in_system_header (loc);
4289
4290 warning_at (loc, OPT_Wstringop_overflow_,
4291 "%K%qD specified bound %E equals destination size",
4292 exp, get_callee_fndecl (exp), maxread);
4293
4294 return false;
4295 }
4296
4297 if (!srclen
4298 || (maxread && tree_fits_uhwi_p (maxread)
4299 && tree_fits_uhwi_p (srclen)
4300 && tree_int_cst_lt (maxread, srclen)))
4301 srclen = maxread;
4302
4303 /* The number of bytes to write is LEN but check_access will also
4304 check SRCLEN if LEN's value isn't known. */
4305 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4306 objsize);
4307 }
4308
4309 /* Similar to expand_builtin_strcat, do some very basic size validation
4310 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4311 the built-in expand to a call to the library function. */
4312
4313 static rtx
4314 expand_builtin_strncat (tree exp, rtx)
4315 {
4316 if (!validate_arglist (exp,
4317 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4318 || !warn_stringop_overflow)
4319 return NULL_RTX;
4320
4321 tree dest = CALL_EXPR_ARG (exp, 0);
4322 tree src = CALL_EXPR_ARG (exp, 1);
4323 /* The upper bound on the number of bytes to write. */
4324 tree maxread = CALL_EXPR_ARG (exp, 2);
4325 /* The length of the source sequence. */
4326 tree slen = c_strlen (src, 1);
4327
4328 /* Try to determine the range of lengths that the source expression
4329 refers to. Since the lengths are only used for warning and not
4330 for code generation disable strict mode below. */
4331 tree maxlen = slen;
4332 if (!maxlen)
4333 {
4334 c_strlen_data lendata = { };
4335 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4336 maxlen = lendata.maxbound;
4337 }
4338
4339 /* Try to verify that the destination is big enough for the shortest
4340 string. First try to determine the size of the destination object
4341 into which the source is being copied. */
4342 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4343
4344 /* Add one for the terminating nul. */
4345 tree srclen = (maxlen
4346 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4347 size_one_node)
4348 : NULL_TREE);
4349
4350 /* The strncat function copies at most MAXREAD bytes and always appends
4351 the terminating nul so the specified upper bound should never be equal
4352 to (or greater than) the size of the destination. */
4353 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4354 && tree_int_cst_equal (destsize, maxread))
4355 {
4356 location_t loc = tree_nonartificial_location (exp);
4357 loc = expansion_point_location_if_in_system_header (loc);
4358
4359 warning_at (loc, OPT_Wstringop_overflow_,
4360 "%K%qD specified bound %E equals destination size",
4361 exp, get_callee_fndecl (exp), maxread);
4362
4363 return NULL_RTX;
4364 }
4365
4366 if (!srclen
4367 || (maxread && tree_fits_uhwi_p (maxread)
4368 && tree_fits_uhwi_p (srclen)
4369 && tree_int_cst_lt (maxread, srclen)))
4370 srclen = maxread;
4371
4372 /* The number of bytes to write is SRCLEN. */
4373 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4374
4375 return NULL_RTX;
4376 }
4377
4378 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4379 NULL_RTX if we failed; the caller should emit a normal call. */
4380
4381 static rtx
4382 expand_builtin_strncpy (tree exp, rtx target)
4383 {
4384 location_t loc = EXPR_LOCATION (exp);
4385
4386 if (validate_arglist (exp,
4387 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4388 {
4389 tree dest = CALL_EXPR_ARG (exp, 0);
4390 tree src = CALL_EXPR_ARG (exp, 1);
4391 /* The number of bytes to write (not the maximum). */
4392 tree len = CALL_EXPR_ARG (exp, 2);
4393 /* The length of the source sequence. */
4394 tree slen = c_strlen (src, 1);
4395
4396 if (warn_stringop_overflow)
4397 {
4398 tree destsize = compute_objsize (dest,
4399 warn_stringop_overflow - 1);
4400
4401 /* The number of bytes to write is LEN but check_access will also
4402 check SLEN if LEN's value isn't known. */
4403 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4404 destsize);
4405 }
4406
4407 /* We must be passed a constant len and src parameter. */
4408 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4409 return NULL_RTX;
4410
4411 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4412
4413 /* We're required to pad with trailing zeros if the requested
4414 len is greater than strlen(s2)+1. In that case try to
4415 use store_by_pieces; if that fails, punt. */
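/* For instance, strncpy (d, "ab", 8) must store 'a', 'b' and six nul
   bytes; when the checks below succeed this is emitted as direct
   stores of those constants (illustrative example).  */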
4416 if (tree_int_cst_lt (slen, len))
4417 {
4418 unsigned int dest_align = get_pointer_alignment (dest);
4419 const char *p = c_getstr (src);
4420 rtx dest_mem;
4421
4422 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4423 || !can_store_by_pieces (tree_to_uhwi (len),
4424 builtin_strncpy_read_str,
4425 CONST_CAST (char *, p),
4426 dest_align, false))
4427 return NULL_RTX;
4428
4429 dest_mem = get_memory_rtx (dest, len);
4430 store_by_pieces (dest_mem, tree_to_uhwi (len),
4431 builtin_strncpy_read_str,
4432 CONST_CAST (char *, p), dest_align, false,
4433 RETURN_BEGIN);
4434 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4435 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4436 return dest_mem;
4437 }
4438 }
4439 return NULL_RTX;
4440 }
4441
4442 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4443 bytes from constant string DATA + OFFSET and return it as target
4444 constant. */
4445
4446 rtx
4447 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4448 scalar_int_mode mode)
4449 {
4450 const char *c = (const char *) data;
4451 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4452
4453 memset (p, *c, GET_MODE_SIZE (mode));
4454
4455 return c_readstr (p, mode);
4456 }
4457
4458 /* Callback routine for store_by_pieces. Return the RTL of a register
4459 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4460 char value given in the RTL register data. For example, if mode is
4461 4 bytes wide, return the RTL for 0x01010101*data. */
4462
4463 static rtx
4464 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4465 scalar_int_mode mode)
4466 {
4467 rtx target, coeff;
4468 size_t size;
4469 char *p;
4470
4471 size = GET_MODE_SIZE (mode);
4472 if (size == 1)
4473 return (rtx) data;
4474
4475 p = XALLOCAVEC (char, size);
4476 memset (p, 1, size);
4477 coeff = c_readstr (p, mode);
4478
4479 target = convert_to_mode (mode, (rtx) data, 1);
4480 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4481 return force_reg (mode, target);
4482 }
4483
4484 /* Expand expression EXP, which is a call to the memset builtin. Return
4485 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4486 try to get the result in TARGET, if convenient (and in mode MODE if that's
4487 convenient). */
4488
4489 static rtx
4490 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4491 {
4492 if (!validate_arglist (exp,
4493 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4494 return NULL_RTX;
4495
4496 tree dest = CALL_EXPR_ARG (exp, 0);
4497 tree val = CALL_EXPR_ARG (exp, 1);
4498 tree len = CALL_EXPR_ARG (exp, 2);
4499
4500 check_memop_access (exp, dest, NULL_TREE, len);
4501
4502 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4503 }
4504
4505 /* Helper function to do the actual work for expand_builtin_memset. The
4506 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4507 so that this can also be called without constructing an actual CALL_EXPR.
4508 The other arguments and return value are the same as for
4509 expand_builtin_memset. */
4510
4511 static rtx
4512 expand_builtin_memset_args (tree dest, tree val, tree len,
4513 rtx target, machine_mode mode, tree orig_exp)
4514 {
4515 tree fndecl, fn;
4516 enum built_in_function fcode;
4517 machine_mode val_mode;
4518 char c;
4519 unsigned int dest_align;
4520 rtx dest_mem, dest_addr, len_rtx;
4521 HOST_WIDE_INT expected_size = -1;
4522 unsigned int expected_align = 0;
4523 unsigned HOST_WIDE_INT min_size;
4524 unsigned HOST_WIDE_INT max_size;
4525 unsigned HOST_WIDE_INT probable_max_size;
4526
4527 dest_align = get_pointer_alignment (dest);
4528
4529 /* If DEST is not a pointer type, don't do this operation in-line. */
4530 if (dest_align == 0)
4531 return NULL_RTX;
4532
4533 if (currently_expanding_gimple_stmt)
4534 stringop_block_profile (currently_expanding_gimple_stmt,
4535 &expected_align, &expected_size);
4536
4537 if (expected_align < dest_align)
4538 expected_align = dest_align;
4539
4540 /* If the LEN parameter is zero, return DEST. */
4541 if (integer_zerop (len))
4542 {
4543 /* Evaluate and ignore VAL in case it has side-effects. */
4544 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4545 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4546 }
4547
4548 /* Stabilize the arguments in case we fail. */
4549 dest = builtin_save_expr (dest);
4550 val = builtin_save_expr (val);
4551 len = builtin_save_expr (len);
4552
4553 len_rtx = expand_normal (len);
4554 determine_block_size (len, len_rtx, &min_size, &max_size,
4555 &probable_max_size);
4556 dest_mem = get_memory_rtx (dest, len);
4557 val_mode = TYPE_MODE (unsigned_char_type_node);
4558
4559 if (TREE_CODE (val) != INTEGER_CST)
4560 {
4561 rtx val_rtx;
4562
4563 val_rtx = expand_normal (val);
4564 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4565
4566 /* Assume that we can memset by pieces if we can store
4567 the coefficients by pieces (in the required modes).
4568 We can't pass builtin_memset_gen_str as that emits RTL. */
4569 c = 1;
4570 if (tree_fits_uhwi_p (len)
4571 && can_store_by_pieces (tree_to_uhwi (len),
4572 builtin_memset_read_str, &c, dest_align,
4573 true))
4574 {
4575 val_rtx = force_reg (val_mode, val_rtx);
4576 store_by_pieces (dest_mem, tree_to_uhwi (len),
4577 builtin_memset_gen_str, val_rtx, dest_align,
4578 true, RETURN_BEGIN);
4579 }
4580 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4581 dest_align, expected_align,
4582 expected_size, min_size, max_size,
4583 probable_max_size))
4584 goto do_libcall;
4585
4586 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4587 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4588 return dest_mem;
4589 }
4590
4591 if (target_char_cast (val, &c))
4592 goto do_libcall;
4593
4594 if (c)
4595 {
4596 if (tree_fits_uhwi_p (len)
4597 && can_store_by_pieces (tree_to_uhwi (len),
4598 builtin_memset_read_str, &c, dest_align,
4599 true))
4600 store_by_pieces (dest_mem, tree_to_uhwi (len),
4601 builtin_memset_read_str, &c, dest_align, true,
4602 RETURN_BEGIN);
4603 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4604 gen_int_mode (c, val_mode),
4605 dest_align, expected_align,
4606 expected_size, min_size, max_size,
4607 probable_max_size))
4608 goto do_libcall;
4609
4610 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4611 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4612 return dest_mem;
4613 }
4614
4615 set_mem_align (dest_mem, dest_align);
4616 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4617 CALL_EXPR_TAILCALL (orig_exp)
4618 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4619 expected_align, expected_size,
4620 min_size, max_size,
4621 probable_max_size);
4622
4623 if (dest_addr == 0)
4624 {
4625 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4626 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4627 }
4628
4629 return dest_addr;
4630
4631 do_libcall:
4632 fndecl = get_callee_fndecl (orig_exp);
4633 fcode = DECL_FUNCTION_CODE (fndecl);
4634 if (fcode == BUILT_IN_MEMSET)
4635 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4636 dest, val, len);
4637 else if (fcode == BUILT_IN_BZERO)
4638 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4639 dest, len);
4640 else
4641 gcc_unreachable ();
4642 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4643 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4644 return expand_call (fn, target, target == const0_rtx);
4645 }
4646
4647 /* Expand expression EXP, which is a call to the bzero builtin. Return
4648 NULL_RTX if we failed; the caller should emit a normal call. */
4649
4650 static rtx
4651 expand_builtin_bzero (tree exp)
4652 {
4653 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4654 return NULL_RTX;
4655
4656 tree dest = CALL_EXPR_ARG (exp, 0);
4657 tree size = CALL_EXPR_ARG (exp, 1);
4658
4659 check_memop_access (exp, dest, NULL_TREE, size);
4660
4661 /* New argument list transforming bzero(ptr x, int y) to
4662 memset(ptr x, int 0, size_t y). This is done this way
4663 so that if it isn't expanded inline, we fall back to
4664 calling bzero instead of memset. */
4665
4666 location_t loc = EXPR_LOCATION (exp);
4667
4668 return expand_builtin_memset_args (dest, integer_zero_node,
4669 fold_convert_loc (loc,
4670 size_type_node, size),
4671 const0_rtx, VOIDmode, exp);
4672 }
4673
4674 /* Try to expand cmpstr operation ICODE with the given operands.
4675 Return the result rtx on success, otherwise return null. */
4676
4677 static rtx
4678 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4679 HOST_WIDE_INT align)
4680 {
4681 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4682
4683 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4684 target = NULL_RTX;
4685
4686 class expand_operand ops[4];
4687 create_output_operand (&ops[0], target, insn_mode);
4688 create_fixed_operand (&ops[1], arg1_rtx);
4689 create_fixed_operand (&ops[2], arg2_rtx);
4690 create_integer_operand (&ops[3], align);
4691 if (maybe_expand_insn (icode, 4, ops))
4692 return ops[0].value;
4693 return NULL_RTX;
4694 }
4695
4696 /* Expand expression EXP, which is a call to the memcmp built-in function.
4697 Return NULL_RTX if we failed and the caller should emit a normal call,
4698 otherwise try to get the result in TARGET, if convenient.
4699 RESULT_EQ is true if we can relax the returned value to be either zero
4700 or nonzero, without caring about the sign. */
4701
4702 static rtx
4703 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4704 {
4705 if (!validate_arglist (exp,
4706 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4707 return NULL_RTX;
4708
4709 tree arg1 = CALL_EXPR_ARG (exp, 0);
4710 tree arg2 = CALL_EXPR_ARG (exp, 1);
4711 tree len = CALL_EXPR_ARG (exp, 2);
4712 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4713 bool no_overflow = true;
4714
4715 /* Diagnose calls where the specified length exceeds the size of either
4716 object. */
4717 tree size = compute_objsize (arg1, 0);
4718 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4719 len, /*maxread=*/NULL_TREE, size,
4720 /*objsize=*/NULL_TREE);
4721 if (no_overflow)
4722 {
4723 size = compute_objsize (arg2, 0);
4724 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4725 len, /*maxread=*/NULL_TREE, size,
4726 /*objsize=*/NULL_TREE);
4727 }
4728
4729 /* If the specified length exceeds the size of either object,
4730 call the function. */
4731 if (!no_overflow)
4732 return NULL_RTX;
4733
4734 /* Due to the performance benefit, always inline the calls first
4735 when result_eq is false. */
4736 rtx result = NULL_RTX;
4737
4738 if (!result_eq && fcode != BUILT_IN_BCMP)
4739 {
4740 result = inline_expand_builtin_string_cmp (exp, target);
4741 if (result)
4742 return result;
4743 }
4744
4745 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4746 location_t loc = EXPR_LOCATION (exp);
4747
4748 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4749 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4750
4751 /* If we don't have POINTER_TYPE, call the function. */
4752 if (arg1_align == 0 || arg2_align == 0)
4753 return NULL_RTX;
4754
4755 rtx arg1_rtx = get_memory_rtx (arg1, len);
4756 rtx arg2_rtx = get_memory_rtx (arg2, len);
4757 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4758
4759 /* Set MEM_SIZE as appropriate. */
4760 if (CONST_INT_P (len_rtx))
4761 {
4762 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4763 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4764 }
4765
4766 by_pieces_constfn constfn = NULL;
4767
4768 const char *src_str = c_getstr (arg2);
4769 if (result_eq && src_str == NULL)
4770 {
4771 src_str = c_getstr (arg1);
4772 if (src_str != NULL)
4773 std::swap (arg1_rtx, arg2_rtx);
4774 }
4775
4776 /* If SRC is a string constant and the block comparison would be done
4777 by pieces, we can avoid loading the string from memory
4778 and instead use the computed constants. */
4779 if (src_str
4780 && CONST_INT_P (len_rtx)
4781 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4782 constfn = builtin_memcpy_read_str;
4783
4784 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4785 TREE_TYPE (len), target,
4786 result_eq, constfn,
4787 CONST_CAST (char *, src_str));
4788
4789 if (result)
4790 {
4791 /* Return the value in the proper mode for this function. */
4792 if (GET_MODE (result) == mode)
4793 return result;
4794
4795 if (target != 0)
4796 {
4797 convert_move (target, result, 0);
4798 return target;
4799 }
4800
4801 return convert_to_mode (mode, result, 0);
4802 }
4803
4804 return NULL_RTX;
4805 }
4806
4807 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4808 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4809 try to get the result in TARGET, if convenient. */
4810
4811 static rtx
4812 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4813 {
4814 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4815 return NULL_RTX;
4816
4817 /* Due to the performance benefit, always inline the calls first. */
4818 rtx result = NULL_RTX;
4819 result = inline_expand_builtin_string_cmp (exp, target);
4820 if (result)
4821 return result;
4822
4823 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4824 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4825 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4826 return NULL_RTX;
4827
4828 tree arg1 = CALL_EXPR_ARG (exp, 0);
4829 tree arg2 = CALL_EXPR_ARG (exp, 1);
4830
4831 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4832 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4833
4834 /* If we don't have POINTER_TYPE, call the function. */
4835 if (arg1_align == 0 || arg2_align == 0)
4836 return NULL_RTX;
4837
4838 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4839 arg1 = builtin_save_expr (arg1);
4840 arg2 = builtin_save_expr (arg2);
4841
4842 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4843 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4844
4845 /* Try to call cmpstrsi. */
4846 if (cmpstr_icode != CODE_FOR_nothing)
4847 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4848 MIN (arg1_align, arg2_align));
4849
4850 /* Try to determine at least one length and call cmpstrnsi. */
4851 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4852 {
4853 tree len;
4854 rtx arg3_rtx;
4855
4856 tree len1 = c_strlen (arg1, 1);
4857 tree len2 = c_strlen (arg2, 1);
4858
4859 if (len1)
4860 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4861 if (len2)
4862 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4863
4864 /* If we don't have a constant length for the first, use the length
4865 of the second, if we know it. We don't require a constant for
4866 this case; some cost analysis could be done if both are available
4867 but neither is constant. For now, assume they're equally cheap,
4868 unless one has side effects. If both strings have constant lengths,
4869 use the smaller. */
4870
4871 if (!len1)
4872 len = len2;
4873 else if (!len2)
4874 len = len1;
4875 else if (TREE_SIDE_EFFECTS (len1))
4876 len = len2;
4877 else if (TREE_SIDE_EFFECTS (len2))
4878 len = len1;
4879 else if (TREE_CODE (len1) != INTEGER_CST)
4880 len = len2;
4881 else if (TREE_CODE (len2) != INTEGER_CST)
4882 len = len1;
4883 else if (tree_int_cst_lt (len1, len2))
4884 len = len1;
4885 else
4886 len = len2;
4887
4888 /* If both arguments have side effects, we cannot optimize. */
4889 if (len && !TREE_SIDE_EFFECTS (len))
4890 {
4891 arg3_rtx = expand_normal (len);
4892 result = expand_cmpstrn_or_cmpmem
4893 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4894 arg3_rtx, MIN (arg1_align, arg2_align));
4895 }
4896 }
4897
4898 tree fndecl = get_callee_fndecl (exp);
4899 if (result)
4900 {
4901 /* Check to see if the argument was declared attribute nonstring
4902 and if so, issue a warning since at this point it's not known
4903 to be nul-terminated. */
4904 maybe_warn_nonstring_arg (fndecl, exp);
4905
4906 /* Return the value in the proper mode for this function. */
4907 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4908 if (GET_MODE (result) == mode)
4909 return result;
4910 if (target == 0)
4911 return convert_to_mode (mode, result, 0);
4912 convert_move (target, result, 0);
4913 return target;
4914 }
4915
4916 /* Expand the library call ourselves using a stabilized argument
4917 list to avoid re-evaluating the function's arguments twice. */
4918 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4919 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4920 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4921 return expand_call (fn, target, target == const0_rtx);
4922 }
4923
4924 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4925 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4926 the result in TARGET, if convenient. */
4927
4928 static rtx
4929 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4930 ATTRIBUTE_UNUSED machine_mode mode)
4931 {
4932 if (!validate_arglist (exp,
4933 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4934 return NULL_RTX;
4935
4936 /* Due to the performance benefit, always inline the calls first. */
4937 rtx result = NULL_RTX;
4938 result = inline_expand_builtin_string_cmp (exp, target);
4939 if (result)
4940 return result;
4941
4942 /* If c_strlen can determine an expression for one of the string
4943 lengths, and it doesn't have side effects, then emit cmpstrnsi
4944 using length MIN(strlen(string)+1, arg3). */
4945 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4946 if (cmpstrn_icode == CODE_FOR_nothing)
4947 return NULL_RTX;
4948
4949 tree len;
4950
4951 tree arg1 = CALL_EXPR_ARG (exp, 0);
4952 tree arg2 = CALL_EXPR_ARG (exp, 1);
4953 tree arg3 = CALL_EXPR_ARG (exp, 2);
4954
4955 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4956 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4957
4958 tree len1 = c_strlen (arg1, 1);
4959 tree len2 = c_strlen (arg2, 1);
4960
4961 location_t loc = EXPR_LOCATION (exp);
4962
4963 if (len1)
4964 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4965 if (len2)
4966 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4967
4968 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4969
4970 /* If we don't have a constant length for the first, use the length
4971 of the second, if we know it. If neither string is constant length,
4972 use the given length argument. We don't require a constant for
4973 this case; some cost analysis could be done if both are available
4974 but neither is constant. For now, assume they're equally cheap,
4975 unless one has side effects. If both strings have constant lengths,
4976 use the smaller. */
4977
4978 if (!len1 && !len2)
4979 len = len3;
4980 else if (!len1)
4981 len = len2;
4982 else if (!len2)
4983 len = len1;
4984 else if (TREE_SIDE_EFFECTS (len1))
4985 len = len2;
4986 else if (TREE_SIDE_EFFECTS (len2))
4987 len = len1;
4988 else if (TREE_CODE (len1) != INTEGER_CST)
4989 len = len2;
4990 else if (TREE_CODE (len2) != INTEGER_CST)
4991 len = len1;
4992 else if (tree_int_cst_lt (len1, len2))
4993 len = len1;
4994 else
4995 len = len2;
4996
4997 /* If we are not using the given length, we must incorporate it here.
4998 The actual new length parameter will be MIN(len,arg3) in this case. */
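/* E.g. for strncmp (s, "abc", n) with a non-constant N, LEN is
   strlen ("abc") + 1 == 4, so the comparison length becomes
   MIN (4, n); bytes past the first nul cannot change the result.  */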
4999 if (len != len3)
5000 {
5001 len = fold_convert_loc (loc, sizetype, len);
5002 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5003 }
5004 rtx arg1_rtx = get_memory_rtx (arg1, len);
5005 rtx arg2_rtx = get_memory_rtx (arg2, len);
5006 rtx arg3_rtx = expand_normal (len);
5007 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5008 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5009 MIN (arg1_align, arg2_align));
5010
5011 tree fndecl = get_callee_fndecl (exp);
5012 if (result)
5013 {
5014 /* Check to see if the argument was declared attribute nonstring
5015 and if so, issue a warning since at this point it's not known
5016 to be nul-terminated. */
5017 maybe_warn_nonstring_arg (fndecl, exp);
5018
5019 /* Return the value in the proper mode for this function. */
5020 mode = TYPE_MODE (TREE_TYPE (exp));
5021 if (GET_MODE (result) == mode)
5022 return result;
5023 if (target == 0)
5024 return convert_to_mode (mode, result, 0);
5025 convert_move (target, result, 0);
5026 return target;
5027 }
5028
5029 /* Expand the library call ourselves using a stabilized argument
5030 list to avoid re-evaluating the function's arguments twice. */
5031 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5032 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5033 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5034 return expand_call (fn, target, target == const0_rtx);
5035 }
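/* Illustrative sketch (not part of the expansion itself): for a call
   such as

     strncmp (buf, "abc", 16)

   c_strlen determines a length of 3 for the literal, so the length
   handed to the cmpstrnsi pattern above is MIN (3 + 1, 16) = 4, i.e.
   only the bytes that can influence the result, including the
   terminating nul of the string with the known length.  */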
5036
5037 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5038 if that's convenient. */
5039
5040 rtx
5041 expand_builtin_saveregs (void)
5042 {
5043 rtx val;
5044 rtx_insn *seq;
5045
5046 /* Don't do __builtin_saveregs more than once in a function.
5047 Save the result of the first call and reuse it. */
5048 if (saveregs_value != 0)
5049 return saveregs_value;
5050
5051 /* When this function is called, it means that registers must be
5052 saved on entry to this function. So we migrate the call to the
5053 first insn of this function. */
5054
5055 start_sequence ();
5056
5057 /* Do whatever the machine needs done in this case. */
5058 val = targetm.calls.expand_builtin_saveregs ();
5059
5060 seq = get_insns ();
5061 end_sequence ();
5062
5063 saveregs_value = val;
5064
5065 /* Put the insns after the NOTE that starts the function. If this
5066 is inside a start_sequence, make the outer-level insn chain current, so
5067 the code is placed at the start of the function. */
5068 push_topmost_sequence ();
5069 emit_insn_after (seq, entry_of_function ());
5070 pop_topmost_sequence ();
5071
5072 return val;
5073 }
5074
5075 /* Expand a call to __builtin_next_arg. */
5076
5077 static rtx
5078 expand_builtin_next_arg (void)
5079 {
5080 /* Checking arguments is already done in fold_builtin_next_arg,
5081 which must be called before this function. */
5082 return expand_binop (ptr_mode, add_optab,
5083 crtl->args.internal_arg_pointer,
5084 crtl->args.arg_offset_rtx,
5085 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5086 }
5087
5088 /* Make it easier for the backends by protecting the valist argument
5089 from multiple evaluations. */
5090
5091 static tree
5092 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5093 {
5094 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5095
5096 /* The current way of determining the type of valist is completely
5097 bogus. We should have the information on the va builtin instead. */
5098 if (!vatype)
5099 vatype = targetm.fn_abi_va_list (cfun->decl);
5100
5101 if (TREE_CODE (vatype) == ARRAY_TYPE)
5102 {
5103 if (TREE_SIDE_EFFECTS (valist))
5104 valist = save_expr (valist);
5105
5106 /* For this case, the backends will be expecting a pointer to
5107 vatype, but it's possible we've actually been given an array
5108 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5109 So fix it. */
5110 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5111 {
5112 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5113 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5114 }
5115 }
5116 else
5117 {
5118 tree pt = build_pointer_type (vatype);
5119
5120 if (! needs_lvalue)
5121 {
5122 if (! TREE_SIDE_EFFECTS (valist))
5123 return valist;
5124
5125 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5126 TREE_SIDE_EFFECTS (valist) = 1;
5127 }
5128
5129 if (TREE_SIDE_EFFECTS (valist))
5130 valist = save_expr (valist);
5131 valist = fold_build2_loc (loc, MEM_REF,
5132 vatype, valist, build_int_cst (pt, 0));
5133 }
5134
5135 return valist;
5136 }
5137
5138 /* The "standard" definition of va_list is void*. */
5139
5140 tree
5141 std_build_builtin_va_list (void)
5142 {
5143 return ptr_type_node;
5144 }
5145
5146 /* The "standard" abi va_list is va_list_type_node. */
5147
5148 tree
5149 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5150 {
5151 return va_list_type_node;
5152 }
5153
5154 /* The "standard" type of va_list is va_list_type_node. */
5155
5156 tree
5157 std_canonical_va_list_type (tree type)
5158 {
5159 tree wtype, htype;
5160
5161 wtype = va_list_type_node;
5162 htype = type;
5163
5164 if (TREE_CODE (wtype) == ARRAY_TYPE)
5165 {
5166 /* If va_list is an array type, the argument may have decayed
5167 to a pointer type, e.g. by being passed to another function.
5168 In that case, unwrap both types so that we can compare the
5169 underlying records. */
5170 if (TREE_CODE (htype) == ARRAY_TYPE
5171 || POINTER_TYPE_P (htype))
5172 {
5173 wtype = TREE_TYPE (wtype);
5174 htype = TREE_TYPE (htype);
5175 }
5176 }
5177 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5178 return va_list_type_node;
5179
5180 return NULL_TREE;
5181 }
5182
5183 /* The "standard" implementation of va_start: just assign `nextarg' to
5184 the variable. */
5185
5186 void
5187 std_expand_builtin_va_start (tree valist, rtx nextarg)
5188 {
5189 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5190 convert_move (va_r, nextarg, 0);
5191 }
5192
5193 /* Expand EXP, a call to __builtin_va_start. */
5194
5195 static rtx
5196 expand_builtin_va_start (tree exp)
5197 {
5198 rtx nextarg;
5199 tree valist;
5200 location_t loc = EXPR_LOCATION (exp);
5201
5202 if (call_expr_nargs (exp) < 2)
5203 {
5204 error_at (loc, "too few arguments to function %<va_start%>");
5205 return const0_rtx;
5206 }
5207
5208 if (fold_builtin_next_arg (exp, true))
5209 return const0_rtx;
5210
5211 nextarg = expand_builtin_next_arg ();
5212 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5213
5214 if (targetm.expand_builtin_va_start)
5215 targetm.expand_builtin_va_start (valist, nextarg);
5216 else
5217 std_expand_builtin_va_start (valist, nextarg);
5218
5219 return const0_rtx;
5220 }
5221
5222 /* Expand EXP, a call to __builtin_va_end. */
5223
5224 static rtx
5225 expand_builtin_va_end (tree exp)
5226 {
5227 tree valist = CALL_EXPR_ARG (exp, 0);
5228
5229 /* Evaluate for side effects, if needed. I hate macros that don't
5230 do that. */
5231 if (TREE_SIDE_EFFECTS (valist))
5232 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5233
5234 return const0_rtx;
5235 }
5236
5237 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5238 builtin rather than just as an assignment in stdarg.h because of the
5239 nastiness of array-type va_list types. */
5240
5241 static rtx
5242 expand_builtin_va_copy (tree exp)
5243 {
5244 tree dst, src, t;
5245 location_t loc = EXPR_LOCATION (exp);
5246
5247 dst = CALL_EXPR_ARG (exp, 0);
5248 src = CALL_EXPR_ARG (exp, 1);
5249
5250 dst = stabilize_va_list_loc (loc, dst, 1);
5251 src = stabilize_va_list_loc (loc, src, 0);
5252
5253 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5254
5255 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5256 {
5257 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5258 TREE_SIDE_EFFECTS (t) = 1;
5259 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5260 }
5261 else
5262 {
5263 rtx dstb, srcb, size;
5264
5265 /* Evaluate to pointers. */
5266 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5267 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5268 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5269 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5270
5271 dstb = convert_memory_address (Pmode, dstb);
5272 srcb = convert_memory_address (Pmode, srcb);
5273
5274 /* "Dereference" to BLKmode memories. */
5275 dstb = gen_rtx_MEM (BLKmode, dstb);
5276 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5277 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5278 srcb = gen_rtx_MEM (BLKmode, srcb);
5279 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5280 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5281
5282 /* Copy. */
5283 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5284 }
5285
5286 return const0_rtx;
5287 }
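/* Illustrative note: on targets whose ABI va_list is an array of a
   record (for instance the x86-64 System V ABI, where va_list is
   `struct __va_list_tag[1]'), a plain

     va_copy (dst, src);

   takes the block-move branch above, whereas the "standard" void *
   va_list takes the simple MODIFY_EXPR branch.  */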
5288
5289 /* Expand a call to one of the builtin functions __builtin_frame_address or
5290 __builtin_return_address. */
5291
5292 static rtx
5293 expand_builtin_frame_address (tree fndecl, tree exp)
5294 {
5295 /* The argument must be a nonnegative integer constant.
5296 It counts the number of frames to scan up the stack.
5297 The value is either the frame pointer value or the return
5298 address saved in that frame. */
5299 if (call_expr_nargs (exp) == 0)
5300 /* Warning about missing arg was already issued. */
5301 return const0_rtx;
5302 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5303 {
5304 error ("invalid argument to %qD", fndecl);
5305 return const0_rtx;
5306 }
5307 else
5308 {
5309 /* Number of frames to scan up the stack. */
5310 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5311
5312 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5313
5314 /* Some ports cannot access arbitrary stack frames. */
5315 if (tem == NULL)
5316 {
5317 warning (0, "unsupported argument to %qD", fndecl);
5318 return const0_rtx;
5319 }
5320
5321 if (count)
5322 {
5323 /* Warn since no effort is made to ensure that any frame
5324 beyond the current one exists or can be safely reached. */
5325 warning (OPT_Wframe_address, "calling %qD with "
5326 "a nonzero argument is unsafe", fndecl);
5327 }
5328
5329 /* For __builtin_frame_address, return what we've got. */
5330 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5331 return tem;
5332
5333 if (!REG_P (tem)
5334 && ! CONSTANT_P (tem))
5335 tem = copy_addr_to_reg (tem);
5336 return tem;
5337 }
5338 }
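/* Usage sketch (illustrative only):

     void *fp = __builtin_frame_address (0);    expanded inline
     void *ra = __builtin_return_address (2);   warns: nonzero argument
                                                is unsafe

   A non-constant (or negative) argument is rejected with an error and
   the call expands to 0, as does an argument the port cannot
   handle.  */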
5339
5340 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5341 failed and the caller should emit a normal call. */
5342
5343 static rtx
5344 expand_builtin_alloca (tree exp)
5345 {
5346 rtx op0;
5347 rtx result;
5348 unsigned int align;
5349 tree fndecl = get_callee_fndecl (exp);
5350 HOST_WIDE_INT max_size;
5351 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5352 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5353 bool valid_arglist
5354 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5355 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5356 VOID_TYPE)
5357 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5358 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5359 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5360
5361 if (!valid_arglist)
5362 return NULL_RTX;
5363
5364 if ((alloca_for_var
5365 && warn_vla_limit >= HOST_WIDE_INT_MAX
5366 && warn_alloc_size_limit < warn_vla_limit)
5367 || (!alloca_for_var
5368 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5369 && warn_alloc_size_limit < warn_alloca_limit
5370 ))
5371 {
5372 /* -Walloca-larger-than and -Wvla-larger-than settings of
5373 less than HOST_WIDE_INT_MAX override the more general
5374 -Walloc-size-larger-than so unless either of the former
5375 options is smaller than the last one (which would imply
5376 that the call was already checked), check the alloca
5377 arguments for overflow. */
5378 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5379 int idx[] = { 0, -1 };
5380 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5381 }
5382
5383 /* Compute the argument. */
5384 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5385
5386 /* Compute the alignment. */
5387 align = (fcode == BUILT_IN_ALLOCA
5388 ? BIGGEST_ALIGNMENT
5389 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5390
5391 /* Compute the maximum size. */
5392 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5393 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5394 : -1);
5395
5396 /* Allocate the desired space. If the allocation stems from the declaration
5397 of a variable-sized object, it cannot accumulate. */
5398 result
5399 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5400 result = convert_memory_address (ptr_mode, result);
5401
5402 return result;
5403 }
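/* The three argument shapes validated above are, as a sketch:

     __builtin_alloca (size)
     __builtin_alloca_with_align (size, align_in_bits)
     __builtin_alloca_with_align_and_max (size, align_in_bits, max)

   For example, a VLA declaration such as `char buf[n];' is lowered by
   the gimplifier to the _with_align form with CALL_ALLOCA_FOR_VAR_P
   set, which is why the -Wvla-larger-than limit rather than the
   -Walloca-larger-than limit is consulted above in that case.  */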
5404
5405 /* Emit a call to __asan_allocas_unpoison for EXP. Add
5406 virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5407 STACK_DYNAMIC_OFFSET value, to the second argument of the call. See the
5408 motivation for this in the comment for handle_builtin_stack_restore. */
5409
5410 static rtx
5411 expand_asan_emit_allocas_unpoison (tree exp)
5412 {
5413 tree arg0 = CALL_EXPR_ARG (exp, 0);
5414 tree arg1 = CALL_EXPR_ARG (exp, 1);
5415 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5416 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5417 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5418 stack_pointer_rtx, NULL_RTX, 0,
5419 OPTAB_LIB_WIDEN);
5420 off = convert_modes (ptr_mode, Pmode, off, 0);
5421 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5422 OPTAB_LIB_WIDEN);
5423 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5424 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5425 top, ptr_mode, bot, ptr_mode);
5426 return ret;
5427 }
5428
5429 /* Expand a call to bswap builtin in EXP.
5430 Return NULL_RTX if a normal call should be emitted rather than expanding the
5431 function in-line. If convenient, the result should be placed in TARGET.
5432 SUBTARGET may be used as the target for computing one of EXP's operands. */
5433
5434 static rtx
5435 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5436 rtx subtarget)
5437 {
5438 tree arg;
5439 rtx op0;
5440
5441 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5442 return NULL_RTX;
5443
5444 arg = CALL_EXPR_ARG (exp, 0);
5445 op0 = expand_expr (arg,
5446 subtarget && GET_MODE (subtarget) == target_mode
5447 ? subtarget : NULL_RTX,
5448 target_mode, EXPAND_NORMAL);
5449 if (GET_MODE (op0) != target_mode)
5450 op0 = convert_to_mode (target_mode, op0, 1);
5451
5452 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5453
5454 gcc_assert (target);
5455
5456 return convert_to_mode (target_mode, target, 1);
5457 }
5458
5459 /* Expand a call to a unary builtin in EXP.
5460 Return NULL_RTX if a normal call should be emitted rather than expanding the
5461 function in-line. If convenient, the result should be placed in TARGET.
5462 SUBTARGET may be used as the target for computing one of EXP's operands. */
5463
5464 static rtx
5465 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5466 rtx subtarget, optab op_optab)
5467 {
5468 rtx op0;
5469
5470 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5471 return NULL_RTX;
5472
5473 /* Compute the argument. */
5474 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5475 (subtarget
5476 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5477 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5478 VOIDmode, EXPAND_NORMAL);
5479 /* Compute op, into TARGET if possible.
5480 Set TARGET to wherever the result comes back. */
5481 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5482 op_optab, op0, target, op_optab != clrsb_optab);
5483 gcc_assert (target);
5484
5485 return convert_to_mode (target_mode, target, 0);
5486 }
5487
5488 /* Expand a call to __builtin_expect. We just return our argument
5489 as the builtin_expect semantics should already have been applied by the
5490 tree branch prediction pass. */
5491
5492 static rtx
5493 expand_builtin_expect (tree exp, rtx target)
5494 {
5495 tree arg;
5496
5497 if (call_expr_nargs (exp) < 2)
5498 return const0_rtx;
5499 arg = CALL_EXPR_ARG (exp, 0);
5500
5501 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5502 /* When guessing was done, the hints should already have been stripped away. */
5503 gcc_assert (!flag_guess_branch_prob
5504 || optimize == 0 || seen_error ());
5505 return target;
5506 }
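/* By this point the prediction has already been consumed by the tree
   branch predictor, so, illustratively,

     if (__builtin_expect (ptr == NULL, 0))
       handle_error ();

   expands to nothing more than the value of `ptr == NULL'; only the
   first argument survives to RTL.  */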
5507
5508 /* Expand a call to __builtin_expect_with_probability. We just return our
5509 argument as the builtin_expect semantics should already have been applied by
5510 the tree branch prediction pass. */
5511
5512 static rtx
5513 expand_builtin_expect_with_probability (tree exp, rtx target)
5514 {
5515 tree arg;
5516
5517 if (call_expr_nargs (exp) < 3)
5518 return const0_rtx;
5519 arg = CALL_EXPR_ARG (exp, 0);
5520
5521 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5522 /* When guessing was done, the hints should already have been stripped away. */
5523 gcc_assert (!flag_guess_branch_prob
5524 || optimize == 0 || seen_error ());
5525 return target;
5526 }
5527
5528
5529 /* Expand a call to __builtin_assume_aligned. We just return our first
5530 argument as the builtin_assume_aligned semantic should've been already
5531 executed by CCP. */
5532
5533 static rtx
5534 expand_builtin_assume_aligned (tree exp, rtx target)
5535 {
5536 if (call_expr_nargs (exp) < 2)
5537 return const0_rtx;
5538 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5539 EXPAND_NORMAL);
5540 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5541 && (call_expr_nargs (exp) < 3
5542 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5543 return target;
5544 }
5545
5546 void
5547 expand_builtin_trap (void)
5548 {
5549 if (targetm.have_trap ())
5550 {
5551 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5552 /* For trap insns when not accumulating outgoing args force
5553 REG_ARGS_SIZE note to prevent crossjumping of calls with
5554 different args sizes. */
5555 if (!ACCUMULATE_OUTGOING_ARGS)
5556 add_args_size_note (insn, stack_pointer_delta);
5557 }
5558 else
5559 {
5560 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5561 tree call_expr = build_call_expr (fn, 0);
5562 expand_call (call_expr, NULL_RTX, false);
5563 }
5564
5565 emit_barrier ();
5566 }
5567
5568 /* Expand a call to __builtin_unreachable. We do nothing except emit
5569 a barrier saying that control flow will not pass here.
5570
5571 It is the responsibility of the program being compiled to ensure
5572 that control flow never reaches __builtin_unreachable. */
5573 static void
5574 expand_builtin_unreachable (void)
5575 {
5576 emit_barrier ();
5577 }
5578
5579 /* Expand EXP, a call to fabs, fabsf or fabsl.
5580 Return NULL_RTX if a normal call should be emitted rather than expanding
5581 the function inline. If convenient, the result should be placed
5582 in TARGET. SUBTARGET may be used as the target for computing
5583 the operand. */
5584
5585 static rtx
5586 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5587 {
5588 machine_mode mode;
5589 tree arg;
5590 rtx op0;
5591
5592 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5593 return NULL_RTX;
5594
5595 arg = CALL_EXPR_ARG (exp, 0);
5596 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5597 mode = TYPE_MODE (TREE_TYPE (arg));
5598 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5599 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5600 }
5601
5602 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5603 Return NULL if a normal call should be emitted rather than expanding the
5604 function inline. If convenient, the result should be placed in TARGET.
5605 SUBTARGET may be used as the target for computing the operand. */
5606
5607 static rtx
5608 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5609 {
5610 rtx op0, op1;
5611 tree arg;
5612
5613 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5614 return NULL_RTX;
5615
5616 arg = CALL_EXPR_ARG (exp, 0);
5617 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5618
5619 arg = CALL_EXPR_ARG (exp, 1);
5620 op1 = expand_normal (arg);
5621
5622 return expand_copysign (op0, op1, target);
5623 }
5624
5625 /* Expand a call to __builtin___clear_cache. */
5626
5627 static rtx
5628 expand_builtin___clear_cache (tree exp)
5629 {
5630 if (!targetm.code_for_clear_cache)
5631 {
5632 #ifdef CLEAR_INSN_CACHE
5633 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5634 does something. Just do the default expansion to a call to
5635 __clear_cache(). */
5636 return NULL_RTX;
5637 #else
5638 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5639 does nothing. There is no need to call it. Do nothing. */
5640 return const0_rtx;
5641 #endif /* CLEAR_INSN_CACHE */
5642 }
5643
5644 /* We have a "clear_cache" insn, and it will handle everything. */
5645 tree begin, end;
5646 rtx begin_rtx, end_rtx;
5647
5648 /* We must not expand to a library call. If we did, any
5649 fallback library function in libgcc that might contain a call to
5650 __builtin___clear_cache() would recurse infinitely. */
5651 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5652 {
5653 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5654 return const0_rtx;
5655 }
5656
5657 if (targetm.have_clear_cache ())
5658 {
5659 class expand_operand ops[2];
5660
5661 begin = CALL_EXPR_ARG (exp, 0);
5662 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5663
5664 end = CALL_EXPR_ARG (exp, 1);
5665 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5666
5667 create_address_operand (&ops[0], begin_rtx);
5668 create_address_operand (&ops[1], end_rtx);
5669 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5670 return const0_rtx;
5671 }
5672 return const0_rtx;
5673 }
5674
5675 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5676
5677 static rtx
5678 round_trampoline_addr (rtx tramp)
5679 {
5680 rtx temp, addend, mask;
5681
5682 /* If we don't need too much alignment, we'll have been guaranteed
5683 proper alignment by get_trampoline_type. */
5684 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5685 return tramp;
5686
5687 /* Round address up to desired boundary. */
5688 temp = gen_reg_rtx (Pmode);
5689 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5690 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5691
5692 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5693 temp, 0, OPTAB_LIB_WIDEN);
5694 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5695 temp, 0, OPTAB_LIB_WIDEN);
5696
5697 return tramp;
5698 }
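/* Sketch of the arithmetic above: with a TRAMPOLINE_ALIGNMENT of 64
   bits (8 bytes),

     addend = 8 - 1 =  7
     mask   = -8     = ~7

   so the sequence computes tramp = (tramp + 7) & ~7, the usual
   round-up-to-boundary idiom.  */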
5699
5700 static rtx
5701 expand_builtin_init_trampoline (tree exp, bool onstack)
5702 {
5703 tree t_tramp, t_func, t_chain;
5704 rtx m_tramp, r_tramp, r_chain, tmp;
5705
5706 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5707 POINTER_TYPE, VOID_TYPE))
5708 return NULL_RTX;
5709
5710 t_tramp = CALL_EXPR_ARG (exp, 0);
5711 t_func = CALL_EXPR_ARG (exp, 1);
5712 t_chain = CALL_EXPR_ARG (exp, 2);
5713
5714 r_tramp = expand_normal (t_tramp);
5715 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5716 MEM_NOTRAP_P (m_tramp) = 1;
5717
5718 /* If ONSTACK, the TRAMP argument should be the address of a field
5719 within the local function's FRAME decl. Either way, let's see if
5720 we can fill in the MEM_ATTRs for this memory. */
5721 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5722 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5723
5724 /* Creator of a heap trampoline is responsible for making sure the
5725 address is aligned to at least STACK_BOUNDARY. Normally malloc
5726 will ensure this anyhow. */
5727 tmp = round_trampoline_addr (r_tramp);
5728 if (tmp != r_tramp)
5729 {
5730 m_tramp = change_address (m_tramp, BLKmode, tmp);
5731 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5732 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5733 }
5734
5735 /* The FUNC argument should be the address of the nested function.
5736 Extract the actual function decl to pass to the hook. */
5737 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5738 t_func = TREE_OPERAND (t_func, 0);
5739 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5740
5741 r_chain = expand_normal (t_chain);
5742
5743 /* Generate insns to initialize the trampoline. */
5744 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5745
5746 if (onstack)
5747 {
5748 trampolines_created = 1;
5749
5750 if (targetm.calls.custom_function_descriptors != 0)
5751 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5752 "trampoline generated for nested function %qD", t_func);
5753 }
5754
5755 return const0_rtx;
5756 }
5757
5758 static rtx
5759 expand_builtin_adjust_trampoline (tree exp)
5760 {
5761 rtx tramp;
5762
5763 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5764 return NULL_RTX;
5765
5766 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5767 tramp = round_trampoline_addr (tramp);
5768 if (targetm.calls.trampoline_adjust_address)
5769 tramp = targetm.calls.trampoline_adjust_address (tramp);
5770
5771 return tramp;
5772 }
5773
5774 /* Expand a call to the builtin descriptor initialization routine.
5775 A descriptor is made up of a pair of pointers: one to the static
5776 chain and one to the code entry, in that order. */
5777
5778 static rtx
5779 expand_builtin_init_descriptor (tree exp)
5780 {
5781 tree t_descr, t_func, t_chain;
5782 rtx m_descr, r_descr, r_func, r_chain;
5783
5784 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5785 VOID_TYPE))
5786 return NULL_RTX;
5787
5788 t_descr = CALL_EXPR_ARG (exp, 0);
5789 t_func = CALL_EXPR_ARG (exp, 1);
5790 t_chain = CALL_EXPR_ARG (exp, 2);
5791
5792 r_descr = expand_normal (t_descr);
5793 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5794 MEM_NOTRAP_P (m_descr) = 1;
5795 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5796
5797 r_func = expand_normal (t_func);
5798 r_chain = expand_normal (t_chain);
5799
5800 /* Generate insns to initialize the descriptor. */
5801 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5802 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5803 POINTER_SIZE / BITS_PER_UNIT), r_func);
5804
5805 return const0_rtx;
5806 }
5807
5808 /* Expand a call to the builtin descriptor adjustment routine. */
5809
5810 static rtx
5811 expand_builtin_adjust_descriptor (tree exp)
5812 {
5813 rtx tramp;
5814
5815 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5816 return NULL_RTX;
5817
5818 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5819
5820 /* Unalign the descriptor to allow runtime identification. */
5821 tramp = plus_constant (ptr_mode, tramp,
5822 targetm.calls.custom_function_descriptors);
5823
5824 return force_operand (tramp, NULL_RTX);
5825 }
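/* Sketch of the descriptor produced and consumed by the two routines
   above (assuming 64-bit pointers):

     offset 0:  static chain value
     offset 8:  code entry point

   expand_builtin_adjust_descriptor then adds the target's
   custom_function_descriptors constant, deliberately misaligning the
   value so that a descriptor "address" can be told apart from an
   ordinary, aligned function pointer at run time.  */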
5826
5827 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5828 function. The function first checks whether the back end provides
5829 an insn to implement signbit for the respective mode. If not, it
5830 checks whether the floating point format of the value is such that
5831 the sign bit can be extracted. If that is not the case, error out.
5832 EXP is the expression that is a call to the builtin function; if
5833 convenient, the result should be placed in TARGET. */
5834 static rtx
5835 expand_builtin_signbit (tree exp, rtx target)
5836 {
5837 const struct real_format *fmt;
5838 scalar_float_mode fmode;
5839 scalar_int_mode rmode, imode;
5840 tree arg;
5841 int word, bitpos;
5842 enum insn_code icode;
5843 rtx temp;
5844 location_t loc = EXPR_LOCATION (exp);
5845
5846 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5847 return NULL_RTX;
5848
5849 arg = CALL_EXPR_ARG (exp, 0);
5850 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5851 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5852 fmt = REAL_MODE_FORMAT (fmode);
5853
5854 arg = builtin_save_expr (arg);
5855
5856 /* Expand the argument yielding a RTX expression. */
5857 temp = expand_normal (arg);
5858
5859 /* Check if the back end provides an insn that handles signbit for the
5860 argument's mode. */
5861 icode = optab_handler (signbit_optab, fmode);
5862 if (icode != CODE_FOR_nothing)
5863 {
5864 rtx_insn *last = get_last_insn ();
5865 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5866 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5867 return target;
5868 delete_insns_since (last);
5869 }
5870
5871 /* For floating point formats without a sign bit, implement signbit
5872 as "ARG < 0.0". */
5873 bitpos = fmt->signbit_ro;
5874 if (bitpos < 0)
5875 {
5876 /* But we can't do this if the format supports signed zero. */
5877 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5878
5879 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5880 build_real (TREE_TYPE (arg), dconst0));
5881 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5882 }
5883
5884 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5885 {
5886 imode = int_mode_for_mode (fmode).require ();
5887 temp = gen_lowpart (imode, temp);
5888 }
5889 else
5890 {
5891 imode = word_mode;
5892 /* Handle targets with different FP word orders. */
5893 if (FLOAT_WORDS_BIG_ENDIAN)
5894 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5895 else
5896 word = bitpos / BITS_PER_WORD;
5897 temp = operand_subword_force (temp, word, fmode);
5898 bitpos = bitpos % BITS_PER_WORD;
5899 }
5900
5901 /* Force the intermediate word_mode (or narrower) result into a
5902 register. This avoids attempting to create paradoxical SUBREGs
5903 of floating point modes below. */
5904 temp = force_reg (imode, temp);
5905
5906 /* If the bitpos is within the "result mode" lowpart, the operation
5907 can be implemented with a single bitwise AND. Otherwise, we need
5908 a right shift and an AND. */
5909
5910 if (bitpos < GET_MODE_BITSIZE (rmode))
5911 {
5912 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5913
5914 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5915 temp = gen_lowpart (rmode, temp);
5916 temp = expand_binop (rmode, and_optab, temp,
5917 immed_wide_int_const (mask, rmode),
5918 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5919 }
5920 else
5921 {
5922 /* Perform a logical right shift to place the signbit in the least
5923 significant bit, then truncate the result to the desired mode
5924 and mask just this bit. */
5925 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5926 temp = gen_lowpart (rmode, temp);
5927 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5928 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5929 }
5930
5931 return temp;
5932 }
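/* Worked example for the generic path (a sketch assuming IEEE formats
   and a 32-bit int result mode): for a float argument signbit_ro is
   31, which fits in the result mode, so a single AND with the mask
   1 << 31 suffices.  For a double argument on a 64-bit-word target,
   bitpos is 63 and does not fit, so a logical right shift by 63
   followed by an AND with 1 is emitted; on a 32-bit-word target the
   word holding the sign is selected first and the single-AND case
   applies again.  */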
5933
5934 /* Expand fork or exec calls. TARGET is the desired target of the
5935 call. EXP is the call. FN is the
5936 identifier of the actual function. IGNORE is nonzero if the
5937 value is to be ignored. */
5938
5939 static rtx
5940 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5941 {
5942 tree id, decl;
5943 tree call;
5944
5945 /* If we are not profiling, just call the function. */
5946 if (!profile_arc_flag)
5947 return NULL_RTX;
5948
5949 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5950 compiler, so the code does not diverge, and the wrapper may run the
5951 code necessary for keeping the profiling sane. */
5952
5953 switch (DECL_FUNCTION_CODE (fn))
5954 {
5955 case BUILT_IN_FORK:
5956 id = get_identifier ("__gcov_fork");
5957 break;
5958
5959 case BUILT_IN_EXECL:
5960 id = get_identifier ("__gcov_execl");
5961 break;
5962
5963 case BUILT_IN_EXECV:
5964 id = get_identifier ("__gcov_execv");
5965 break;
5966
5967 case BUILT_IN_EXECLP:
5968 id = get_identifier ("__gcov_execlp");
5969 break;
5970
5971 case BUILT_IN_EXECLE:
5972 id = get_identifier ("__gcov_execle");
5973 break;
5974
5975 case BUILT_IN_EXECVP:
5976 id = get_identifier ("__gcov_execvp");
5977 break;
5978
5979 case BUILT_IN_EXECVE:
5980 id = get_identifier ("__gcov_execve");
5981 break;
5982
5983 default:
5984 gcc_unreachable ();
5985 }
5986
5987 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5988 FUNCTION_DECL, id, TREE_TYPE (fn));
5989 DECL_EXTERNAL (decl) = 1;
5990 TREE_PUBLIC (decl) = 1;
5991 DECL_ARTIFICIAL (decl) = 1;
5992 TREE_NOTHROW (decl) = 1;
5993 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5994 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5995 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5996 return expand_call (call, target, ignore);
5997 }
5998
5999
6000 \f
6001 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6002 the pointer in these functions is void*, the tree optimizers may remove
6003 casts. The mode computed in expand_builtin isn't reliable either, due
6004 to __sync_bool_compare_and_swap.
6005
6006 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6007 group of builtins. This gives us log2 of the mode size. */
6008
6009 static inline machine_mode
6010 get_builtin_sync_mode (int fcode_diff)
6011 {
6012 /* The size is not negotiable, so ask not to get BLKmode in return
6013 if the target indicates that a smaller size would be better. */
6014 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6015 }
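/* For example (sketch): __sync_fetch_and_add_4 is two entries past
   BUILT_IN_SYNC_FETCH_AND_ADD_1, so FCODE_DIFF is 2 and the call above
   asks for int_mode_for_size (8 << 2, 0), i.e. the 32-bit integer
   mode on a target with 8-bit units.  */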
6016
6017 /* Expand the memory expression LOC and return the appropriate memory operand
6018 for the builtin_sync operations. */
6019
6020 static rtx
6021 get_builtin_sync_mem (tree loc, machine_mode mode)
6022 {
6023 rtx addr, mem;
6024 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6025 ? TREE_TYPE (TREE_TYPE (loc))
6026 : TREE_TYPE (loc));
6027 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6028
6029 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6030 addr = convert_memory_address (addr_mode, addr);
6031
6032 /* Note that we explicitly do not want any alias information for this
6033 memory, so that we kill all other live memories. Otherwise we don't
6034 satisfy the full barrier semantics of the intrinsic. */
6035 mem = gen_rtx_MEM (mode, addr);
6036
6037 set_mem_addr_space (mem, addr_space);
6038
6039 mem = validize_mem (mem);
6040
6041 /* The alignment needs to be at least that of the mode. */
6042 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6043 get_pointer_alignment (loc)));
6044 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6045 MEM_VOLATILE_P (mem) = 1;
6046
6047 return mem;
6048 }
6049
6050 /* Make sure an argument is in the right mode.
6051 EXP is the tree argument.
6052 MODE is the mode it should be in. */
6053
6054 static rtx
6055 expand_expr_force_mode (tree exp, machine_mode mode)
6056 {
6057 rtx val;
6058 machine_mode old_mode;
6059
6060 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6061 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6062 of CONST_INTs, where we know the old_mode only from the call argument. */
6063
6064 old_mode = GET_MODE (val);
6065 if (old_mode == VOIDmode)
6066 old_mode = TYPE_MODE (TREE_TYPE (exp));
6067 val = convert_modes (mode, old_mode, val, 1);
6068 return val;
6069 }
6070
6071
6072 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6073 EXP is the CALL_EXPR. CODE is the rtx code
6074 that corresponds to the arithmetic or logical operation from the name;
6075 an exception here is that NOT actually means NAND. TARGET is an optional
6076 place for us to store the results; AFTER is true if this is the
6077 fetch_and_xxx form. */
6078
6079 static rtx
6080 expand_builtin_sync_operation (machine_mode mode, tree exp,
6081 enum rtx_code code, bool after,
6082 rtx target)
6083 {
6084 rtx val, mem;
6085 location_t loc = EXPR_LOCATION (exp);
6086
6087 if (code == NOT && warn_sync_nand)
6088 {
6089 tree fndecl = get_callee_fndecl (exp);
6090 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6091
6092 static bool warned_f_a_n, warned_n_a_f;
6093
6094 switch (fcode)
6095 {
6096 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6097 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6098 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6099 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6100 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6101 if (warned_f_a_n)
6102 break;
6103
6104 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6105 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6106 warned_f_a_n = true;
6107 break;
6108
6109 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6110 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6111 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6112 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6113 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6114 if (warned_n_a_f)
6115 break;
6116
6117 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6118 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6119 warned_n_a_f = true;
6120 break;
6121
6122 default:
6123 gcc_unreachable ();
6124 }
6125 }
6126
6127 /* Expand the operands. */
6128 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6129 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6130
6131 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6132 after);
6133 }
6134
6135 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6136 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6137 true if this is the boolean form. TARGET is a place for us to store the
6138 results; this is NOT optional if IS_BOOL is true. */
6139
6140 static rtx
6141 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6142 bool is_bool, rtx target)
6143 {
6144 rtx old_val, new_val, mem;
6145 rtx *pbool, *poval;
6146
6147 /* Expand the operands. */
6148 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6149 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6150 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6151
6152 pbool = poval = NULL;
6153 if (target != const0_rtx)
6154 {
6155 if (is_bool)
6156 pbool = &target;
6157 else
6158 poval = &target;
6159 }
6160 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6161 false, MEMMODEL_SYNC_SEQ_CST,
6162 MEMMODEL_SYNC_SEQ_CST))
6163 return NULL_RTX;
6164
6165 return target;
6166 }
6167
6168 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6169 general form is actually an atomic exchange, and some targets only
6170 support a reduced form with the second argument being a constant 1.
6171 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6172 the results. */
6173
6174 static rtx
6175 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6176 rtx target)
6177 {
6178 rtx val, mem;
6179
6180 /* Expand the operands. */
6181 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6182 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6183
6184 return expand_sync_lock_test_and_set (target, mem, val);
6185 }
6186
6187 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6188
6189 static void
6190 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6191 {
6192 rtx mem;
6193
6194 /* Expand the operands. */
6195 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6196
6197 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6198 }
6199
6200 /* Given an integer representing an ``enum memmodel'', verify its
6201 correctness and return the memory model enum. */
6202
6203 static enum memmodel
6204 get_memmodel (tree exp)
6205 {
6206 rtx op;
6207 unsigned HOST_WIDE_INT val;
6208 location_t loc
6209 = expansion_point_location_if_in_system_header (input_location);
6210
6211 /* If the parameter is not a constant, it's a run time value so we'll just
6212 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6213 if (TREE_CODE (exp) != INTEGER_CST)
6214 return MEMMODEL_SEQ_CST;
6215
6216 op = expand_normal (exp);
6217
6218 val = INTVAL (op);
6219 if (targetm.memmodel_check)
6220 val = targetm.memmodel_check (val);
6221 else if (val & ~MEMMODEL_MASK)
6222 {
6223 warning_at (loc, OPT_Winvalid_memory_model,
6224 "unknown architecture specifier in memory model to builtin");
6225 return MEMMODEL_SEQ_CST;
6226 }
6227
6228 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
6229 if (memmodel_base (val) >= MEMMODEL_LAST)
6230 {
6231 warning_at (loc, OPT_Winvalid_memory_model,
6232 "invalid memory model argument to builtin");
6233 return MEMMODEL_SEQ_CST;
6234 }
6235
6236 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6237 be conservative and promote consume to acquire. */
6238 if (val == MEMMODEL_CONSUME)
6239 val = MEMMODEL_ACQUIRE;
6240
6241 return (enum memmodel) val;
6242 }
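/* Illustrative behaviour: a call such as

     __atomic_load_n (p, __ATOMIC_CONSUME)

   arrives here with a constant model and is promoted to acquire, while
   a model passed in a variable is simply treated as seq_cst instead of
   being checked at run time.  */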
6243
6244 /* Expand the __atomic_exchange intrinsic:
6245 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6246 EXP is the CALL_EXPR.
6247 TARGET is an optional place for us to store the results. */
6248
6249 static rtx
6250 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6251 {
6252 rtx val, mem;
6253 enum memmodel model;
6254
6255 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6256
6257 if (!flag_inline_atomics)
6258 return NULL_RTX;
6259
6260 /* Expand the operands. */
6261 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6262 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6263
6264 return expand_atomic_exchange (target, mem, val, model);
6265 }
6266
6267 /* Expand the __atomic_compare_exchange intrinsic:
6268 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6269 TYPE desired, BOOL weak,
6270 enum memmodel success,
6271 enum memmodel failure)
6272 EXP is the CALL_EXPR.
6273 TARGET is an optional place for us to store the results. */
6274
6275 static rtx
6276 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6277 rtx target)
6278 {
6279 rtx expect, desired, mem, oldval;
6280 rtx_code_label *label;
6281 enum memmodel success, failure;
6282 tree weak;
6283 bool is_weak;
6284 location_t loc
6285 = expansion_point_location_if_in_system_header (input_location);
6286
6287 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6288 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6289
6290 if (failure > success)
6291 {
6292 warning_at (loc, OPT_Winvalid_memory_model,
6293 "failure memory model cannot be stronger than success "
6294 "memory model for %<__atomic_compare_exchange%>");
6295 success = MEMMODEL_SEQ_CST;
6296 }
6297
6298 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6299 {
6300 warning_at (loc, OPT_Winvalid_memory_model,
6301 "invalid failure memory model for "
6302 "%<__atomic_compare_exchange%>");
6303 failure = MEMMODEL_SEQ_CST;
6304 success = MEMMODEL_SEQ_CST;
6305 }
6306
6307
6308 if (!flag_inline_atomics)
6309 return NULL_RTX;
6310
6311 /* Expand the operands. */
6312 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6313
6314 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6315 expect = convert_memory_address (Pmode, expect);
6316 expect = gen_rtx_MEM (mode, expect);
6317 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6318
6319 weak = CALL_EXPR_ARG (exp, 3);
6320 is_weak = false;
6321 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6322 is_weak = true;
6323
6324 if (target == const0_rtx)
6325 target = NULL;
6326
6327 /* Lest the rtl backend create a race condition with an improper store
6328 to memory, always create a new pseudo for OLDVAL. */
6329 oldval = NULL;
6330
6331 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6332 is_weak, success, failure))
6333 return NULL_RTX;
6334
6335 /* Conditionally store back to EXPECT, lest we create a race condition
6336 with an improper store to memory. */
6337 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6338 the normal case where EXPECT is totally private, i.e. a register. At
6339 which point the store can be unconditional. */
6340 label = gen_label_rtx ();
6341 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6342 GET_MODE (target), 1, label);
6343 emit_move_insn (expect, oldval);
6344 emit_label (label);
6345
6346 return target;
6347 }
6348
6349 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6350 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6351 call. The weak parameter must be dropped to match the expected parameter
6352 list and the expected argument changed from value to pointer to memory
6353 slot. */
6354
6355 static void
6356 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6357 {
6358 unsigned int z;
6359 vec<tree, va_gc> *vec;
6360
6361 vec_alloc (vec, 5);
6362 vec->quick_push (gimple_call_arg (call, 0));
6363 tree expected = gimple_call_arg (call, 1);
6364 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6365 TREE_TYPE (expected));
6366 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6367 if (expd != x)
6368 emit_move_insn (x, expd);
6369 tree v = make_tree (TREE_TYPE (expected), x);
6370 vec->quick_push (build1 (ADDR_EXPR,
6371 build_pointer_type (TREE_TYPE (expected)), v));
6372 vec->quick_push (gimple_call_arg (call, 2));
6373 /* Skip the boolean weak parameter. */
6374 for (z = 4; z < 6; z++)
6375 vec->quick_push (gimple_call_arg (call, z));
6376 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6377 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6378 gcc_assert (bytes_log2 < 5);
6379 built_in_function fncode
6380 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6381 + bytes_log2);
6382 tree fndecl = builtin_decl_explicit (fncode);
6383 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6384 fndecl);
6385 tree exp = build_call_vec (boolean_type_node, fn, vec);
6386 tree lhs = gimple_call_lhs (call);
6387 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6388 if (lhs)
6389 {
6390 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6391 if (GET_MODE (boolret) != mode)
6392 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6393 x = force_reg (mode, x);
6394 write_complex_part (target, boolret, true);
6395 write_complex_part (target, x, false);
6396 }
6397 }
6398
6399 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6400
6401 void
6402 expand_ifn_atomic_compare_exchange (gcall *call)
6403 {
6404 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6405 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6406 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6407 rtx expect, desired, mem, oldval, boolret;
6408 enum memmodel success, failure;
6409 tree lhs;
6410 bool is_weak;
6411 location_t loc
6412 = expansion_point_location_if_in_system_header (gimple_location (call));
6413
6414 success = get_memmodel (gimple_call_arg (call, 4));
6415 failure = get_memmodel (gimple_call_arg (call, 5));
6416
6417 if (failure > success)
6418 {
6419 warning_at (loc, OPT_Winvalid_memory_model,
6420 "failure memory model cannot be stronger than success "
6421 "memory model for %<__atomic_compare_exchange%>");
6422 success = MEMMODEL_SEQ_CST;
6423 }
6424
6425 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6426 {
6427 warning_at (loc, OPT_Winvalid_memory_model,
6428 "invalid failure memory model for "
6429 "%<__atomic_compare_exchange%>");
6430 failure = MEMMODEL_SEQ_CST;
6431 success = MEMMODEL_SEQ_CST;
6432 }
6433
6434 if (!flag_inline_atomics)
6435 {
6436 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6437 return;
6438 }
6439
6440 /* Expand the operands. */
6441 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6442
6443 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6444 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6445
6446 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6447
6448 boolret = NULL;
6449 oldval = NULL;
6450
6451 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6452 is_weak, success, failure))
6453 {
6454 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6455 return;
6456 }
6457
6458 lhs = gimple_call_lhs (call);
6459 if (lhs)
6460 {
6461 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6462 if (GET_MODE (boolret) != mode)
6463 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6464 write_complex_part (target, boolret, true);
6465 write_complex_part (target, oldval, false);
6466 }
6467 }
6468
6469 /* Expand the __atomic_load intrinsic:
6470 TYPE __atomic_load (TYPE *object, enum memmodel)
6471 EXP is the CALL_EXPR.
6472 TARGET is an optional place for us to store the results. */
6473
6474 static rtx
6475 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6476 {
6477 rtx mem;
6478 enum memmodel model;
6479
6480 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6481 if (is_mm_release (model) || is_mm_acq_rel (model))
6482 {
6483 location_t loc
6484 = expansion_point_location_if_in_system_header (input_location);
6485 warning_at (loc, OPT_Winvalid_memory_model,
6486 "invalid memory model for %<__atomic_load%>");
6487 model = MEMMODEL_SEQ_CST;
6488 }
6489
6490 if (!flag_inline_atomics)
6491 return NULL_RTX;
6492
6493 /* Expand the operand. */
6494 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6495
6496 return expand_atomic_load (target, mem, model);
6497 }
6498
6499
6500 /* Expand the __atomic_store intrinsic:
6501 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6502 EXP is the CALL_EXPR.
6503 TARGET is an optional place for us to store the results. */
6504
6505 static rtx
6506 expand_builtin_atomic_store (machine_mode mode, tree exp)
6507 {
6508 rtx mem, val;
6509 enum memmodel model;
6510
6511 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6512 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6513 || is_mm_release (model)))
6514 {
6515 location_t loc
6516 = expansion_point_location_if_in_system_header (input_location);
6517 warning_at (loc, OPT_Winvalid_memory_model,
6518 "invalid memory model for %<__atomic_store%>");
6519 model = MEMMODEL_SEQ_CST;
6520 }
6521
6522 if (!flag_inline_atomics)
6523 return NULL_RTX;
6524
6525 /* Expand the operands. */
6526 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6527 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6528
6529 return expand_atomic_store (mem, val, model, false);
6530 }
6531
6532 /* Expand the __atomic_fetch_XXX intrinsic:
6533 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6534 EXP is the CALL_EXPR.
6535 TARGET is an optional place for us to store the results.
6536 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6537 FETCH_AFTER is true if returning the result of the operation.
6538 FETCH_AFTER is false if returning the value before the operation.
6539 IGNORE is true if the result is not used.
6540 EXT_CALL is the correct builtin for an external call if this cannot be
6541 resolved to an instruction sequence. */
6542
6543 static rtx
6544 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6545 enum rtx_code code, bool fetch_after,
6546 bool ignore, enum built_in_function ext_call)
6547 {
6548 rtx val, mem, ret;
6549 enum memmodel model;
6550 tree fndecl;
6551 tree addr;
6552
6553 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6554
6555 /* Expand the operands. */
6556 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6557 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6558
6559 /* Only try generating instructions if inlining is turned on. */
6560 if (flag_inline_atomics)
6561 {
6562 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6563 if (ret)
6564 return ret;
6565 }
6566
6567 /* Return if a different routine isn't needed for the library call. */
6568 if (ext_call == BUILT_IN_NONE)
6569 return NULL_RTX;
6570
6571 /* Change the call to the specified function. */
6572 fndecl = get_callee_fndecl (exp);
6573 addr = CALL_EXPR_FN (exp);
6574 STRIP_NOPS (addr);
6575
6576 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6577 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6578
6579 /* If we will emit code after the call, the call cannot be a tail call.
6580 If it is emitted as a tail call, a barrier is emitted after it, and
6581 then all trailing code is removed. */
6582 if (!ignore)
6583 CALL_EXPR_TAILCALL (exp) = 0;
6584
6585 /* Expand the call here so we can emit trailing code. */
6586 ret = expand_call (exp, target, ignore);
6587
6588 /* Replace the original function just in case it matters. */
6589 TREE_OPERAND (addr, 0) = fndecl;
6590
6591 /* Then issue the arithmetic correction to return the right result. */
6592 if (!ignore)
6593 {
6594 if (code == NOT)
6595 {
6596 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6597 OPTAB_LIB_WIDEN);
6598 ret = expand_simple_unop (mode, NOT, ret, target, true);
6599 }
6600 else
6601 ret = expand_simple_binop (mode, code, ret, val, target, true,
6602 OPTAB_LIB_WIDEN);
6603 }
6604 return ret;
6605 }
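/* Correction sketch for the library-call path when CODE is NOT (the
   NAND case) and FETCH_AFTER is true: the external fetch-and-nand
   routine named by EXT_CALL returns the old value, so the code above
   recomputes

     ret = ~(old & val)

   to produce the nand-and-fetch result the caller asked for.  */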
6606
6607 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6608
6609 void
6610 expand_ifn_atomic_bit_test_and (gcall *call)
6611 {
6612 tree ptr = gimple_call_arg (call, 0);
6613 tree bit = gimple_call_arg (call, 1);
6614 tree flag = gimple_call_arg (call, 2);
6615 tree lhs = gimple_call_lhs (call);
6616 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6617 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6618 enum rtx_code code;
6619 optab optab;
6620 class expand_operand ops[5];
6621
6622 gcc_assert (flag_inline_atomics);
6623
6624 if (gimple_call_num_args (call) == 4)
6625 model = get_memmodel (gimple_call_arg (call, 3));
6626
6627 rtx mem = get_builtin_sync_mem (ptr, mode);
6628 rtx val = expand_expr_force_mode (bit, mode);
6629
6630 switch (gimple_call_internal_fn (call))
6631 {
6632 case IFN_ATOMIC_BIT_TEST_AND_SET:
6633 code = IOR;
6634 optab = atomic_bit_test_and_set_optab;
6635 break;
6636 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6637 code = XOR;
6638 optab = atomic_bit_test_and_complement_optab;
6639 break;
6640 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6641 code = AND;
6642 optab = atomic_bit_test_and_reset_optab;
6643 break;
6644 default:
6645 gcc_unreachable ();
6646 }
6647
6648 if (lhs == NULL_TREE)
6649 {
6650 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6651 val, NULL_RTX, true, OPTAB_DIRECT);
6652 if (code == AND)
6653 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6654 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6655 return;
6656 }
6657
6658 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6659 enum insn_code icode = direct_optab_handler (optab, mode);
6660 gcc_assert (icode != CODE_FOR_nothing);
6661 create_output_operand (&ops[0], target, mode);
6662 create_fixed_operand (&ops[1], mem);
6663 create_convert_operand_to (&ops[2], val, mode, true);
6664 create_integer_operand (&ops[3], model);
6665 create_integer_operand (&ops[4], integer_onep (flag));
6666 if (maybe_expand_insn (icode, 5, ops))
6667 return;
6668
6669 rtx bitval = val;
6670 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6671 val, NULL_RTX, true, OPTAB_DIRECT);
6672 rtx maskval = val;
6673 if (code == AND)
6674 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6675 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6676 code, model, false);
6677 if (integer_onep (flag))
6678 {
6679 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6680 NULL_RTX, true, OPTAB_DIRECT);
6681 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6682 true, OPTAB_DIRECT);
6683 }
6684 else
6685 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6686 OPTAB_DIRECT);
6687 if (result != target)
6688 emit_move_insn (target, result);
6689 }
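/* Fallback sketch for IFN_ATOMIC_BIT_TEST_AND_SET when the target has
   no atomic_bit_test_and_set pattern: with BIT == 3 and the result
   used as a boolean (FLAG == 1), the code above effectively emits

     mask   = 1 << 3;
     old    = atomic fetch_or (mem, mask);
     result = (old >> 3) & 1;

   i.e. a generic atomic fetch_op plus a shift and mask to recover the
   tested bit.  */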
6690
6691 /* Expand an atomic clear operation.
6692 void _atomic_clear (BOOL *obj, enum memmodel)
6693 EXP is the call expression. */
6694
6695 static rtx
6696 expand_builtin_atomic_clear (tree exp)
6697 {
6698 machine_mode mode;
6699 rtx mem, ret;
6700 enum memmodel model;
6701
6702 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6703 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6704 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6705
6706 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6707 {
6708 location_t loc
6709 = expansion_point_location_if_in_system_header (input_location);
6710 warning_at (loc, OPT_Winvalid_memory_model,
6711 "invalid memory model for %<__atomic_store%>");
6712 model = MEMMODEL_SEQ_CST;
6713 }
6714
6715 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6716 Failing that, a store is issued by __atomic_store. The only way this can
6717 fail is if the bool type is larger than a word size. Unlikely, but
6718 handle it anyway for completeness. Assume a single threaded model since
6719 there is no atomic support in this case, and no barriers are required. */
6720 ret = expand_atomic_store (mem, const0_rtx, model, true);
6721 if (!ret)
6722 emit_move_insn (mem, const0_rtx);
6723 return const0_rtx;
6724 }
6725
6726 /* Expand an atomic test_and_set operation.
6727 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6728 EXP is the call expression. */
6729
6730 static rtx
6731 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6732 {
6733 rtx mem;
6734 enum memmodel model;
6735 machine_mode mode;
6736
6737 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6738 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6739 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6740
6741 return expand_atomic_test_and_set (target, mem, model);
6742 }
6743
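/* Illustrative example (not part of GCC itself): the two expanders above
   implement the C11-style flag operations.  A minimal spin lock built on
   them might look like the following; spin_lock/spin_unlock are hypothetical
   names used only to show the expected argument forms.

   static bool lock;

   static void spin_lock (void)
   {
     while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
       ;
   }

   static void spin_unlock (void)
   {
     __atomic_clear (&lock, __ATOMIC_RELEASE);
   }
*/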
6744
6745 /* Return true if an object of size ARG0, optionally pointed to by ARG1, is always
6746 lock free on this architecture. If ARG1 is NULL, use the typical alignment for an object of size ARG0. */
6747
6748 static tree
6749 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6750 {
6751 int size;
6752 machine_mode mode;
6753 unsigned int mode_align, type_align;
6754
6755 if (TREE_CODE (arg0) != INTEGER_CST)
6756 return NULL_TREE;
6757
6758 /* We need a corresponding integer mode for the access to be lock-free. */
6759 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6760 if (!int_mode_for_size (size, 0).exists (&mode))
6761 return boolean_false_node;
6762
6763 mode_align = GET_MODE_ALIGNMENT (mode);
6764
6765 if (TREE_CODE (arg1) == INTEGER_CST)
6766 {
6767 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6768
6769 /* Either this argument is null, or it's a fake pointer encoding
6770 the alignment of the object. */
6771 val = least_bit_hwi (val);
6772 val *= BITS_PER_UNIT;
6773
6774 if (val == 0 || mode_align < val)
6775 type_align = mode_align;
6776 else
6777 type_align = val;
6778 }
6779 else
6780 {
6781 tree ttype = TREE_TYPE (arg1);
6782
6783 /* This function is usually invoked and folded immediately by the front
6784 end before anything else has a chance to look at it. The pointer
6785 parameter at this point is usually cast to a void *, so check for that
6786 and look past the cast. */
6787 if (CONVERT_EXPR_P (arg1)
6788 && POINTER_TYPE_P (ttype)
6789 && VOID_TYPE_P (TREE_TYPE (ttype))
6790 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6791 arg1 = TREE_OPERAND (arg1, 0);
6792
6793 ttype = TREE_TYPE (arg1);
6794 gcc_assert (POINTER_TYPE_P (ttype));
6795
6796 /* Get the underlying type of the object. */
6797 ttype = TREE_TYPE (ttype);
6798 type_align = TYPE_ALIGN (ttype);
6799 }
6800
6801 /* If the object has smaller alignment, the lock free routines cannot
6802 be used. */
6803 if (type_align < mode_align)
6804 return boolean_false_node;
6805
6806 /* Check if a compare_and_swap pattern exists for the mode which represents
6807 the required size. The pattern is not allowed to fail, so the existence
6808 of the pattern indicates support is present. Also require that an
6809 atomic load exists for the required size. */
6810 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6811 return boolean_true_node;
6812 else
6813 return boolean_false_node;
6814 }
6815
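/* Illustrative example (not part of GCC itself): because the folder above
   only needs the size and an alignment hint, __atomic_always_lock_free can
   be used in constant expressions.  On typical targets, for instance:

   _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                   "int must be lock free");

   Passing a null pointer (0) as the second argument asks for the typical
   alignment of an object of that size, as described above.  */
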
6816 /* Expand __atomic_always_lock_free. Return const1_rtx if the parameters to
6817 call EXP describe an object that will always be accessed with lock free
6818 instructions. The first argument is the size of the object, and the second
6819 is a pointer to the object itself. If NULL is passed for the object, the
6820 result is based on the typical alignment for an object of the specified
6821 size. Otherwise return const0_rtx. */
6822
6823 static rtx
6824 expand_builtin_atomic_always_lock_free (tree exp)
6825 {
6826 tree size;
6827 tree arg0 = CALL_EXPR_ARG (exp, 0);
6828 tree arg1 = CALL_EXPR_ARG (exp, 1);
6829
6830 if (TREE_CODE (arg0) != INTEGER_CST)
6831 {
6832 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6833 return const0_rtx;
6834 }
6835
6836 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6837 if (size == boolean_true_node)
6838 return const1_rtx;
6839 return const0_rtx;
6840 }
6841
6842 /* Return boolean_true_node if it can be determined that the object ARG1 of
6843 size ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
6844
6845 static tree
6846 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6847 {
6848 if (!flag_inline_atomics)
6849 return NULL_TREE;
6850
6851 /* If it isn't always lock free, don't generate a result. */
6852 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6853 return boolean_true_node;
6854
6855 return NULL_TREE;
6856 }
6857
6858 /* Expand __atomic_is_lock_free. Return const1_rtx if the parameters to call
6859 EXP describe an object that is known to be lock free. The first argument
6860 is the size of the object, and the second is a pointer to the object
6861 itself. If NULL is passed for the object, the result is based on the
6862 typical alignment for an object of the specified size. Otherwise return
6863 NULL_RTX. */
6864
6865 static rtx
6866 expand_builtin_atomic_is_lock_free (tree exp)
6867 {
6868 tree size;
6869 tree arg0 = CALL_EXPR_ARG (exp, 0);
6870 tree arg1 = CALL_EXPR_ARG (exp, 1);
6871
6872 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6873 {
6874 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6875 return NULL_RTX;
6876 }
6877
6878 if (!flag_inline_atomics)
6879 return NULL_RTX;
6880
6881 /* If the value is known at compile time, return the RTX for it. */
6882 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6883 if (size == boolean_true_node)
6884 return const1_rtx;
6885
6886 return NULL_RTX;
6887 }
6888
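/* Illustrative example (not part of GCC itself): unlike
   __atomic_always_lock_free, __atomic_is_lock_free may depend on the actual
   object, so when the expander above returns NULL_RTX the call falls back to
   the runtime library (typically libatomic).  A sketch, where the two
   use_* functions are hypothetical:

   struct { char buf[16]; } big;

   if (__atomic_is_lock_free (sizeof big, &big))
     use_lock_free_path ();
   else
     use_locked_path ();
*/
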
6889 /* Expand the __atomic_thread_fence intrinsic:
6890 void __atomic_thread_fence (enum memmodel)
6891 EXP is the CALL_EXPR. */
6892
6893 static void
6894 expand_builtin_atomic_thread_fence (tree exp)
6895 {
6896 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6897 expand_mem_thread_fence (model);
6898 }
6899
6900 /* Expand the __atomic_signal_fence intrinsic:
6901 void __atomic_signal_fence (enum memmodel)
6902 EXP is the CALL_EXPR. */
6903
6904 static void
6905 expand_builtin_atomic_signal_fence (tree exp)
6906 {
6907 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6908 expand_mem_signal_fence (model);
6909 }
6910
6911 /* Expand the __sync_synchronize intrinsic. */
6912
6913 static void
6914 expand_builtin_sync_synchronize (void)
6915 {
6916 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6917 }
6918
6919 static rtx
6920 expand_builtin_thread_pointer (tree exp, rtx target)
6921 {
6922 enum insn_code icode;
6923 if (!validate_arglist (exp, VOID_TYPE))
6924 return const0_rtx;
6925 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6926 if (icode != CODE_FOR_nothing)
6927 {
6928 class expand_operand op;
6929 /* If the target is not suitable then create a new target. */
6930 if (target == NULL_RTX
6931 || !REG_P (target)
6932 || GET_MODE (target) != Pmode)
6933 target = gen_reg_rtx (Pmode);
6934 create_output_operand (&op, target, Pmode);
6935 expand_insn (icode, 1, &op);
6936 return target;
6937 }
6938 error ("%<__builtin_thread_pointer%> is not supported on this target");
6939 return const0_rtx;
6940 }
6941
6942 static void
6943 expand_builtin_set_thread_pointer (tree exp)
6944 {
6945 enum insn_code icode;
6946 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6947 return;
6948 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6949 if (icode != CODE_FOR_nothing)
6950 {
6951 class expand_operand op;
6952 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6953 Pmode, EXPAND_NORMAL);
6954 create_input_operand (&op, val, Pmode);
6955 expand_insn (icode, 1, &op);
6956 return;
6957 }
6958 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6959 }
6960
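/* Illustrative example (not part of GCC itself): on targets that provide
   get_thread_pointer_optab/set_thread_pointer_optab the two expanders above
   map directly onto the thread pointer register.  A hypothetical use in
   low-level runtime code:

   void *old_tcb = __builtin_thread_pointer ();
   __builtin_set_thread_pointer (new_tcb);

   where new_tcb is a hypothetical replacement thread control block.  On
   targets without those optabs the calls are rejected with the errors
   emitted above.  */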
6961 \f
6962 /* Emit code to restore a previously saved value of the stack. */
6963
6964 static void
6965 expand_stack_restore (tree var)
6966 {
6967 rtx_insn *prev;
6968 rtx sa = expand_normal (var);
6969
6970 sa = convert_memory_address (Pmode, sa);
6971
6972 prev = get_last_insn ();
6973 emit_stack_restore (SAVE_BLOCK, sa);
6974
6975 record_new_stack_level ();
6976
6977 fixup_args_size_notes (prev, get_last_insn (), 0);
6978 }
6979
6980 /* Emit code to save the current value of the stack. */
6981
6982 static rtx
6983 expand_stack_save (void)
6984 {
6985 rtx ret = NULL_RTX;
6986
6987 emit_stack_save (SAVE_BLOCK, &ret);
6988 return ret;
6989 }
6990
6991 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6992
6993 static rtx
6994 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6995 {
6996 const char *name;
6997 rtx fallback_retval;
6998 rtx_insn *(*gen_fn) (rtx, rtx);
6999 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7000 {
7001 case BUILT_IN_GOACC_PARLEVEL_ID:
7002 name = "__builtin_goacc_parlevel_id";
7003 fallback_retval = const0_rtx;
7004 gen_fn = targetm.gen_oacc_dim_pos;
7005 break;
7006 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7007 name = "__builtin_goacc_parlevel_size";
7008 fallback_retval = const1_rtx;
7009 gen_fn = targetm.gen_oacc_dim_size;
7010 break;
7011 default:
7012 gcc_unreachable ();
7013 }
7014
7015 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7016 {
7017 error ("%qs only supported in OpenACC code", name);
7018 return const0_rtx;
7019 }
7020
7021 tree arg = CALL_EXPR_ARG (exp, 0);
7022 if (TREE_CODE (arg) != INTEGER_CST)
7023 {
7024 error ("non-constant argument 0 to %qs", name);
7025 return const0_rtx;
7026 }
7027
7028 int dim = TREE_INT_CST_LOW (arg);
7029 switch (dim)
7030 {
7031 case GOMP_DIM_GANG:
7032 case GOMP_DIM_WORKER:
7033 case GOMP_DIM_VECTOR:
7034 break;
7035 default:
7036 error ("illegal argument 0 to %qs", name);
7037 return const0_rtx;
7038 }
7039
7040 if (ignore)
7041 return target;
7042
7043 if (target == NULL_RTX)
7044 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7045
7046 if (!targetm.have_oacc_dim_size ())
7047 {
7048 emit_move_insn (target, fallback_retval);
7049 return target;
7050 }
7051
7052 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7053 emit_insn (gen_fn (reg, GEN_INT (dim)));
7054 if (reg != target)
7055 emit_move_insn (target, reg);
7056
7057 return target;
7058 }
7059
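/* Illustrative example (not part of GCC itself): inside OpenACC offloaded
   code the two builtins handled above take one of the GOMP_DIM_* constants
   from gomp-constants.h and return the position within, or the size of,
   that dimension, e.g.

   int gang = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);
   int workers = __builtin_goacc_parlevel_size (GOMP_DIM_WORKER);

   Outside of OpenACC code the expander reports an error; on targets without
   the oacc_dim_pos/oacc_dim_size patterns the id falls back to 0 and the
   size to 1, as set up above.  */
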
7060 /* Expand a string compare operation using a sequence of char comparisons
7061 to get rid of the calling overhead, with result going to TARGET if
7062 that's convenient.
7063
7064 VAR_STR is the variable string source;
7065 CONST_STR is the constant string source;
7066 LENGTH is the number of chars to compare;
7067 CONST_STR_N indicates which source string is the constant string;
7068 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7069
7070 The call is expanded to: (assuming const_str_n is 2, i.e., arg2 is a constant string)
7071
7072 target = (int) (unsigned char) var_str[0]
7073 - (int) (unsigned char) const_str[0];
7074 if (target != 0)
7075 goto ne_label;
7076 ...
7077 target = (int) (unsigned char) var_str[length - 2]
7078 - (int) (unsigned char) const_str[length - 2];
7079 if (target != 0)
7080 goto ne_label;
7081 target = (int) (unsigned char) var_str[length - 1]
7082 - (int) (unsigned char) const_str[length - 1];
7083 ne_label:
7084 */
7085
7086 static rtx
7087 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7088 unsigned HOST_WIDE_INT length,
7089 int const_str_n, machine_mode mode)
7090 {
7091 HOST_WIDE_INT offset = 0;
7092 rtx var_rtx_array
7093 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7094 rtx var_rtx = NULL_RTX;
7095 rtx const_rtx = NULL_RTX;
7096 rtx result = target ? target : gen_reg_rtx (mode);
7097 rtx_code_label *ne_label = gen_label_rtx ();
7098 tree unit_type_node = unsigned_char_type_node;
7099 scalar_int_mode unit_mode
7100 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7101
7102 start_sequence ();
7103
7104 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7105 {
7106 var_rtx
7107 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7108 const_rtx = c_readstr (const_str + offset, unit_mode);
7109 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7110 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7111
7112 op0 = convert_modes (mode, unit_mode, op0, 1);
7113 op1 = convert_modes (mode, unit_mode, op1, 1);
7114 result = expand_simple_binop (mode, MINUS, op0, op1,
7115 result, 1, OPTAB_WIDEN);
7116 if (i < length - 1)
7117 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7118 mode, true, ne_label);
7119 offset += GET_MODE_SIZE (unit_mode);
7120 }
7121
7122 emit_label (ne_label);
7123 rtx_insn *insns = get_insns ();
7124 end_sequence ();
7125 emit_insn (insns);
7126
7127 return result;
7128 }
7129
7130 /* Inline expand a call to str(n)cmp or memcmp, with the result going to
7131 TARGET if that's convenient.
7132 If the call cannot be inlined, return NULL_RTX. */
7133 static rtx
7134 inline_expand_builtin_string_cmp (tree exp, rtx target)
7135 {
7136 tree fndecl = get_callee_fndecl (exp);
7137 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7138 unsigned HOST_WIDE_INT length = 0;
7139 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7140
7141 /* Do NOT apply this inlining expansion when optimizing for size or
7142 optimization level below 2. */
7143 if (optimize < 2 || optimize_insn_for_size_p ())
7144 return NULL_RTX;
7145
7146 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7147 || fcode == BUILT_IN_STRNCMP
7148 || fcode == BUILT_IN_MEMCMP);
7149
7150 /* On a target where the type of the call (int) has the same or narrower precision
7151 than unsigned char, give up on the inlining expansion. */
7152 if (TYPE_PRECISION (unsigned_char_type_node)
7153 >= TYPE_PRECISION (TREE_TYPE (exp)))
7154 return NULL_RTX;
7155
7156 tree arg1 = CALL_EXPR_ARG (exp, 0);
7157 tree arg2 = CALL_EXPR_ARG (exp, 1);
7158 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7159
7160 unsigned HOST_WIDE_INT len1 = 0;
7161 unsigned HOST_WIDE_INT len2 = 0;
7162 unsigned HOST_WIDE_INT len3 = 0;
7163
7164 const char *src_str1 = c_getstr (arg1, &len1);
7165 const char *src_str2 = c_getstr (arg2, &len2);
7166
7167 /* If neither string is a constant string, the call does not qualify. */
7168 if (!src_str1 && !src_str2)
7169 return NULL_RTX;
7170
7171 /* For strncmp, if the length is not a constant, the call does not qualify. */
7172 if (is_ncmp)
7173 {
7174 if (!tree_fits_uhwi_p (len3_tree))
7175 return NULL_RTX;
7176 else
7177 len3 = tree_to_uhwi (len3_tree);
7178 }
7179
7180 if (src_str1 != NULL)
7181 len1 = strnlen (src_str1, len1) + 1;
7182
7183 if (src_str2 != NULL)
7184 len2 = strnlen (src_str2, len2) + 1;
7185
7186 int const_str_n = 0;
7187 if (!len1)
7188 const_str_n = 2;
7189 else if (!len2)
7190 const_str_n = 1;
7191 else if (len2 > len1)
7192 const_str_n = 1;
7193 else
7194 const_str_n = 2;
7195
7196 gcc_checking_assert (const_str_n > 0);
7197 length = (const_str_n == 1) ? len1 : len2;
7198
7199 if (is_ncmp && len3 < length)
7200 length = len3;
7201
7202 /* If the length of the comparison is larger than the threshold,
7203 do nothing. */
7204 if (length > (unsigned HOST_WIDE_INT)
7205 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7206 return NULL_RTX;
7207
7208 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7209
7210 /* Now, start inline expansion of the call. */
7211 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7212 (const_str_n == 1) ? src_str1 : src_str2, length,
7213 const_str_n, mode);
7214 }
7215
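/* Illustrative example (not part of GCC itself): with -O2 or higher and not
   optimizing for size, a call such as

   if (strcmp (mode, "rw") == 0)
     ...

   can qualify for the expansion above: "rw" is a constant string, so its
   length (including the terminating NUL) is known, and if that length does
   not exceed the builtin-string-cmp-inline-length parameter consulted above
   the call is replaced by the byte-by-byte comparison generated by
   inline_string_cmp.  */
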
7216 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7217 represents the size of the first argument to that call, or VOIDmode
7218 if the argument is a pointer. IGNORE will be true if the result
7219 isn't used. */
7220 static rtx
7221 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7222 bool ignore)
7223 {
7224 rtx val, failsafe;
7225 unsigned nargs = call_expr_nargs (exp);
7226
7227 tree arg0 = CALL_EXPR_ARG (exp, 0);
7228
7229 if (mode == VOIDmode)
7230 {
7231 mode = TYPE_MODE (TREE_TYPE (arg0));
7232 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7233 }
7234
7235 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7236
7237 /* An optional second argument can be used as a failsafe value on
7238 some machines. If it isn't present, then the failsafe value is
7239 assumed to be 0. */
7240 if (nargs > 1)
7241 {
7242 tree arg1 = CALL_EXPR_ARG (exp, 1);
7243 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7244 }
7245 else
7246 failsafe = const0_rtx;
7247
7248 /* If the result isn't used, the behavior is undefined. It would be
7249 nice to emit a warning here, but path splitting means this might
7250 happen with legitimate code. So simply drop the builtin
7251 expansion in that case; we've handled any side-effects above. */
7252 if (ignore)
7253 return const0_rtx;
7254
7255 /* If we don't have a suitable target, create one to hold the result. */
7256 if (target == NULL || GET_MODE (target) != mode)
7257 target = gen_reg_rtx (mode);
7258
7259 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7260 val = convert_modes (mode, VOIDmode, val, false);
7261
7262 return targetm.speculation_safe_value (mode, target, val, failsafe);
7263 }
7264
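/* Illustrative example (not part of GCC itself): the expander above backs
   __builtin_speculation_safe_value, which is typically used to restrict
   speculative execution past a bounds check (Spectre variant 1 style).
   TABLE_SIZE and table below are hypothetical:

   int load_element (unsigned idx)
   {
     if (idx < TABLE_SIZE)
       return table[__builtin_speculation_safe_value (idx)];
     return 0;
   }

   On targets that do not implement a speculation barrier the value is
   simply passed through.  */
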
7265 /* Expand an expression EXP that calls a built-in function,
7266 with result going to TARGET if that's convenient
7267 (and in mode MODE if that's convenient).
7268 SUBTARGET may be used as the target for computing one of EXP's operands.
7269 IGNORE is nonzero if the value is to be ignored. */
7270
7271 rtx
7272 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7273 int ignore)
7274 {
7275 tree fndecl = get_callee_fndecl (exp);
7276 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7277 int flags;
7278
7279 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7280 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7281
7282 /* When ASan is enabled, we don't want to expand some memory/string
7283 builtins; instead we rely on libsanitizer's hooks. This allows us to avoid
7284 redundant checks and be sure that a possible overflow will be detected
7285 by ASan. */
7286
7287 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7288 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7289 return expand_call (exp, target, ignore);
7290
7291 /* When not optimizing, generate calls to library functions for a certain
7292 set of builtins. */
7293 if (!optimize
7294 && !called_as_built_in (fndecl)
7295 && fcode != BUILT_IN_FORK
7296 && fcode != BUILT_IN_EXECL
7297 && fcode != BUILT_IN_EXECV
7298 && fcode != BUILT_IN_EXECLP
7299 && fcode != BUILT_IN_EXECLE
7300 && fcode != BUILT_IN_EXECVP
7301 && fcode != BUILT_IN_EXECVE
7302 && !ALLOCA_FUNCTION_CODE_P (fcode)
7303 && fcode != BUILT_IN_FREE)
7304 return expand_call (exp, target, ignore);
7305
7306 /* The built-in function expanders test for target == const0_rtx
7307 to determine whether the function's result will be ignored. */
7308 if (ignore)
7309 target = const0_rtx;
7310
7311 /* If the result of a pure or const built-in function is ignored, and
7312 none of its arguments are volatile, we can avoid expanding the
7313 built-in call and just evaluate the arguments for side-effects. */
7314 if (target == const0_rtx
7315 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7316 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7317 {
7318 bool volatilep = false;
7319 tree arg;
7320 call_expr_arg_iterator iter;
7321
7322 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7323 if (TREE_THIS_VOLATILE (arg))
7324 {
7325 volatilep = true;
7326 break;
7327 }
7328
7329 if (! volatilep)
7330 {
7331 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7332 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7333 return const0_rtx;
7334 }
7335 }
7336
7337 switch (fcode)
7338 {
7339 CASE_FLT_FN (BUILT_IN_FABS):
7340 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7341 case BUILT_IN_FABSD32:
7342 case BUILT_IN_FABSD64:
7343 case BUILT_IN_FABSD128:
7344 target = expand_builtin_fabs (exp, target, subtarget);
7345 if (target)
7346 return target;
7347 break;
7348
7349 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7350 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7351 target = expand_builtin_copysign (exp, target, subtarget);
7352 if (target)
7353 return target;
7354 break;
7355
7356 /* Just do a normal library call if we were unable to fold
7357 the values. */
7358 CASE_FLT_FN (BUILT_IN_CABS):
7359 break;
7360
7361 CASE_FLT_FN (BUILT_IN_FMA):
7362 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7363 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7364 if (target)
7365 return target;
7366 break;
7367
7368 CASE_FLT_FN (BUILT_IN_ILOGB):
7369 if (! flag_unsafe_math_optimizations)
7370 break;
7371 gcc_fallthrough ();
7372 CASE_FLT_FN (BUILT_IN_ISINF):
7373 CASE_FLT_FN (BUILT_IN_FINITE):
7374 case BUILT_IN_ISFINITE:
7375 case BUILT_IN_ISNORMAL:
7376 target = expand_builtin_interclass_mathfn (exp, target);
7377 if (target)
7378 return target;
7379 break;
7380
7381 CASE_FLT_FN (BUILT_IN_ICEIL):
7382 CASE_FLT_FN (BUILT_IN_LCEIL):
7383 CASE_FLT_FN (BUILT_IN_LLCEIL):
7384 CASE_FLT_FN (BUILT_IN_LFLOOR):
7385 CASE_FLT_FN (BUILT_IN_IFLOOR):
7386 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7387 target = expand_builtin_int_roundingfn (exp, target);
7388 if (target)
7389 return target;
7390 break;
7391
7392 CASE_FLT_FN (BUILT_IN_IRINT):
7393 CASE_FLT_FN (BUILT_IN_LRINT):
7394 CASE_FLT_FN (BUILT_IN_LLRINT):
7395 CASE_FLT_FN (BUILT_IN_IROUND):
7396 CASE_FLT_FN (BUILT_IN_LROUND):
7397 CASE_FLT_FN (BUILT_IN_LLROUND):
7398 target = expand_builtin_int_roundingfn_2 (exp, target);
7399 if (target)
7400 return target;
7401 break;
7402
7403 CASE_FLT_FN (BUILT_IN_POWI):
7404 target = expand_builtin_powi (exp, target);
7405 if (target)
7406 return target;
7407 break;
7408
7409 CASE_FLT_FN (BUILT_IN_CEXPI):
7410 target = expand_builtin_cexpi (exp, target);
7411 gcc_assert (target);
7412 return target;
7413
7414 CASE_FLT_FN (BUILT_IN_SIN):
7415 CASE_FLT_FN (BUILT_IN_COS):
7416 if (! flag_unsafe_math_optimizations)
7417 break;
7418 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7419 if (target)
7420 return target;
7421 break;
7422
7423 CASE_FLT_FN (BUILT_IN_SINCOS):
7424 if (! flag_unsafe_math_optimizations)
7425 break;
7426 target = expand_builtin_sincos (exp);
7427 if (target)
7428 return target;
7429 break;
7430
7431 case BUILT_IN_APPLY_ARGS:
7432 return expand_builtin_apply_args ();
7433
7434 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7435 FUNCTION with a copy of the parameters described by
7436 ARGUMENTS, and ARGSIZE. It returns a block of memory
7437 allocated on the stack into which is stored all the registers
7438 that might possibly be used for returning the result of a
7439 function. ARGUMENTS is the value returned by
7440 __builtin_apply_args. ARGSIZE is the number of bytes of
7441 arguments that must be copied. ??? How should this value be
7442 computed? We'll also need a safe worst case value for varargs
7443 functions. */
7444 case BUILT_IN_APPLY:
7445 if (!validate_arglist (exp, POINTER_TYPE,
7446 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7447 && !validate_arglist (exp, REFERENCE_TYPE,
7448 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7449 return const0_rtx;
7450 else
7451 {
7452 rtx ops[3];
7453
7454 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7455 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7456 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7457
7458 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7459 }
7460
7461 /* __builtin_return (RESULT) causes the function to return the
7462 value described by RESULT. RESULT is address of the block of
7463 memory returned by __builtin_apply. */
7464 case BUILT_IN_RETURN:
7465 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7466 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7467 return const0_rtx;
7468
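/* Illustrative example (not part of GCC itself) of the
   __builtin_apply_args/__builtin_apply/__builtin_return triple handled
   above, as it might appear in a hypothetical forwarding wrapper:

   double target_fn (int, double);

   double wrapper (int i, double d)
   {
     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (result);
   }

   The 64 here is a hypothetical upper bound on the size of the argument
   block; as the comment above notes, computing a safe value is an open
   question.  */
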
7469 case BUILT_IN_SAVEREGS:
7470 return expand_builtin_saveregs ();
7471
7472 case BUILT_IN_VA_ARG_PACK:
7473 /* All valid uses of __builtin_va_arg_pack () are removed during
7474 inlining. */
7475 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7476 return const0_rtx;
7477
7478 case BUILT_IN_VA_ARG_PACK_LEN:
7479 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7480 inlining. */
7481 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7482 return const0_rtx;
7483
7484 /* Return the address of the first anonymous stack arg. */
7485 case BUILT_IN_NEXT_ARG:
7486 if (fold_builtin_next_arg (exp, false))
7487 return const0_rtx;
7488 return expand_builtin_next_arg ();
7489
7490 case BUILT_IN_CLEAR_CACHE:
7491 target = expand_builtin___clear_cache (exp);
7492 if (target)
7493 return target;
7494 break;
7495
7496 case BUILT_IN_CLASSIFY_TYPE:
7497 return expand_builtin_classify_type (exp);
7498
7499 case BUILT_IN_CONSTANT_P:
7500 return const0_rtx;
7501
7502 case BUILT_IN_FRAME_ADDRESS:
7503 case BUILT_IN_RETURN_ADDRESS:
7504 return expand_builtin_frame_address (fndecl, exp);
7505
7506 /* Returns the address of the area where the structure value is returned,
7507 or 0 otherwise. */
7508 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7509 if (call_expr_nargs (exp) != 0
7510 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7511 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7512 return const0_rtx;
7513 else
7514 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7515
7516 CASE_BUILT_IN_ALLOCA:
7517 target = expand_builtin_alloca (exp);
7518 if (target)
7519 return target;
7520 break;
7521
7522 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7523 return expand_asan_emit_allocas_unpoison (exp);
7524
7525 case BUILT_IN_STACK_SAVE:
7526 return expand_stack_save ();
7527
7528 case BUILT_IN_STACK_RESTORE:
7529 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7530 return const0_rtx;
7531
7532 case BUILT_IN_BSWAP16:
7533 case BUILT_IN_BSWAP32:
7534 case BUILT_IN_BSWAP64:
7535 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7536 if (target)
7537 return target;
7538 break;
7539
7540 CASE_INT_FN (BUILT_IN_FFS):
7541 target = expand_builtin_unop (target_mode, exp, target,
7542 subtarget, ffs_optab);
7543 if (target)
7544 return target;
7545 break;
7546
7547 CASE_INT_FN (BUILT_IN_CLZ):
7548 target = expand_builtin_unop (target_mode, exp, target,
7549 subtarget, clz_optab);
7550 if (target)
7551 return target;
7552 break;
7553
7554 CASE_INT_FN (BUILT_IN_CTZ):
7555 target = expand_builtin_unop (target_mode, exp, target,
7556 subtarget, ctz_optab);
7557 if (target)
7558 return target;
7559 break;
7560
7561 CASE_INT_FN (BUILT_IN_CLRSB):
7562 target = expand_builtin_unop (target_mode, exp, target,
7563 subtarget, clrsb_optab);
7564 if (target)
7565 return target;
7566 break;
7567
7568 CASE_INT_FN (BUILT_IN_POPCOUNT):
7569 target = expand_builtin_unop (target_mode, exp, target,
7570 subtarget, popcount_optab);
7571 if (target)
7572 return target;
7573 break;
7574
7575 CASE_INT_FN (BUILT_IN_PARITY):
7576 target = expand_builtin_unop (target_mode, exp, target,
7577 subtarget, parity_optab);
7578 if (target)
7579 return target;
7580 break;
7581
7582 case BUILT_IN_STRLEN:
7583 target = expand_builtin_strlen (exp, target, target_mode);
7584 if (target)
7585 return target;
7586 break;
7587
7588 case BUILT_IN_STRNLEN:
7589 target = expand_builtin_strnlen (exp, target, target_mode);
7590 if (target)
7591 return target;
7592 break;
7593
7594 case BUILT_IN_STRCAT:
7595 target = expand_builtin_strcat (exp, target);
7596 if (target)
7597 return target;
7598 break;
7599
7600 case BUILT_IN_STRCPY:
7601 target = expand_builtin_strcpy (exp, target);
7602 if (target)
7603 return target;
7604 break;
7605
7606 case BUILT_IN_STRNCAT:
7607 target = expand_builtin_strncat (exp, target);
7608 if (target)
7609 return target;
7610 break;
7611
7612 case BUILT_IN_STRNCPY:
7613 target = expand_builtin_strncpy (exp, target);
7614 if (target)
7615 return target;
7616 break;
7617
7618 case BUILT_IN_STPCPY:
7619 target = expand_builtin_stpcpy (exp, target, mode);
7620 if (target)
7621 return target;
7622 break;
7623
7624 case BUILT_IN_STPNCPY:
7625 target = expand_builtin_stpncpy (exp, target);
7626 if (target)
7627 return target;
7628 break;
7629
7630 case BUILT_IN_MEMCHR:
7631 target = expand_builtin_memchr (exp, target);
7632 if (target)
7633 return target;
7634 break;
7635
7636 case BUILT_IN_MEMCPY:
7637 target = expand_builtin_memcpy (exp, target);
7638 if (target)
7639 return target;
7640 break;
7641
7642 case BUILT_IN_MEMMOVE:
7643 target = expand_builtin_memmove (exp, target);
7644 if (target)
7645 return target;
7646 break;
7647
7648 case BUILT_IN_MEMPCPY:
7649 target = expand_builtin_mempcpy (exp, target);
7650 if (target)
7651 return target;
7652 break;
7653
7654 case BUILT_IN_MEMSET:
7655 target = expand_builtin_memset (exp, target, mode);
7656 if (target)
7657 return target;
7658 break;
7659
7660 case BUILT_IN_BZERO:
7661 target = expand_builtin_bzero (exp);
7662 if (target)
7663 return target;
7664 break;
7665
7666 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7667 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7668 when changing it to a strcmp call. */
7669 case BUILT_IN_STRCMP_EQ:
7670 target = expand_builtin_memcmp (exp, target, true);
7671 if (target)
7672 return target;
7673
7674 /* Change this call back to a BUILT_IN_STRCMP. */
7675 TREE_OPERAND (exp, 1)
7676 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7677
7678 /* Delete the last parameter. */
7679 unsigned int i;
7680 vec<tree, va_gc> *arg_vec;
7681 vec_alloc (arg_vec, 2);
7682 for (i = 0; i < 2; i++)
7683 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7684 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7685 /* FALLTHROUGH */
7686
7687 case BUILT_IN_STRCMP:
7688 target = expand_builtin_strcmp (exp, target);
7689 if (target)
7690 return target;
7691 break;
7692
7693 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7694 back to a BUILT_IN_STRNCMP. */
7695 case BUILT_IN_STRNCMP_EQ:
7696 target = expand_builtin_memcmp (exp, target, true);
7697 if (target)
7698 return target;
7699
7700 /* Change it back to a BUILT_IN_STRNCMP. */
7701 TREE_OPERAND (exp, 1)
7702 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7703 /* FALLTHROUGH */
7704
7705 case BUILT_IN_STRNCMP:
7706 target = expand_builtin_strncmp (exp, target, mode);
7707 if (target)
7708 return target;
7709 break;
7710
7711 case BUILT_IN_BCMP:
7712 case BUILT_IN_MEMCMP:
7713 case BUILT_IN_MEMCMP_EQ:
7714 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7715 if (target)
7716 return target;
7717 if (fcode == BUILT_IN_MEMCMP_EQ)
7718 {
7719 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7720 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7721 }
7722 break;
7723
7724 case BUILT_IN_SETJMP:
7725 /* This should have been lowered to the builtins below. */
7726 gcc_unreachable ();
7727
7728 case BUILT_IN_SETJMP_SETUP:
7729 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7730 and the receiver label. */
7731 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7732 {
7733 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7734 VOIDmode, EXPAND_NORMAL);
7735 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7736 rtx_insn *label_r = label_rtx (label);
7737
7738 /* This is copied from the handling of non-local gotos. */
7739 expand_builtin_setjmp_setup (buf_addr, label_r);
7740 nonlocal_goto_handler_labels
7741 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7742 nonlocal_goto_handler_labels);
7743 /* ??? Do not let expand_label treat us as such since we would
7744 not want to be both on the list of non-local labels and on
7745 the list of forced labels. */
7746 FORCED_LABEL (label) = 0;
7747 return const0_rtx;
7748 }
7749 break;
7750
7751 case BUILT_IN_SETJMP_RECEIVER:
7752 /* __builtin_setjmp_receiver is passed the receiver label. */
7753 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7754 {
7755 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7756 rtx_insn *label_r = label_rtx (label);
7757
7758 expand_builtin_setjmp_receiver (label_r);
7759 return const0_rtx;
7760 }
7761 break;
7762
7763 /* __builtin_longjmp is passed a pointer to an array of five words.
7764 It's similar to the C library longjmp function but works with
7765 __builtin_setjmp above. */
7766 case BUILT_IN_LONGJMP:
7767 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7768 {
7769 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7770 VOIDmode, EXPAND_NORMAL);
7771 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7772
7773 if (value != const1_rtx)
7774 {
7775 error ("%<__builtin_longjmp%> second argument must be 1");
7776 return const0_rtx;
7777 }
7778
7779 expand_builtin_longjmp (buf_addr, value);
7780 return const0_rtx;
7781 }
7782 break;
7783
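/* Illustrative example (not part of GCC itself): __builtin_setjmp and
   __builtin_longjmp operate on a five-word buffer, and as enforced above the
   second argument to __builtin_longjmp must be the constant 1.  do_work and
   recover below are hypothetical:

   void *buf[5];

   void handler (void)
   {
     __builtin_longjmp (buf, 1);
   }

   void run (void)
   {
     if (__builtin_setjmp (buf) == 0)
       do_work ();
     else
       recover ();
   }
*/
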
7784 case BUILT_IN_NONLOCAL_GOTO:
7785 target = expand_builtin_nonlocal_goto (exp);
7786 if (target)
7787 return target;
7788 break;
7789
7790 /* This updates the setjmp buffer that is its argument with the value
7791 of the current stack pointer. */
7792 case BUILT_IN_UPDATE_SETJMP_BUF:
7793 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7794 {
7795 rtx buf_addr
7796 = expand_normal (CALL_EXPR_ARG (exp, 0));
7797
7798 expand_builtin_update_setjmp_buf (buf_addr);
7799 return const0_rtx;
7800 }
7801 break;
7802
7803 case BUILT_IN_TRAP:
7804 expand_builtin_trap ();
7805 return const0_rtx;
7806
7807 case BUILT_IN_UNREACHABLE:
7808 expand_builtin_unreachable ();
7809 return const0_rtx;
7810
7811 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7812 case BUILT_IN_SIGNBITD32:
7813 case BUILT_IN_SIGNBITD64:
7814 case BUILT_IN_SIGNBITD128:
7815 target = expand_builtin_signbit (exp, target);
7816 if (target)
7817 return target;
7818 break;
7819
7820 /* Various hooks for the DWARF 2 __throw routine. */
7821 case BUILT_IN_UNWIND_INIT:
7822 expand_builtin_unwind_init ();
7823 return const0_rtx;
7824 case BUILT_IN_DWARF_CFA:
7825 return virtual_cfa_rtx;
7826 #ifdef DWARF2_UNWIND_INFO
7827 case BUILT_IN_DWARF_SP_COLUMN:
7828 return expand_builtin_dwarf_sp_column ();
7829 case BUILT_IN_INIT_DWARF_REG_SIZES:
7830 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7831 return const0_rtx;
7832 #endif
7833 case BUILT_IN_FROB_RETURN_ADDR:
7834 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7835 case BUILT_IN_EXTRACT_RETURN_ADDR:
7836 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7837 case BUILT_IN_EH_RETURN:
7838 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7839 CALL_EXPR_ARG (exp, 1));
7840 return const0_rtx;
7841 case BUILT_IN_EH_RETURN_DATA_REGNO:
7842 return expand_builtin_eh_return_data_regno (exp);
7843 case BUILT_IN_EXTEND_POINTER:
7844 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7845 case BUILT_IN_EH_POINTER:
7846 return expand_builtin_eh_pointer (exp);
7847 case BUILT_IN_EH_FILTER:
7848 return expand_builtin_eh_filter (exp);
7849 case BUILT_IN_EH_COPY_VALUES:
7850 return expand_builtin_eh_copy_values (exp);
7851
7852 case BUILT_IN_VA_START:
7853 return expand_builtin_va_start (exp);
7854 case BUILT_IN_VA_END:
7855 return expand_builtin_va_end (exp);
7856 case BUILT_IN_VA_COPY:
7857 return expand_builtin_va_copy (exp);
7858 case BUILT_IN_EXPECT:
7859 return expand_builtin_expect (exp, target);
7860 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7861 return expand_builtin_expect_with_probability (exp, target);
7862 case BUILT_IN_ASSUME_ALIGNED:
7863 return expand_builtin_assume_aligned (exp, target);
7864 case BUILT_IN_PREFETCH:
7865 expand_builtin_prefetch (exp);
7866 return const0_rtx;
7867
7868 case BUILT_IN_INIT_TRAMPOLINE:
7869 return expand_builtin_init_trampoline (exp, true);
7870 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7871 return expand_builtin_init_trampoline (exp, false);
7872 case BUILT_IN_ADJUST_TRAMPOLINE:
7873 return expand_builtin_adjust_trampoline (exp);
7874
7875 case BUILT_IN_INIT_DESCRIPTOR:
7876 return expand_builtin_init_descriptor (exp);
7877 case BUILT_IN_ADJUST_DESCRIPTOR:
7878 return expand_builtin_adjust_descriptor (exp);
7879
7880 case BUILT_IN_FORK:
7881 case BUILT_IN_EXECL:
7882 case BUILT_IN_EXECV:
7883 case BUILT_IN_EXECLP:
7884 case BUILT_IN_EXECLE:
7885 case BUILT_IN_EXECVP:
7886 case BUILT_IN_EXECVE:
7887 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7888 if (target)
7889 return target;
7890 break;
7891
7892 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7893 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7894 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7895 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7896 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7897 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7898 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7899 if (target)
7900 return target;
7901 break;
7902
7903 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7904 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7905 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7906 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7907 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7908 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7909 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7910 if (target)
7911 return target;
7912 break;
7913
7914 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7915 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7916 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7917 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7918 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7919 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7920 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7921 if (target)
7922 return target;
7923 break;
7924
7925 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7926 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7927 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7928 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7929 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7930 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7931 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7932 if (target)
7933 return target;
7934 break;
7935
7936 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7937 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7938 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7939 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7940 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7941 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7942 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7943 if (target)
7944 return target;
7945 break;
7946
7947 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7948 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7949 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7950 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7951 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7952 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7953 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7954 if (target)
7955 return target;
7956 break;
7957
7958 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7959 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7960 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7961 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7962 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7963 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7964 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7965 if (target)
7966 return target;
7967 break;
7968
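/* Illustrative example (not part of GCC itself): the AFTER flag passed to
   expand_builtin_sync_operation distinguishes the two families.  Given

   int counter;
   int before = __sync_fetch_and_add (&counter, 1);
   int after  = __sync_add_and_fetch (&counter, 1);

   the first call yields the value of counter before the addition and the
   second the value after it; both act as full memory barriers.  */
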
7969 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7970 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7971 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7972 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7973 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7974 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7975 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7976 if (target)
7977 return target;
7978 break;
7979
7980 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7981 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7982 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7983 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7984 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7985 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7986 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7987 if (target)
7988 return target;
7989 break;
7990
7991 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7992 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7993 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7994 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7995 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7996 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7997 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7998 if (target)
7999 return target;
8000 break;
8001
8002 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8003 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8004 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8005 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8006 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8007 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8008 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8009 if (target)
8010 return target;
8011 break;
8012
8013 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8014 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8015 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8016 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8017 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8018 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8019 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8020 if (target)
8021 return target;
8022 break;
8023
8024 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8025 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8026 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8027 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8028 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8029 if (mode == VOIDmode)
8030 mode = TYPE_MODE (boolean_type_node);
8031 if (!target || !register_operand (target, mode))
8032 target = gen_reg_rtx (mode);
8033
8034 mode = get_builtin_sync_mode
8035 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8036 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8037 if (target)
8038 return target;
8039 break;
8040
8041 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8042 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8043 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8044 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8045 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8046 mode = get_builtin_sync_mode
8047 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8048 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8049 if (target)
8050 return target;
8051 break;
8052
8053 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8054 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8055 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8056 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8057 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8058 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8059 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8060 if (target)
8061 return target;
8062 break;
8063
8064 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8065 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8066 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8067 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8068 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8069 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8070 expand_builtin_sync_lock_release (mode, exp);
8071 return const0_rtx;
8072
8073 case BUILT_IN_SYNC_SYNCHRONIZE:
8074 expand_builtin_sync_synchronize ();
8075 return const0_rtx;
8076
8077 case BUILT_IN_ATOMIC_EXCHANGE_1:
8078 case BUILT_IN_ATOMIC_EXCHANGE_2:
8079 case BUILT_IN_ATOMIC_EXCHANGE_4:
8080 case BUILT_IN_ATOMIC_EXCHANGE_8:
8081 case BUILT_IN_ATOMIC_EXCHANGE_16:
8082 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8083 target = expand_builtin_atomic_exchange (mode, exp, target);
8084 if (target)
8085 return target;
8086 break;
8087
8088 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8089 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8090 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8091 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8092 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8093 {
8094 unsigned int nargs, z;
8095 vec<tree, va_gc> *vec;
8096
8097 mode =
8098 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8099 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8100 if (target)
8101 return target;
8102
8103 /* If this is turned into an external library call, the weak parameter
8104 must be dropped to match the expected parameter list. */
8105 nargs = call_expr_nargs (exp);
8106 vec_alloc (vec, nargs - 1);
8107 for (z = 0; z < 3; z++)
8108 vec->quick_push (CALL_EXPR_ARG (exp, z));
8109 /* Skip the boolean weak parameter. */
8110 for (z = 4; z < 6; z++)
8111 vec->quick_push (CALL_EXPR_ARG (exp, z));
8112 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8113 break;
8114 }
8115
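/* Illustrative example (not part of GCC itself): the boolean "weak" argument
   skipped above is only present in the builtin form, e.g.

   bool ok = __atomic_compare_exchange_n (&v, &expected, desired,
                                          0, __ATOMIC_SEQ_CST,
                                          __ATOMIC_SEQ_CST);

   whereas the external library routine the call may be turned into takes no
   such argument, which is why it is dropped when rebuilding the CALL_EXPR
   here.  */
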
8116 case BUILT_IN_ATOMIC_LOAD_1:
8117 case BUILT_IN_ATOMIC_LOAD_2:
8118 case BUILT_IN_ATOMIC_LOAD_4:
8119 case BUILT_IN_ATOMIC_LOAD_8:
8120 case BUILT_IN_ATOMIC_LOAD_16:
8121 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8122 target = expand_builtin_atomic_load (mode, exp, target);
8123 if (target)
8124 return target;
8125 break;
8126
8127 case BUILT_IN_ATOMIC_STORE_1:
8128 case BUILT_IN_ATOMIC_STORE_2:
8129 case BUILT_IN_ATOMIC_STORE_4:
8130 case BUILT_IN_ATOMIC_STORE_8:
8131 case BUILT_IN_ATOMIC_STORE_16:
8132 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8133 target = expand_builtin_atomic_store (mode, exp);
8134 if (target)
8135 return const0_rtx;
8136 break;
8137
8138 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8139 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8140 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8141 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8142 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8143 {
8144 enum built_in_function lib;
8145 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8146 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8147 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8148 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8149 ignore, lib);
8150 if (target)
8151 return target;
8152 break;
8153 }
8154 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8155 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8156 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8157 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8158 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8159 {
8160 enum built_in_function lib;
8161 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8162 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8163 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8164 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8165 ignore, lib);
8166 if (target)
8167 return target;
8168 break;
8169 }
8170 case BUILT_IN_ATOMIC_AND_FETCH_1:
8171 case BUILT_IN_ATOMIC_AND_FETCH_2:
8172 case BUILT_IN_ATOMIC_AND_FETCH_4:
8173 case BUILT_IN_ATOMIC_AND_FETCH_8:
8174 case BUILT_IN_ATOMIC_AND_FETCH_16:
8175 {
8176 enum built_in_function lib;
8177 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8178 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8179 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8180 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8181 ignore, lib);
8182 if (target)
8183 return target;
8184 break;
8185 }
8186 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8187 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8188 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8189 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8190 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8191 {
8192 enum built_in_function lib;
8193 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8194 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8195 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8196 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8197 ignore, lib);
8198 if (target)
8199 return target;
8200 break;
8201 }
8202 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8203 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8204 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8205 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8206 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8207 {
8208 enum built_in_function lib;
8209 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8210 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8211 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8212 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8213 ignore, lib);
8214 if (target)
8215 return target;
8216 break;
8217 }
8218 case BUILT_IN_ATOMIC_OR_FETCH_1:
8219 case BUILT_IN_ATOMIC_OR_FETCH_2:
8220 case BUILT_IN_ATOMIC_OR_FETCH_4:
8221 case BUILT_IN_ATOMIC_OR_FETCH_8:
8222 case BUILT_IN_ATOMIC_OR_FETCH_16:
8223 {
8224 enum built_in_function lib;
8225 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8226 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8227 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8228 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8229 ignore, lib);
8230 if (target)
8231 return target;
8232 break;
8233 }
8234 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8235 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8236 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8237 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8238 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8239 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8240 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8241 ignore, BUILT_IN_NONE);
8242 if (target)
8243 return target;
8244 break;
8245
8246 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8247 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8248 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8249 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8250 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8251 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8252 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8253 ignore, BUILT_IN_NONE);
8254 if (target)
8255 return target;
8256 break;
8257
8258 case BUILT_IN_ATOMIC_FETCH_AND_1:
8259 case BUILT_IN_ATOMIC_FETCH_AND_2:
8260 case BUILT_IN_ATOMIC_FETCH_AND_4:
8261 case BUILT_IN_ATOMIC_FETCH_AND_8:
8262 case BUILT_IN_ATOMIC_FETCH_AND_16:
8263 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8264 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8265 ignore, BUILT_IN_NONE);
8266 if (target)
8267 return target;
8268 break;
8269
8270 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8271 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8272 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8273 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8274 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8275 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8276 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8277 ignore, BUILT_IN_NONE);
8278 if (target)
8279 return target;
8280 break;
8281
8282 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8283 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8284 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8285 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8286 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8287 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8288 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8289 ignore, BUILT_IN_NONE);
8290 if (target)
8291 return target;
8292 break;
8293
8294 case BUILT_IN_ATOMIC_FETCH_OR_1:
8295 case BUILT_IN_ATOMIC_FETCH_OR_2:
8296 case BUILT_IN_ATOMIC_FETCH_OR_4:
8297 case BUILT_IN_ATOMIC_FETCH_OR_8:
8298 case BUILT_IN_ATOMIC_FETCH_OR_16:
8299 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8300 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8301 ignore, BUILT_IN_NONE);
8302 if (target)
8303 return target;
8304 break;
8305
8306 case BUILT_IN_ATOMIC_TEST_AND_SET:
8307 return expand_builtin_atomic_test_and_set (exp, target);
8308
8309 case BUILT_IN_ATOMIC_CLEAR:
8310 return expand_builtin_atomic_clear (exp);
8311
8312 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8313 return expand_builtin_atomic_always_lock_free (exp);
8314
8315 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8316 target = expand_builtin_atomic_is_lock_free (exp);
8317 if (target)
8318 return target;
8319 break;
8320
8321 case BUILT_IN_ATOMIC_THREAD_FENCE:
8322 expand_builtin_atomic_thread_fence (exp);
8323 return const0_rtx;
8324
8325 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8326 expand_builtin_atomic_signal_fence (exp);
8327 return const0_rtx;
8328
8329 case BUILT_IN_OBJECT_SIZE:
8330 return expand_builtin_object_size (exp);
8331
8332 case BUILT_IN_MEMCPY_CHK:
8333 case BUILT_IN_MEMPCPY_CHK:
8334 case BUILT_IN_MEMMOVE_CHK:
8335 case BUILT_IN_MEMSET_CHK:
8336 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8337 if (target)
8338 return target;
8339 break;
8340
8341 case BUILT_IN_STRCPY_CHK:
8342 case BUILT_IN_STPCPY_CHK:
8343 case BUILT_IN_STRNCPY_CHK:
8344 case BUILT_IN_STPNCPY_CHK:
8345 case BUILT_IN_STRCAT_CHK:
8346 case BUILT_IN_STRNCAT_CHK:
8347 case BUILT_IN_SNPRINTF_CHK:
8348 case BUILT_IN_VSNPRINTF_CHK:
8349 maybe_emit_chk_warning (exp, fcode);
8350 break;
8351
8352 case BUILT_IN_SPRINTF_CHK:
8353 case BUILT_IN_VSPRINTF_CHK:
8354 maybe_emit_sprintf_chk_warning (exp, fcode);
8355 break;
8356
8357 case BUILT_IN_FREE:
8358 if (warn_free_nonheap_object)
8359 maybe_emit_free_warning (exp);
8360 break;
8361
8362 case BUILT_IN_THREAD_POINTER:
8363 return expand_builtin_thread_pointer (exp, target);
8364
8365 case BUILT_IN_SET_THREAD_POINTER:
8366 expand_builtin_set_thread_pointer (exp);
8367 return const0_rtx;
8368
8369 case BUILT_IN_ACC_ON_DEVICE:
8370 /* Do library call, if we failed to expand the builtin when
8371 folding. */
8372 break;
8373
8374 case BUILT_IN_GOACC_PARLEVEL_ID:
8375 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8376 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8377
8378 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8379 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8380
8381 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8382 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8383 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8384 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8385 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8386 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8387 return expand_speculation_safe_value (mode, exp, target, ignore);
8388
8389 default: /* just do library call, if unknown builtin */
8390 break;
8391 }
8392
8393 /* The switch statement above can drop through to cause the function
8394 to be called normally. */
8395 return expand_call (exp, target, ignore);
8396 }
8397
8398 /* Determine whether a tree node represents a call to a built-in
8399 function. If the tree T is a call to a built-in function with
8400 the right number of arguments of the appropriate types, return
8401 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8402 Otherwise the return value is END_BUILTINS. */
8403
8404 enum built_in_function
8405 builtin_mathfn_code (const_tree t)
8406 {
8407 const_tree fndecl, arg, parmlist;
8408 const_tree argtype, parmtype;
8409 const_call_expr_arg_iterator iter;
8410
8411 if (TREE_CODE (t) != CALL_EXPR)
8412 return END_BUILTINS;
8413
8414 fndecl = get_callee_fndecl (t);
8415 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8416 return END_BUILTINS;
8417
8418 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8419 init_const_call_expr_arg_iterator (t, &iter);
8420 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8421 {
8422 /* If a function doesn't take a variable number of arguments,
8423 the last element in the list will have type `void'. */
8424 parmtype = TREE_VALUE (parmlist);
8425 if (VOID_TYPE_P (parmtype))
8426 {
8427 if (more_const_call_expr_args_p (&iter))
8428 return END_BUILTINS;
8429 return DECL_FUNCTION_CODE (fndecl);
8430 }
8431
8432 if (! more_const_call_expr_args_p (&iter))
8433 return END_BUILTINS;
8434
8435 arg = next_const_call_expr_arg (&iter);
8436 argtype = TREE_TYPE (arg);
8437
8438 if (SCALAR_FLOAT_TYPE_P (parmtype))
8439 {
8440 if (! SCALAR_FLOAT_TYPE_P (argtype))
8441 return END_BUILTINS;
8442 }
8443 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8444 {
8445 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8446 return END_BUILTINS;
8447 }
8448 else if (POINTER_TYPE_P (parmtype))
8449 {
8450 if (! POINTER_TYPE_P (argtype))
8451 return END_BUILTINS;
8452 }
8453 else if (INTEGRAL_TYPE_P (parmtype))
8454 {
8455 if (! INTEGRAL_TYPE_P (argtype))
8456 return END_BUILTINS;
8457 }
8458 else
8459 return END_BUILTINS;
8460 }
8461
8462 /* Variable-length argument list. */
8463 return DECL_FUNCTION_CODE (fndecl);
8464 }
8465
8466 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8467 evaluate to a constant. */
8468
8469 static tree
8470 fold_builtin_constant_p (tree arg)
8471 {
8472 /* We return 1 for a numeric type that's known to be a constant
8473 value at compile-time or for an aggregate type that's a
8474 literal constant. */
8475 STRIP_NOPS (arg);
8476
8477 /* If we know this is a constant, return the constant one. */
8478 if (CONSTANT_CLASS_P (arg)
8479 || (TREE_CODE (arg) == CONSTRUCTOR
8480 && TREE_CONSTANT (arg)))
8481 return integer_one_node;
8482 if (TREE_CODE (arg) == ADDR_EXPR)
8483 {
8484 tree op = TREE_OPERAND (arg, 0);
8485 if (TREE_CODE (op) == STRING_CST
8486 || (TREE_CODE (op) == ARRAY_REF
8487 && integer_zerop (TREE_OPERAND (op, 1))
8488 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8489 return integer_one_node;
8490 }
8491
8492 /* If this expression has side effects, show we don't know it to be a
8493 constant. Likewise if it's a pointer or aggregate type, since in
8494 those cases we only want literals, which are only optimized
8495 when generating RTL, not later.
8496 And finally, if we are compiling an initializer, not code, we
8497 need to return a definite result now; there's not going to be any
8498 more optimization done. */
8499 if (TREE_SIDE_EFFECTS (arg)
8500 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8501 || POINTER_TYPE_P (TREE_TYPE (arg))
8502 || cfun == 0
8503 || folding_initializer
8504 || force_folding_builtin_constant_p)
8505 return integer_zero_node;
8506
8507 return NULL_TREE;
8508 }
8509
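/* Editorial illustration (a sketch, not part of the GCC sources): with
   the fold above, __builtin_constant_p (3 * 7 + 1) and
   __builtin_constant_p ("abc") become 1 right away, a pointer- or
   aggregate-typed argument that is not a literal becomes 0, and any
   other argument is left alone (NULL_TREE) so later optimization may
   still prove it constant.  */
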
8510 /* Create builtin_expect or builtin_expect_with_probability
8511 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8512 The Fortran FE can also produce builtin_expect with PREDICTOR as a third
8513 argument. builtin_expect_with_probability instead uses its third argument
8514 as the PROBABILITY value. */
8515
8516 static tree
8517 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8518 tree predictor, tree probability)
8519 {
8520 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8521
8522 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8523 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8524 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8525 ret_type = TREE_TYPE (TREE_TYPE (fn));
8526 pred_type = TREE_VALUE (arg_types);
8527 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8528
8529 pred = fold_convert_loc (loc, pred_type, pred);
8530 expected = fold_convert_loc (loc, expected_type, expected);
8531
8532 if (probability)
8533 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8534 else
8535 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8536 predictor);
8537
8538 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8539 build_int_cst (ret_type, 0));
8540 }
8541
8542 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8543 NULL_TREE if no simplification is possible. */
8544
8545 tree
8546 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8547 tree arg3)
8548 {
8549 tree inner, fndecl, inner_arg0;
8550 enum tree_code code;
8551
8552 /* Distribute the expected value over short-circuiting operators.
8553 See through the cast from truthvalue_type_node to long. */
8554 inner_arg0 = arg0;
8555 while (CONVERT_EXPR_P (inner_arg0)
8556 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8557 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8558 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8559
8560 /* If this is a builtin_expect within a builtin_expect keep the
8561 inner one. See through a comparison against a constant. It
8562 might have been added to create a truthvalue.
8563 inner = inner_arg0;
8564
8565 if (COMPARISON_CLASS_P (inner)
8566 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8567 inner = TREE_OPERAND (inner, 0);
8568
8569 if (TREE_CODE (inner) == CALL_EXPR
8570 && (fndecl = get_callee_fndecl (inner))
8571 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8572 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8573 return arg0;
8574
8575 inner = inner_arg0;
8576 code = TREE_CODE (inner);
8577 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8578 {
8579 tree op0 = TREE_OPERAND (inner, 0);
8580 tree op1 = TREE_OPERAND (inner, 1);
8581 arg1 = save_expr (arg1);
8582
8583 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8584 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8585 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8586
8587 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8588 }
8589
8590 /* If the argument isn't invariant then there's nothing else we can do. */
8591 if (!TREE_CONSTANT (inner_arg0))
8592 return NULL_TREE;
8593
8594 /* If we expect that a comparison against the argument will fold to
8595 a constant return the constant. In practice, this means a true
8596 constant or the address of a non-weak symbol. */
8597 inner = inner_arg0;
8598 STRIP_NOPS (inner);
8599 if (TREE_CODE (inner) == ADDR_EXPR)
8600 {
8601 do
8602 {
8603 inner = TREE_OPERAND (inner, 0);
8604 }
8605 while (TREE_CODE (inner) == COMPONENT_REF
8606 || TREE_CODE (inner) == ARRAY_REF);
8607 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8608 return NULL_TREE;
8609 }
8610
8611 /* Otherwise, ARG0 already has the proper type for the return value. */
8612 return arg0;
8613 }
8614
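/* Editorial illustration (a sketch, not part of the GCC sources): the
   distribution step above turns

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect ((long) a, 1) != 0)
       && (__builtin_expect ((long) b, 1) != 0)

   so each operand of the short-circuit carries its own hint, while an
   expect wrapped around another expect keeps only the inner call.  */
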
8615 /* Fold a call to __builtin_classify_type with argument ARG. */
8616
8617 static tree
8618 fold_builtin_classify_type (tree arg)
8619 {
8620 if (arg == 0)
8621 return build_int_cst (integer_type_node, no_type_class);
8622
8623 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8624 }
8625
8626 /* Fold a call to __builtin_strlen with argument ARG. */
8627
8628 static tree
8629 fold_builtin_strlen (location_t loc, tree type, tree arg)
8630 {
8631 if (!validate_arg (arg, POINTER_TYPE))
8632 return NULL_TREE;
8633 else
8634 {
8635 c_strlen_data lendata = { };
8636 tree len = c_strlen (arg, 0, &lendata);
8637
8638 if (len)
8639 return fold_convert_loc (loc, type, len);
8640
8641 if (!lendata.decl)
8642 c_strlen (arg, 1, &lendata);
8643
8644 if (lendata.decl)
8645 {
8646 if (EXPR_HAS_LOCATION (arg))
8647 loc = EXPR_LOCATION (arg);
8648 else if (loc == UNKNOWN_LOCATION)
8649 loc = input_location;
8650 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8651 }
8652
8653 return NULL_TREE;
8654 }
8655 }
8656
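/* Editorial illustration (a sketch, not part of the GCC sources): the
   fold above turns strlen ("hello") into the constant 5, converted to
   the call's return type.  When the argument is a character array with
   no terminating NUL that c_strlen can see, no folding happens and
   warn_string_no_nul reports the unterminated argument instead.  */
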
8657 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8658
8659 static tree
8660 fold_builtin_inf (location_t loc, tree type, int warn)
8661 {
8662 REAL_VALUE_TYPE real;
8663
8664 /* __builtin_inff is intended to be usable to define INFINITY on all
8665 targets. If an infinity is not available, INFINITY expands "to a
8666 positive constant of type float that overflows at translation
8667 time", footnote "In this case, using INFINITY will violate the
8668 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8669 Thus we pedwarn to ensure this constraint violation is
8670 diagnosed. */
8671 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8672 pedwarn (loc, 0, "target format does not support infinity");
8673
8674 real_inf (&real);
8675 return build_real (type, real);
8676 }
8677
8678 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8679 NULL_TREE if no simplification can be made. */
8680
8681 static tree
8682 fold_builtin_sincos (location_t loc,
8683 tree arg0, tree arg1, tree arg2)
8684 {
8685 tree type;
8686 tree fndecl, call = NULL_TREE;
8687
8688 if (!validate_arg (arg0, REAL_TYPE)
8689 || !validate_arg (arg1, POINTER_TYPE)
8690 || !validate_arg (arg2, POINTER_TYPE))
8691 return NULL_TREE;
8692
8693 type = TREE_TYPE (arg0);
8694
8695 /* Calculate the result when the argument is a constant. */
8696 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8697 if (fn == END_BUILTINS)
8698 return NULL_TREE;
8699
8700 /* Canonicalize sincos to cexpi. */
8701 if (TREE_CODE (arg0) == REAL_CST)
8702 {
8703 tree complex_type = build_complex_type (type);
8704 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8705 }
8706 if (!call)
8707 {
8708 if (!targetm.libc_has_function (function_c99_math_complex)
8709 || !builtin_decl_implicit_p (fn))
8710 return NULL_TREE;
8711 fndecl = builtin_decl_explicit (fn);
8712 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8713 call = builtin_save_expr (call);
8714 }
8715
8716 tree ptype = build_pointer_type (type);
8717 arg1 = fold_convert (ptype, arg1);
8718 arg2 = fold_convert (ptype, arg2);
8719 return build2 (COMPOUND_EXPR, void_type_node,
8720 build2 (MODIFY_EXPR, void_type_node,
8721 build_fold_indirect_ref_loc (loc, arg1),
8722 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8723 build2 (MODIFY_EXPR, void_type_node,
8724 build_fold_indirect_ref_loc (loc, arg2),
8725 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8726 }
8727
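/* Editorial illustration (a sketch, not part of the GCC sources): when
   the target's libc provides the C99 complex functions, the fold above
   canonicalizes

     sincos (x, &s, &c);

   into roughly

     __complex__ double t = __builtin_cexpi (x);
     s = __imag__ t, c = __real__ t;

   so later passes can share the result with nearby sin (x) and cos (x)
   calls.  */
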
8728 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8729 Return NULL_TREE if no simplification can be made. */
8730
8731 static tree
8732 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8733 {
8734 if (!validate_arg (arg1, POINTER_TYPE)
8735 || !validate_arg (arg2, POINTER_TYPE)
8736 || !validate_arg (len, INTEGER_TYPE))
8737 return NULL_TREE;
8738
8739 /* If the LEN parameter is zero, return zero. */
8740 if (integer_zerop (len))
8741 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8742 arg1, arg2);
8743
8744 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8745 if (operand_equal_p (arg1, arg2, 0))
8746 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8747
8748 /* If the len parameter is one, return an expression corresponding to
8749 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8750 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8751 {
8752 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8753 tree cst_uchar_ptr_node
8754 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8755
8756 tree ind1
8757 = fold_convert_loc (loc, integer_type_node,
8758 build1 (INDIRECT_REF, cst_uchar_node,
8759 fold_convert_loc (loc,
8760 cst_uchar_ptr_node,
8761 arg1)));
8762 tree ind2
8763 = fold_convert_loc (loc, integer_type_node,
8764 build1 (INDIRECT_REF, cst_uchar_node,
8765 fold_convert_loc (loc,
8766 cst_uchar_ptr_node,
8767 arg2)));
8768 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8769 }
8770
8771 return NULL_TREE;
8772 }
8773
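/* Editorial illustration (a sketch, not part of the GCC sources): the
   folds above give

     memcmp (p, q, 0)  ->  0
     memcmp (p, p, n)  ->  0
     memcmp (p, q, 1)  ->  *(const unsigned char *) p - *(const unsigned char *) q

   with any skipped operands still evaluated for their side effects.  */
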
8774 /* Fold a call to builtin isascii with argument ARG. */
8775
8776 static tree
8777 fold_builtin_isascii (location_t loc, tree arg)
8778 {
8779 if (!validate_arg (arg, INTEGER_TYPE))
8780 return NULL_TREE;
8781 else
8782 {
8783 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8784 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8785 build_int_cst (integer_type_node,
8786 ~ (unsigned HOST_WIDE_INT) 0x7f));
8787 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8788 arg, integer_zero_node);
8789 }
8790 }
8791
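/* Editorial illustration (a sketch, not part of the GCC sources): with
   the fold above,

     int f (int c) { return isascii (c); }

   compiles as if it had been written

     int f (int c) { return (c & ~0x7f) == 0; }

   so no library call remains.  */
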
8792 /* Fold a call to builtin toascii with argument ARG. */
8793
8794 static tree
8795 fold_builtin_toascii (location_t loc, tree arg)
8796 {
8797 if (!validate_arg (arg, INTEGER_TYPE))
8798 return NULL_TREE;
8799
8800 /* Transform toascii(c) -> (c & 0x7f). */
8801 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8802 build_int_cst (integer_type_node, 0x7f));
8803 }
8804
8805 /* Fold a call to builtin isdigit with argument ARG. */
8806
8807 static tree
8808 fold_builtin_isdigit (location_t loc, tree arg)
8809 {
8810 if (!validate_arg (arg, INTEGER_TYPE))
8811 return NULL_TREE;
8812 else
8813 {
8814 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8815 /* According to the C standard, isdigit is unaffected by locale.
8816 However, it definitely is affected by the target character set. */
8817 unsigned HOST_WIDE_INT target_digit0
8818 = lang_hooks.to_target_charset ('0');
8819
8820 if (target_digit0 == 0)
8821 return NULL_TREE;
8822
8823 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8824 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8825 build_int_cst (unsigned_type_node, target_digit0));
8826 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8827 build_int_cst (unsigned_type_node, 9));
8828 }
8829 }
8830
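/* Editorial illustration (a sketch, not part of the GCC sources): the
   fold above turns

     isdigit (c)

   into the locale-independent range check

     (unsigned) c - '0' <= 9

   where '0' is the digit zero of the target character set, not the
   host's.  */
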
8831 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8832
8833 static tree
8834 fold_builtin_fabs (location_t loc, tree arg, tree type)
8835 {
8836 if (!validate_arg (arg, REAL_TYPE))
8837 return NULL_TREE;
8838
8839 arg = fold_convert_loc (loc, type, arg);
8840 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8841 }
8842
8843 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8844
8845 static tree
8846 fold_builtin_abs (location_t loc, tree arg, tree type)
8847 {
8848 if (!validate_arg (arg, INTEGER_TYPE))
8849 return NULL_TREE;
8850
8851 arg = fold_convert_loc (loc, type, arg);
8852 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8853 }
8854
8855 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8856
8857 static tree
8858 fold_builtin_carg (location_t loc, tree arg, tree type)
8859 {
8860 if (validate_arg (arg, COMPLEX_TYPE)
8861 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8862 {
8863 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8864
8865 if (atan2_fn)
8866 {
8867 tree new_arg = builtin_save_expr (arg);
8868 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8869 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8870 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8871 }
8872 }
8873
8874 return NULL_TREE;
8875 }
8876
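/* Editorial illustration (a sketch, not part of the GCC sources):
   provided an atan2 variant of matching precision is available, the
   fold above rewrites

     carg (z)

   as

     atan2 (__imag__ z, __real__ z)

   with Z evaluated only once via builtin_save_expr.  */
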
8877 /* Fold a call to builtin frexp, we can assume the base is 2. */
8878
8879 static tree
8880 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8881 {
8882 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8883 return NULL_TREE;
8884
8885 STRIP_NOPS (arg0);
8886
8887 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8888 return NULL_TREE;
8889
8890 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8891
8892 /* Proceed if a valid pointer type was passed in. */
8893 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8894 {
8895 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8896 tree frac, exp;
8897
8898 switch (value->cl)
8899 {
8900 case rvc_zero:
8901 /* For +-0, return (*exp = 0, +-0). */
8902 exp = integer_zero_node;
8903 frac = arg0;
8904 break;
8905 case rvc_nan:
8906 case rvc_inf:
8907 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8908 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8909 case rvc_normal:
8910 {
8911 /* Since the frexp function always expects base 2, and in
8912 GCC normalized significands are already in the range
8913 [0.5, 1.0), we have exactly what frexp wants. */
8914 REAL_VALUE_TYPE frac_rvt = *value;
8915 SET_REAL_EXP (&frac_rvt, 0);
8916 frac = build_real (rettype, frac_rvt);
8917 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8918 }
8919 break;
8920 default:
8921 gcc_unreachable ();
8922 }
8923
8924 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8925 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8926 TREE_SIDE_EFFECTS (arg1) = 1;
8927 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8928 }
8929
8930 return NULL_TREE;
8931 }
8932
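/* Editorial illustration (a sketch, not part of the GCC sources): for a
   constant argument such as 12.0 (= 0.75 * 2^4), the fold above produces

     frexp (12.0, &e)  ->  (e = 4, 0.75)

   while +-0.0 yields (e = 0, +-0.0) and NaN/Inf are passed through with
   *e left unspecified.  */
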
8933 /* Fold a call to builtin modf. */
8934
8935 static tree
8936 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8937 {
8938 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8939 return NULL_TREE;
8940
8941 STRIP_NOPS (arg0);
8942
8943 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8944 return NULL_TREE;
8945
8946 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8947
8948 /* Proceed if a valid pointer type was passed in. */
8949 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8950 {
8951 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8952 REAL_VALUE_TYPE trunc, frac;
8953
8954 switch (value->cl)
8955 {
8956 case rvc_nan:
8957 case rvc_zero:
8958 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8959 trunc = frac = *value;
8960 break;
8961 case rvc_inf:
8962 /* For +-Inf, return (*arg1 = arg0, +-0). */
8963 frac = dconst0;
8964 frac.sign = value->sign;
8965 trunc = *value;
8966 break;
8967 case rvc_normal:
8968 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8969 real_trunc (&trunc, VOIDmode, value);
8970 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8971 /* If the original number was negative and already
8972 integral, then the fractional part is -0.0. */
8973 if (value->sign && frac.cl == rvc_zero)
8974 frac.sign = value->sign;
8975 break;
8976 }
8977
8978 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8979 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8980 build_real (rettype, trunc));
8981 TREE_SIDE_EFFECTS (arg1) = 1;
8982 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8983 build_real (rettype, frac));
8984 }
8985
8986 return NULL_TREE;
8987 }
8988
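/* Editorial illustration (a sketch, not part of the GCC sources): for
   constant arguments the fold above gives, e.g.,

     modf (2.5, &ip)   ->  (ip = 2.0, 0.5)
     modf (-3.0, &ip)  ->  (ip = -3.0, -0.0)

   matching the sign rule spelled out for the rvc_normal case.  */
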
8989 /* Given a location LOC, an interclass builtin function decl FNDECL
8990 and its single argument ARG, return a folded expression computing
8991 the same, or NULL_TREE if we either couldn't or didn't want to fold
8992 (the latter happens if there's an RTL instruction available). */
8993
8994 static tree
8995 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8996 {
8997 machine_mode mode;
8998
8999 if (!validate_arg (arg, REAL_TYPE))
9000 return NULL_TREE;
9001
9002 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9003 return NULL_TREE;
9004
9005 mode = TYPE_MODE (TREE_TYPE (arg));
9006
9007 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9008
9009 /* If there is no optab, try generic code. */
9010 switch (DECL_FUNCTION_CODE (fndecl))
9011 {
9012 tree result;
9013
9014 CASE_FLT_FN (BUILT_IN_ISINF):
9015 {
9016 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9017 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9018 tree type = TREE_TYPE (arg);
9019 REAL_VALUE_TYPE r;
9020 char buf[128];
9021
9022 if (is_ibm_extended)
9023 {
9024 /* NaN and Inf are encoded in the high-order double value
9025 only. The low-order value is not significant. */
9026 type = double_type_node;
9027 mode = DFmode;
9028 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9029 }
9030 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9031 real_from_string (&r, buf);
9032 result = build_call_expr (isgr_fn, 2,
9033 fold_build1_loc (loc, ABS_EXPR, type, arg),
9034 build_real (type, r));
9035 return result;
9036 }
9037 CASE_FLT_FN (BUILT_IN_FINITE):
9038 case BUILT_IN_ISFINITE:
9039 {
9040 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9041 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9042 tree type = TREE_TYPE (arg);
9043 REAL_VALUE_TYPE r;
9044 char buf[128];
9045
9046 if (is_ibm_extended)
9047 {
9048 /* NaN and Inf are encoded in the high-order double value
9049 only. The low-order value is not significant. */
9050 type = double_type_node;
9051 mode = DFmode;
9052 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9053 }
9054 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9055 real_from_string (&r, buf);
9056 result = build_call_expr (isle_fn, 2,
9057 fold_build1_loc (loc, ABS_EXPR, type, arg),
9058 build_real (type, r));
9059 /*result = fold_build2_loc (loc, UNGT_EXPR,
9060 TREE_TYPE (TREE_TYPE (fndecl)),
9061 fold_build1_loc (loc, ABS_EXPR, type, arg),
9062 build_real (type, r));
9063 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9064 TREE_TYPE (TREE_TYPE (fndecl)),
9065 result);*/
9066 return result;
9067 }
9068 case BUILT_IN_ISNORMAL:
9069 {
9070 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9071 islessequal(fabs(x),DBL_MAX). */
9072 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9073 tree type = TREE_TYPE (arg);
9074 tree orig_arg, max_exp, min_exp;
9075 machine_mode orig_mode = mode;
9076 REAL_VALUE_TYPE rmax, rmin;
9077 char buf[128];
9078
9079 orig_arg = arg = builtin_save_expr (arg);
9080 if (is_ibm_extended)
9081 {
9082 /* Use double to test the normal range of IBM extended
9083 precision. Emin for IBM extended precision is
9084 different to emin for IEEE double, being 53 higher
9085 since the low double exponent is at least 53 lower
9086 than the high double exponent. */
9087 type = double_type_node;
9088 mode = DFmode;
9089 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9090 }
9091 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9092
9093 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9094 real_from_string (&rmax, buf);
9095 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9096 real_from_string (&rmin, buf);
9097 max_exp = build_real (type, rmax);
9098 min_exp = build_real (type, rmin);
9099
9100 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9101 if (is_ibm_extended)
9102 {
9103 /* Testing the high end of the range is done just using
9104 the high double, using the same test as isfinite().
9105 For the subnormal end of the range we first test the
9106 high double, then if its magnitude is equal to the
9107 limit of 0x1p-969, we test whether the low double is
9108 non-zero and opposite sign to the high double. */
9109 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9110 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9111 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9112 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9113 arg, min_exp);
9114 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9115 complex_double_type_node, orig_arg);
9116 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9117 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9118 tree zero = build_real (type, dconst0);
9119 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9120 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9121 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9122 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9123 fold_build3 (COND_EXPR,
9124 integer_type_node,
9125 hilt, logt, lolt));
9126 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9127 eq_min, ok_lo);
9128 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9129 gt_min, eq_min);
9130 }
9131 else
9132 {
9133 tree const isge_fn
9134 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9135 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9136 }
9137 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9138 max_exp, min_exp);
9139 return result;
9140 }
9141 default:
9142 break;
9143 }
9144
9145 return NULL_TREE;
9146 }
9147
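/* Editorial illustration (a sketch, not part of the GCC sources): when
   no direct optab exists, the generic expansions above amount to

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  isgreaterequal (fabs (x), DBL_MIN)
                         & islessequal (fabs (x), DBL_MAX)

   where DBL_MAX and DBL_MIN stand for the largest finite and smallest
   normal values of the argument's actual mode, and IBM extended
   precision gets the special high/low-double handling shown above.  */
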
9148 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9149 ARG is the argument for the call. */
9150
9151 static tree
9152 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9153 {
9154 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9155
9156 if (!validate_arg (arg, REAL_TYPE))
9157 return NULL_TREE;
9158
9159 switch (builtin_index)
9160 {
9161 case BUILT_IN_ISINF:
9162 if (!HONOR_INFINITIES (arg))
9163 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9164
9165 return NULL_TREE;
9166
9167 case BUILT_IN_ISINF_SIGN:
9168 {
9169 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9170 /* In a boolean context, GCC will fold the inner COND_EXPR to
9171 1. So e.g. "if (isinf_sign(x))" would be folded to just
9172 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9173 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9174 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9175 tree tmp = NULL_TREE;
9176
9177 arg = builtin_save_expr (arg);
9178
9179 if (signbit_fn && isinf_fn)
9180 {
9181 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9182 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9183
9184 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9185 signbit_call, integer_zero_node);
9186 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9187 isinf_call, integer_zero_node);
9188
9189 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9190 integer_minus_one_node, integer_one_node);
9191 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9192 isinf_call, tmp,
9193 integer_zero_node);
9194 }
9195
9196 return tmp;
9197 }
9198
9199 case BUILT_IN_ISFINITE:
9200 if (!HONOR_NANS (arg)
9201 && !HONOR_INFINITIES (arg))
9202 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9203
9204 return NULL_TREE;
9205
9206 case BUILT_IN_ISNAN:
9207 if (!HONOR_NANS (arg))
9208 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9209
9210 {
9211 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9212 if (is_ibm_extended)
9213 {
9214 /* NaN and Inf are encoded in the high-order double value
9215 only. The low-order value is not significant. */
9216 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9217 }
9218 }
9219 arg = builtin_save_expr (arg);
9220 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9221
9222 default:
9223 gcc_unreachable ();
9224 }
9225 }
9226
9227 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9228 This builtin will generate code to return the appropriate floating
9229 point classification depending on the value of the floating point
9230 number passed in. The possible return values must be supplied as
9231 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9232 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9233 one floating point argument which is "type generic". */
9234
9235 static tree
9236 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9237 {
9238 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9239 arg, type, res, tmp;
9240 machine_mode mode;
9241 REAL_VALUE_TYPE r;
9242 char buf[128];
9243
9244 /* Verify the required arguments in the original call. */
9245 if (nargs != 6
9246 || !validate_arg (args[0], INTEGER_TYPE)
9247 || !validate_arg (args[1], INTEGER_TYPE)
9248 || !validate_arg (args[2], INTEGER_TYPE)
9249 || !validate_arg (args[3], INTEGER_TYPE)
9250 || !validate_arg (args[4], INTEGER_TYPE)
9251 || !validate_arg (args[5], REAL_TYPE))
9252 return NULL_TREE;
9253
9254 fp_nan = args[0];
9255 fp_infinite = args[1];
9256 fp_normal = args[2];
9257 fp_subnormal = args[3];
9258 fp_zero = args[4];
9259 arg = args[5];
9260 type = TREE_TYPE (arg);
9261 mode = TYPE_MODE (type);
9262 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9263
9264 /* fpclassify(x) ->
9265 isnan(x) ? FP_NAN :
9266 (fabs(x) == Inf ? FP_INFINITE :
9267 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9268 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9269
9270 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9271 build_real (type, dconst0));
9272 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9273 tmp, fp_zero, fp_subnormal);
9274
9275 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9276 real_from_string (&r, buf);
9277 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9278 arg, build_real (type, r));
9279 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9280
9281 if (HONOR_INFINITIES (mode))
9282 {
9283 real_inf (&r);
9284 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9285 build_real (type, r));
9286 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9287 fp_infinite, res);
9288 }
9289
9290 if (HONOR_NANS (mode))
9291 {
9292 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9293 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9294 }
9295
9296 return res;
9297 }
9298
9299 /* Fold a call to an unordered comparison function such as
9300 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9301 being called and ARG0 and ARG1 are the arguments for the call.
9302 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9303 the opposite of the desired result. UNORDERED_CODE is used
9304 for modes that can hold NaNs and ORDERED_CODE is used for
9305 the rest. */
9306
9307 static tree
9308 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9309 enum tree_code unordered_code,
9310 enum tree_code ordered_code)
9311 {
9312 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9313 enum tree_code code;
9314 tree type0, type1;
9315 enum tree_code code0, code1;
9316 tree cmp_type = NULL_TREE;
9317
9318 type0 = TREE_TYPE (arg0);
9319 type1 = TREE_TYPE (arg1);
9320
9321 code0 = TREE_CODE (type0);
9322 code1 = TREE_CODE (type1);
9323
9324 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9325 /* Choose the wider of two real types. */
9326 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9327 ? type0 : type1;
9328 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9329 cmp_type = type0;
9330 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9331 cmp_type = type1;
9332
9333 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9334 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9335
9336 if (unordered_code == UNORDERED_EXPR)
9337 {
9338 if (!HONOR_NANS (arg0))
9339 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9340 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9341 }
9342
9343 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9344 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9345 fold_build2_loc (loc, code, type, arg0, arg1));
9346 }
9347
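/* Editorial illustration (a sketch, not part of the GCC sources): for a
   mode that honors NaNs, the fold above turns isgreater (x, y) into the
   quiet negated form built with UNLE_EXPR, i.e. roughly !(x UNLE y), so
   no invalid-operand exception is raised on NaN; when NaNs need not be
   honored (e.g. with -ffinite-math-only) it degenerates to the plain
   !(x <= y).  */
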
9348 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9349 arithmetic if it can never overflow, or into internal functions that
9350 return both the result of the arithmetic and an overflow flag in
9351 a complex integer result, or some other check for overflow.
9352 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9353 checking part of that. */
9354
9355 static tree
9356 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9357 tree arg0, tree arg1, tree arg2)
9358 {
9359 enum internal_fn ifn = IFN_LAST;
9360 /* The code of the expression corresponding to the built-in. */
9361 enum tree_code opcode = ERROR_MARK;
9362 bool ovf_only = false;
9363
9364 switch (fcode)
9365 {
9366 case BUILT_IN_ADD_OVERFLOW_P:
9367 ovf_only = true;
9368 /* FALLTHRU */
9369 case BUILT_IN_ADD_OVERFLOW:
9370 case BUILT_IN_SADD_OVERFLOW:
9371 case BUILT_IN_SADDL_OVERFLOW:
9372 case BUILT_IN_SADDLL_OVERFLOW:
9373 case BUILT_IN_UADD_OVERFLOW:
9374 case BUILT_IN_UADDL_OVERFLOW:
9375 case BUILT_IN_UADDLL_OVERFLOW:
9376 opcode = PLUS_EXPR;
9377 ifn = IFN_ADD_OVERFLOW;
9378 break;
9379 case BUILT_IN_SUB_OVERFLOW_P:
9380 ovf_only = true;
9381 /* FALLTHRU */
9382 case BUILT_IN_SUB_OVERFLOW:
9383 case BUILT_IN_SSUB_OVERFLOW:
9384 case BUILT_IN_SSUBL_OVERFLOW:
9385 case BUILT_IN_SSUBLL_OVERFLOW:
9386 case BUILT_IN_USUB_OVERFLOW:
9387 case BUILT_IN_USUBL_OVERFLOW:
9388 case BUILT_IN_USUBLL_OVERFLOW:
9389 opcode = MINUS_EXPR;
9390 ifn = IFN_SUB_OVERFLOW;
9391 break;
9392 case BUILT_IN_MUL_OVERFLOW_P:
9393 ovf_only = true;
9394 /* FALLTHRU */
9395 case BUILT_IN_MUL_OVERFLOW:
9396 case BUILT_IN_SMUL_OVERFLOW:
9397 case BUILT_IN_SMULL_OVERFLOW:
9398 case BUILT_IN_SMULLL_OVERFLOW:
9399 case BUILT_IN_UMUL_OVERFLOW:
9400 case BUILT_IN_UMULL_OVERFLOW:
9401 case BUILT_IN_UMULLL_OVERFLOW:
9402 opcode = MULT_EXPR;
9403 ifn = IFN_MUL_OVERFLOW;
9404 break;
9405 default:
9406 gcc_unreachable ();
9407 }
9408
9409 /* For the "generic" overloads, the first two arguments can have different
9410 types and the last argument determines the target type to use to check
9411 for overflow. The arguments of the other overloads all have the same
9412 type. */
9413 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9414
9415 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9416 arguments are constant, attempt to fold the built-in call into a constant
9417 expression indicating whether or not it detected an overflow. */
9418 if (ovf_only
9419 && TREE_CODE (arg0) == INTEGER_CST
9420 && TREE_CODE (arg1) == INTEGER_CST)
9421 /* Perform the computation in the target type and check for overflow. */
9422 return omit_one_operand_loc (loc, boolean_type_node,
9423 arith_overflowed_p (opcode, type, arg0, arg1)
9424 ? boolean_true_node : boolean_false_node,
9425 arg2);
9426
9427 tree intres, ovfres;
9428 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9429 {
9430 intres = fold_binary_loc (loc, opcode, type,
9431 fold_convert_loc (loc, type, arg0),
9432 fold_convert_loc (loc, type, arg1));
9433 if (TREE_OVERFLOW (intres))
9434 intres = drop_tree_overflow (intres);
9435 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9436 ? boolean_true_node : boolean_false_node);
9437 }
9438 else
9439 {
9440 tree ctype = build_complex_type (type);
9441 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9442 arg0, arg1);
9443 tree tgt = save_expr (call);
9444 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9445 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9446 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9447 }
9448
9449 if (ovf_only)
9450 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9451
9452 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9453 tree store
9454 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9455 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9456 }
9457
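/* Editorial illustration (a sketch, not part of the GCC sources): with
   both operands constant,

     __builtin_add_overflow_p (INT_MAX, 1, 0)

   folds straight to true, while the general

     __builtin_add_overflow (a, b, &r)

   becomes roughly

     t = .ADD_OVERFLOW (a, b);
     r = __real__ t;
     __imag__ t != 0

   i.e. the real part of the internal function's complex result carries
   the value and the imaginary part the overflow flag.  */
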
9458 /* Fold a call to __builtin_FILE to a constant string. */
9459
9460 static inline tree
9461 fold_builtin_FILE (location_t loc)
9462 {
9463 if (const char *fname = LOCATION_FILE (loc))
9464 {
9465 /* The documentation says this builtin is equivalent to the preprocessor
9466 __FILE__ macro so it appears appropriate to use the same file prefix
9467 mappings. */
9468 fname = remap_macro_filename (fname);
9469 return build_string_literal (strlen (fname) + 1, fname);
9470 }
9471
9472 return build_string_literal (1, "");
9473 }
9474
9475 /* Fold a call to __builtin_FUNCTION to a constant string. */
9476
9477 static inline tree
9478 fold_builtin_FUNCTION ()
9479 {
9480 const char *name = "";
9481
9482 if (current_function_decl)
9483 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9484
9485 return build_string_literal (strlen (name) + 1, name);
9486 }
9487
9488 /* Fold a call to __builtin_LINE to an integer constant. */
9489
9490 static inline tree
9491 fold_builtin_LINE (location_t loc, tree type)
9492 {
9493 return build_int_cst (type, LOCATION_LINE (loc));
9494 }
9495
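/* Editorial illustration (a sketch, not part of the GCC sources): at a
   call written on line 42 of foo.c inside the function bar, the three
   folds above yield the constants "foo.c" (after the usual file prefix
   remapping), "bar" and 42.  */
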
9496 /* Fold a call to built-in function FNDECL with 0 arguments.
9497 This function returns NULL_TREE if no simplification was possible. */
9498
9499 static tree
9500 fold_builtin_0 (location_t loc, tree fndecl)
9501 {
9502 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9503 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9504 switch (fcode)
9505 {
9506 case BUILT_IN_FILE:
9507 return fold_builtin_FILE (loc);
9508
9509 case BUILT_IN_FUNCTION:
9510 return fold_builtin_FUNCTION ();
9511
9512 case BUILT_IN_LINE:
9513 return fold_builtin_LINE (loc, type);
9514
9515 CASE_FLT_FN (BUILT_IN_INF):
9516 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9517 case BUILT_IN_INFD32:
9518 case BUILT_IN_INFD64:
9519 case BUILT_IN_INFD128:
9520 return fold_builtin_inf (loc, type, true);
9521
9522 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9523 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9524 return fold_builtin_inf (loc, type, false);
9525
9526 case BUILT_IN_CLASSIFY_TYPE:
9527 return fold_builtin_classify_type (NULL_TREE);
9528
9529 default:
9530 break;
9531 }
9532 return NULL_TREE;
9533 }
9534
9535 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9536 This function returns NULL_TREE if no simplification was possible. */
9537
9538 static tree
9539 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9540 {
9541 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9542 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9543
9544 if (TREE_CODE (arg0) == ERROR_MARK)
9545 return NULL_TREE;
9546
9547 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9548 return ret;
9549
9550 switch (fcode)
9551 {
9552 case BUILT_IN_CONSTANT_P:
9553 {
9554 tree val = fold_builtin_constant_p (arg0);
9555
9556 /* Gimplification will pull the CALL_EXPR for the builtin out of
9557 an if condition. When not optimizing, we'll not CSE it back.
9558 To avoid regressions such as link errors, return false now. */
9559 if (!val && !optimize)
9560 val = integer_zero_node;
9561
9562 return val;
9563 }
9564
9565 case BUILT_IN_CLASSIFY_TYPE:
9566 return fold_builtin_classify_type (arg0);
9567
9568 case BUILT_IN_STRLEN:
9569 return fold_builtin_strlen (loc, type, arg0);
9570
9571 CASE_FLT_FN (BUILT_IN_FABS):
9572 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9573 case BUILT_IN_FABSD32:
9574 case BUILT_IN_FABSD64:
9575 case BUILT_IN_FABSD128:
9576 return fold_builtin_fabs (loc, arg0, type);
9577
9578 case BUILT_IN_ABS:
9579 case BUILT_IN_LABS:
9580 case BUILT_IN_LLABS:
9581 case BUILT_IN_IMAXABS:
9582 return fold_builtin_abs (loc, arg0, type);
9583
9584 CASE_FLT_FN (BUILT_IN_CONJ):
9585 if (validate_arg (arg0, COMPLEX_TYPE)
9586 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9587 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9588 break;
9589
9590 CASE_FLT_FN (BUILT_IN_CREAL):
9591 if (validate_arg (arg0, COMPLEX_TYPE)
9592 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9593 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9594 break;
9595
9596 CASE_FLT_FN (BUILT_IN_CIMAG):
9597 if (validate_arg (arg0, COMPLEX_TYPE)
9598 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9599 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9600 break;
9601
9602 CASE_FLT_FN (BUILT_IN_CARG):
9603 return fold_builtin_carg (loc, arg0, type);
9604
9605 case BUILT_IN_ISASCII:
9606 return fold_builtin_isascii (loc, arg0);
9607
9608 case BUILT_IN_TOASCII:
9609 return fold_builtin_toascii (loc, arg0);
9610
9611 case BUILT_IN_ISDIGIT:
9612 return fold_builtin_isdigit (loc, arg0);
9613
9614 CASE_FLT_FN (BUILT_IN_FINITE):
9615 case BUILT_IN_FINITED32:
9616 case BUILT_IN_FINITED64:
9617 case BUILT_IN_FINITED128:
9618 case BUILT_IN_ISFINITE:
9619 {
9620 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9621 if (ret)
9622 return ret;
9623 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9624 }
9625
9626 CASE_FLT_FN (BUILT_IN_ISINF):
9627 case BUILT_IN_ISINFD32:
9628 case BUILT_IN_ISINFD64:
9629 case BUILT_IN_ISINFD128:
9630 {
9631 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9632 if (ret)
9633 return ret;
9634 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9635 }
9636
9637 case BUILT_IN_ISNORMAL:
9638 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9639
9640 case BUILT_IN_ISINF_SIGN:
9641 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9642
9643 CASE_FLT_FN (BUILT_IN_ISNAN):
9644 case BUILT_IN_ISNAND32:
9645 case BUILT_IN_ISNAND64:
9646 case BUILT_IN_ISNAND128:
9647 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9648
9649 case BUILT_IN_FREE:
9650 if (integer_zerop (arg0))
9651 return build_empty_stmt (loc);
9652 break;
9653
9654 default:
9655 break;
9656 }
9657
9658 return NULL_TREE;
9659
9660 }
9661
9662 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9663 This function returns NULL_TREE if no simplification was possible. */
9664
9665 static tree
9666 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9667 {
9668 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9669 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9670
9671 if (TREE_CODE (arg0) == ERROR_MARK
9672 || TREE_CODE (arg1) == ERROR_MARK)
9673 return NULL_TREE;
9674
9675 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9676 return ret;
9677
9678 switch (fcode)
9679 {
9680 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9681 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9682 if (validate_arg (arg0, REAL_TYPE)
9683 && validate_arg (arg1, POINTER_TYPE))
9684 return do_mpfr_lgamma_r (arg0, arg1, type);
9685 break;
9686
9687 CASE_FLT_FN (BUILT_IN_FREXP):
9688 return fold_builtin_frexp (loc, arg0, arg1, type);
9689
9690 CASE_FLT_FN (BUILT_IN_MODF):
9691 return fold_builtin_modf (loc, arg0, arg1, type);
9692
9693 case BUILT_IN_STRSPN:
9694 return fold_builtin_strspn (loc, arg0, arg1);
9695
9696 case BUILT_IN_STRCSPN:
9697 return fold_builtin_strcspn (loc, arg0, arg1);
9698
9699 case BUILT_IN_STRPBRK:
9700 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9701
9702 case BUILT_IN_EXPECT:
9703 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9704
9705 case BUILT_IN_ISGREATER:
9706 return fold_builtin_unordered_cmp (loc, fndecl,
9707 arg0, arg1, UNLE_EXPR, LE_EXPR);
9708 case BUILT_IN_ISGREATEREQUAL:
9709 return fold_builtin_unordered_cmp (loc, fndecl,
9710 arg0, arg1, UNLT_EXPR, LT_EXPR);
9711 case BUILT_IN_ISLESS:
9712 return fold_builtin_unordered_cmp (loc, fndecl,
9713 arg0, arg1, UNGE_EXPR, GE_EXPR);
9714 case BUILT_IN_ISLESSEQUAL:
9715 return fold_builtin_unordered_cmp (loc, fndecl,
9716 arg0, arg1, UNGT_EXPR, GT_EXPR);
9717 case BUILT_IN_ISLESSGREATER:
9718 return fold_builtin_unordered_cmp (loc, fndecl,
9719 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9720 case BUILT_IN_ISUNORDERED:
9721 return fold_builtin_unordered_cmp (loc, fndecl,
9722 arg0, arg1, UNORDERED_EXPR,
9723 NOP_EXPR);
9724
9725 /* We do the folding for va_start in the expander. */
9726 case BUILT_IN_VA_START:
9727 break;
9728
9729 case BUILT_IN_OBJECT_SIZE:
9730 return fold_builtin_object_size (arg0, arg1);
9731
9732 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9733 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9734
9735 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9736 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9737
9738 default:
9739 break;
9740 }
9741 return NULL_TREE;
9742 }
9743
9744 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9745 and ARG2.
9746 This function returns NULL_TREE if no simplification was possible. */
9747
9748 static tree
9749 fold_builtin_3 (location_t loc, tree fndecl,
9750 tree arg0, tree arg1, tree arg2)
9751 {
9752 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9753 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9754
9755 if (TREE_CODE (arg0) == ERROR_MARK
9756 || TREE_CODE (arg1) == ERROR_MARK
9757 || TREE_CODE (arg2) == ERROR_MARK)
9758 return NULL_TREE;
9759
9760 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9761 arg0, arg1, arg2))
9762 return ret;
9763
9764 switch (fcode)
9765 {
9766
9767 CASE_FLT_FN (BUILT_IN_SINCOS):
9768 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9769
9770 CASE_FLT_FN (BUILT_IN_REMQUO):
9771 if (validate_arg (arg0, REAL_TYPE)
9772 && validate_arg (arg1, REAL_TYPE)
9773 && validate_arg (arg2, POINTER_TYPE))
9774 return do_mpfr_remquo (arg0, arg1, arg2);
9775 break;
9776
9777 case BUILT_IN_MEMCMP:
9778 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9779
9780 case BUILT_IN_EXPECT:
9781 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9782
9783 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9784 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9785
9786 case BUILT_IN_ADD_OVERFLOW:
9787 case BUILT_IN_SUB_OVERFLOW:
9788 case BUILT_IN_MUL_OVERFLOW:
9789 case BUILT_IN_ADD_OVERFLOW_P:
9790 case BUILT_IN_SUB_OVERFLOW_P:
9791 case BUILT_IN_MUL_OVERFLOW_P:
9792 case BUILT_IN_SADD_OVERFLOW:
9793 case BUILT_IN_SADDL_OVERFLOW:
9794 case BUILT_IN_SADDLL_OVERFLOW:
9795 case BUILT_IN_SSUB_OVERFLOW:
9796 case BUILT_IN_SSUBL_OVERFLOW:
9797 case BUILT_IN_SSUBLL_OVERFLOW:
9798 case BUILT_IN_SMUL_OVERFLOW:
9799 case BUILT_IN_SMULL_OVERFLOW:
9800 case BUILT_IN_SMULLL_OVERFLOW:
9801 case BUILT_IN_UADD_OVERFLOW:
9802 case BUILT_IN_UADDL_OVERFLOW:
9803 case BUILT_IN_UADDLL_OVERFLOW:
9804 case BUILT_IN_USUB_OVERFLOW:
9805 case BUILT_IN_USUBL_OVERFLOW:
9806 case BUILT_IN_USUBLL_OVERFLOW:
9807 case BUILT_IN_UMUL_OVERFLOW:
9808 case BUILT_IN_UMULL_OVERFLOW:
9809 case BUILT_IN_UMULLL_OVERFLOW:
9810 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9811
9812 default:
9813 break;
9814 }
9815 return NULL_TREE;
9816 }
9817
9818 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9819 arguments. IGNORE is true if the result of the
9820 function call is ignored. This function returns NULL_TREE if no
9821 simplification was possible. */
9822
9823 tree
9824 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9825 {
9826 tree ret = NULL_TREE;
9827
9828 switch (nargs)
9829 {
9830 case 0:
9831 ret = fold_builtin_0 (loc, fndecl);
9832 break;
9833 case 1:
9834 ret = fold_builtin_1 (loc, fndecl, args[0]);
9835 break;
9836 case 2:
9837 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9838 break;
9839 case 3:
9840 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9841 break;
9842 default:
9843 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9844 break;
9845 }
9846 if (ret)
9847 {
9848 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9849 SET_EXPR_LOCATION (ret, loc);
9850 return ret;
9851 }
9852 return NULL_TREE;
9853 }
9854
9855 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9856 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9857 of arguments in ARGS to be omitted. OLDNARGS is the number of
9858 elements in ARGS. */
9859
9860 static tree
9861 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9862 int skip, tree fndecl, int n, va_list newargs)
9863 {
9864 int nargs = oldnargs - skip + n;
9865 tree *buffer;
9866
9867 if (n > 0)
9868 {
9869 int i, j;
9870
9871 buffer = XALLOCAVEC (tree, nargs);
9872 for (i = 0; i < n; i++)
9873 buffer[i] = va_arg (newargs, tree);
9874 for (j = skip; j < oldnargs; j++, i++)
9875 buffer[i] = args[j];
9876 }
9877 else
9878 buffer = args + skip;
9879
9880 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9881 }
9882
9883 /* Return true if FNDECL shouldn't be folded right now.
9884 If a built-in function has an inline attribute always_inline
9885 wrapper, defer folding it until after always_inline functions have
9886 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9887 might not be performed. */
9888
9889 bool
9890 avoid_folding_inline_builtin (tree fndecl)
9891 {
9892 return (DECL_DECLARED_INLINE_P (fndecl)
9893 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9894 && cfun
9895 && !cfun->always_inline_functions_inlined
9896 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9897 }
9898
9899 /* A wrapper function for builtin folding that prevents warnings for
9900 "statement without effect" and the like, caused by removing the
9901 call node earlier than the warning is generated. */
9902
9903 tree
9904 fold_call_expr (location_t loc, tree exp, bool ignore)
9905 {
9906 tree ret = NULL_TREE;
9907 tree fndecl = get_callee_fndecl (exp);
9908 if (fndecl && fndecl_built_in_p (fndecl)
9909 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9910 yet. Defer folding until we see all the arguments
9911 (after inlining). */
9912 && !CALL_EXPR_VA_ARG_PACK (exp))
9913 {
9914 int nargs = call_expr_nargs (exp);
9915
9916 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9917 instead the last argument is __builtin_va_arg_pack (). Defer folding
9918 even in that case, until arguments are finalized. */
9919 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9920 {
9921 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9922 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9923 return NULL_TREE;
9924 }
9925
9926 if (avoid_folding_inline_builtin (fndecl))
9927 return NULL_TREE;
9928
9929 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9930 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9931 CALL_EXPR_ARGP (exp), ignore);
9932 else
9933 {
9934 tree *args = CALL_EXPR_ARGP (exp);
9935 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9936 if (ret)
9937 return ret;
9938 }
9939 }
9940 return NULL_TREE;
9941 }
9942
9943 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9944 N arguments are passed in the array ARGARRAY. Return a folded
9945 expression or NULL_TREE if no simplification was possible. */
9946
9947 tree
9948 fold_builtin_call_array (location_t loc, tree,
9949 tree fn,
9950 int n,
9951 tree *argarray)
9952 {
9953 if (TREE_CODE (fn) != ADDR_EXPR)
9954 return NULL_TREE;
9955
9956 tree fndecl = TREE_OPERAND (fn, 0);
9957 if (TREE_CODE (fndecl) == FUNCTION_DECL
9958 && fndecl_built_in_p (fndecl))
9959 {
9960 /* If the last argument is __builtin_va_arg_pack (), arguments to this
9961 function are not finalized yet. Defer folding until they are. */
9962 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9963 {
9964 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9965 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9966 return NULL_TREE;
9967 }
9968 if (avoid_folding_inline_builtin (fndecl))
9969 return NULL_TREE;
9970 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9971 return targetm.fold_builtin (fndecl, n, argarray, false);
9972 else
9973 return fold_builtin_n (loc, fndecl, argarray, n, false);
9974 }
9975
9976 return NULL_TREE;
9977 }
9978
9979 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9980 along with N new arguments specified as the "..." parameters. SKIP
9981 is the number of arguments in EXP to be omitted. This function is used
9982 to do varargs-to-varargs transformations. */
9983
9984 static tree
9985 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9986 {
9987 va_list ap;
9988 tree t;
9989
9990 va_start (ap, n);
9991 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9992 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9993 va_end (ap);
9994
9995 return t;
9996 }
9997
9998 /* Validate a single argument ARG against a tree code CODE representing
9999 a type. Return true when the argument is valid. */
10000
10001 static bool
10002 validate_arg (const_tree arg, enum tree_code code)
10003 {
10004 if (!arg)
10005 return false;
10006 else if (code == POINTER_TYPE)
10007 return POINTER_TYPE_P (TREE_TYPE (arg));
10008 else if (code == INTEGER_TYPE)
10009 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10010 return code == TREE_CODE (TREE_TYPE (arg));
10011 }
10012
10013 /* This function validates the types of a function call argument list
10014 against a specified list of tree_codes. If the last specifier is a 0,
10015 that represents an ellipsis; otherwise the last specifier must be a
10016 VOID_TYPE.
10017
10018 This is the GIMPLE version of validate_arglist. Eventually we want to
10019 completely convert builtins.c to work from GIMPLEs and the tree based
10020 validate_arglist will then be removed. */
10021
10022 bool
10023 validate_gimple_arglist (const gcall *call, ...)
10024 {
10025 enum tree_code code;
10026 bool res = 0;
10027 va_list ap;
10028 const_tree arg;
10029 size_t i;
10030
10031 va_start (ap, call);
10032 i = 0;
10033
10034 do
10035 {
10036 code = (enum tree_code) va_arg (ap, int);
10037 switch (code)
10038 {
10039 case 0:
10040 /* This signifies an ellipsis; any further arguments are all OK. */
10041 res = true;
10042 goto end;
10043 case VOID_TYPE:
10044 /* This signifies an endlink; if no arguments remain, return
10045 true, otherwise return false. */
10046 res = (i == gimple_call_num_args (call));
10047 goto end;
10048 default:
10049 /* If no parameters remain or the parameter's code does not
10050 match the specified code, return false. Otherwise continue
10051 checking any remaining arguments. */
10052 arg = gimple_call_arg (call, i++);
10053 if (!validate_arg (arg, code))
10054 goto end;
10055 break;
10056 }
10057 }
10058 while (1);
10059
10060 /* We need gotos here since we can only have one VA_CLOSE in a
10061 function. */
10062 end: ;
10063 va_end (ap);
10064
10065 return res;
10066 }
10067
10068 /* Default target-specific builtin expander that does nothing. */
10069
10070 rtx
10071 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10072 rtx target ATTRIBUTE_UNUSED,
10073 rtx subtarget ATTRIBUTE_UNUSED,
10074 machine_mode mode ATTRIBUTE_UNUSED,
10075 int ignore ATTRIBUTE_UNUSED)
10076 {
10077 return NULL_RTX;
10078 }
10079
10080 /* Returns true if EXP represents data that would potentially reside
10081 in a readonly section. */
10082
10083 bool
10084 readonly_data_expr (tree exp)
10085 {
10086 STRIP_NOPS (exp);
10087
10088 if (TREE_CODE (exp) != ADDR_EXPR)
10089 return false;
10090
10091 exp = get_base_address (TREE_OPERAND (exp, 0));
10092 if (!exp)
10093 return false;
10094
10095 /* Make sure we call decl_readonly_section only for trees it
10096 can handle (since it returns true for everything it doesn't
10097 understand). */
10098 if (TREE_CODE (exp) == STRING_CST
10099 || TREE_CODE (exp) == CONSTRUCTOR
10100 || (VAR_P (exp) && TREE_STATIC (exp)))
10101 return decl_readonly_section (exp, 0);
10102 else
10103 return false;
10104 }
10105
10106 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10107 to the call, and TYPE is its return type.
10108
10109 Return NULL_TREE if no simplification was possible, otherwise return the
10110 simplified form of the call as a tree.
10111
10112 The simplified form may be a constant or other expression which
10113 computes the same value, but in a more efficient manner (including
10114 calls to other builtin functions).
10115
10116 The call may contain arguments which need to be evaluated, but
10117 which are not useful to determine the result of the call. In
10118 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10119 COMPOUND_EXPR will be an argument which must be evaluated.
10120 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10121 COMPOUND_EXPR in the chain will contain the tree for the simplified
10122 form of the builtin function call. */
10123
10124 static tree
10125 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10126 {
10127 if (!validate_arg (s1, POINTER_TYPE)
10128 || !validate_arg (s2, POINTER_TYPE))
10129 return NULL_TREE;
10130 else
10131 {
10132 tree fn;
10133 const char *p1, *p2;
10134
10135 p2 = c_getstr (s2);
10136 if (p2 == NULL)
10137 return NULL_TREE;
10138
10139 p1 = c_getstr (s1);
10140 if (p1 != NULL)
10141 {
10142 const char *r = strpbrk (p1, p2);
10143 tree tem;
10144
10145 if (r == NULL)
10146 return build_int_cst (TREE_TYPE (s1), 0);
10147
10148 /* Return an offset into the constant string argument. */
10149 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10150 return fold_convert_loc (loc, type, tem);
10151 }
10152
10153 if (p2[0] == '\0')
10154 /* strpbrk(x, "") == NULL.
10155 Evaluate and ignore s1 in case it had side-effects. */
10156 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10157
10158 if (p2[1] != '\0')
10159 return NULL_TREE; /* Really call strpbrk. */
10160
10161 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10162 if (!fn)
10163 return NULL_TREE;
10164
10165 /* New argument list transforming strpbrk(s1, s2) to
10166 strchr(s1, s2[0]). */
10167 return build_call_expr_loc (loc, fn, 2, s1,
10168 build_int_cst (integer_type_node, p2[0]));
10169 }
10170 }
10171
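/* Editorial illustration (a sketch, not part of the GCC sources): the
   folds above give

     strpbrk ("hello", "lo")  ->  &"hello"[2]
     strpbrk (s, "")          ->  a null pointer, with S still evaluated
     strpbrk (s, "a")         ->  strchr (s, 'a')

   and anything else keeps the library call.  */
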
10172 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10173 to the call.
10174
10175 Return NULL_TREE if no simplification was possible, otherwise return the
10176 simplified form of the call as a tree.
10177
10178 The simplified form may be a constant or other expression which
10179 computes the same value, but in a more efficient manner (including
10180 calls to other builtin functions).
10181
10182 The call may contain arguments which need to be evaluated, but
10183 which are not useful to determine the result of the call. In
10184 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10185 COMPOUND_EXPR will be an argument which must be evaluated.
10186 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10187 COMPOUND_EXPR in the chain will contain the tree for the simplified
10188 form of the builtin function call. */
10189
10190 static tree
10191 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10192 {
10193 if (!validate_arg (s1, POINTER_TYPE)
10194 || !validate_arg (s2, POINTER_TYPE))
10195 return NULL_TREE;
10196 else
10197 {
10198 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10199
10200 /* If either argument is "", the result is zero. */
10201 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10202 /* Evaluate and ignore both arguments in case either one has
10203 side-effects. */
10204 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10205 s1, s2);
10206 return NULL_TREE;
10207 }
10208 }
10209
10210 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10211 to the call.
10212
10213 Return NULL_TREE if no simplification was possible, otherwise return the
10214 simplified form of the call as a tree.
10215
10216 The simplified form may be a constant or other expression which
10217 computes the same value, but in a more efficient manner (including
10218 calls to other builtin functions).
10219
10220 The call may contain arguments which need to be evaluated, but
10221 which are not useful to determine the result of the call. In
10222 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10223 COMPOUND_EXPR will be an argument which must be evaluated.
10224 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10225 COMPOUND_EXPR in the chain will contain the tree for the simplified
10226 form of the builtin function call. */
10227
10228 static tree
10229 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10230 {
10231 if (!validate_arg (s1, POINTER_TYPE)
10232 || !validate_arg (s2, POINTER_TYPE))
10233 return NULL_TREE;
10234 else
10235 {
10236 /* If the first argument is "", the result is 0. */
10237 const char *p1 = c_getstr (s1);
10238 if (p1 && *p1 == '\0')
10239 {
10240 /* Evaluate and ignore argument s2 in case it has
10241 side-effects. */
10242 return omit_one_operand_loc (loc, size_type_node,
10243 size_zero_node, s2);
10244 }
10245
10246 /* If the second argument is "", return __builtin_strlen(s1). */
10247 const char *p2 = c_getstr (s2);
10248 if (p2 && *p2 == '\0')
10249 {
10250 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10251
10252 /* If the replacement _DECL isn't initialized, don't do the
10253 transformation. */
10254 if (!fn)
10255 return NULL_TREE;
10256
10257 return build_call_expr_loc (loc, fn, 1, s1);
10258 }
10259 return NULL_TREE;
10260 }
10261 }
10262
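/* Illustration only, not part of GCC: a user-level sketch of the strcspn
   folds above (names below are hypothetical).

     size_t a = strcspn ("", set);    // folds to 0; set is still evaluated
     size_t b = strcspn (path, "");   // folds to strlen (path)
*/
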
10263 /* Fold the next_arg or va_start call EXP. Returns true if an error was
10264 produced, false otherwise. This is done so that we don't output the error
10265 or warning more than once. */
10266
10267 bool
10268 fold_builtin_next_arg (tree exp, bool va_start_p)
10269 {
10270 tree fntype = TREE_TYPE (current_function_decl);
10271 int nargs = call_expr_nargs (exp);
10272 tree arg;
10273 /* There is a good chance the current input_location points inside the
10274 definition of the va_start macro (perhaps on the token for the
10275 builtin) in a system header, so warnings will not be emitted.
10276 Use the location in real source code. */
10277 location_t current_location =
10278 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10279 NULL);
10280
10281 if (!stdarg_p (fntype))
10282 {
10283 error ("%<va_start%> used in function with fixed arguments");
10284 return true;
10285 }
10286
10287 if (va_start_p)
10288 {
10289 if (nargs != 2)
10290 {
10291 error ("wrong number of arguments to function %<va_start%>");
10292 return true;
10293 }
10294 arg = CALL_EXPR_ARG (exp, 1);
10295 }
10296 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10297 once we have checked the arguments and, if needed, issued a warning. */
10298 else
10299 {
10300 if (nargs == 0)
10301 {
10302 /* Evidently an out of date version of <stdarg.h>; can't validate
10303 va_start's second argument, but can still work as intended. */
10304 warning_at (current_location,
10305 OPT_Wvarargs,
10306 "%<__builtin_next_arg%> called without an argument");
10307 return true;
10308 }
10309 else if (nargs > 1)
10310 {
10311 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10312 return true;
10313 }
10314 arg = CALL_EXPR_ARG (exp, 0);
10315 }
10316
10317 if (TREE_CODE (arg) == SSA_NAME)
10318 arg = SSA_NAME_VAR (arg);
10319
10320 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10321 or __builtin_next_arg (0) the first time we see it, after checking
10322 the arguments and if needed issuing a warning. */
10323 if (!integer_zerop (arg))
10324 {
10325 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10326
10327 /* Strip off all nops for the sake of the comparison. This
10328 is not quite the same as STRIP_NOPS. It does more.
10329 We must also strip off INDIRECT_EXPR for C++ reference
10330 parameters. */
10331 while (CONVERT_EXPR_P (arg)
10332 || TREE_CODE (arg) == INDIRECT_REF)
10333 arg = TREE_OPERAND (arg, 0);
10334 if (arg != last_parm)
10335 {
10336 /* FIXME: Sometimes with the tree optimizers we can end up with
10337 something other than the last argument even though the user used
10338 the last argument. We just warn and set the arg to be the last
10339 argument so that we will not get wrong-code because of
10340 it. */
10341 warning_at (current_location,
10342 OPT_Wvarargs,
10343 "second parameter of %<va_start%> not last named argument");
10344 }
10345
10346 /* Undefined by C99 7.15.1.4p4 (va_start):
10347 "If the parameter parmN is declared with the register storage
10348 class, with a function or array type, or with a type that is
10349 not compatible with the type that results after application of
10350 the default argument promotions, the behavior is undefined."
10351 */
10352 else if (DECL_REGISTER (arg))
10353 {
10354 warning_at (current_location,
10355 OPT_Wvarargs,
10356 "undefined behavior when second parameter of "
10357 "%<va_start%> is declared with %<register%> storage");
10358 }
10359
10360 /* We want to verify the second parameter just once before the tree
10361 optimizers are run and then avoid keeping it in the tree,
10362 as otherwise we could warn even for correct code like:
10363 void foo (int i, ...)
10364 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10365 if (va_start_p)
10366 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10367 else
10368 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10369 }
10370 return false;
10371 }
10372
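/* Illustration only, not part of GCC: a minimal sketch of what the checks
   above diagnose at the source level.

     #include <stdarg.h>

     void
     f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // -Wvarargs: second parameter of va_start is not
                           // the last named argument
       va_end (ap);
     }

   Using va_start in a function that takes no variable arguments is
   rejected with a hard error instead.  */
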
10373
10374 /* Expand a call EXP to __builtin_object_size. */
10375
10376 static rtx
10377 expand_builtin_object_size (tree exp)
10378 {
10379 tree ost;
10380 int object_size_type;
10381 tree fndecl = get_callee_fndecl (exp);
10382
10383 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10384 {
10385 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10386 exp, fndecl);
10387 expand_builtin_trap ();
10388 return const0_rtx;
10389 }
10390
10391 ost = CALL_EXPR_ARG (exp, 1);
10392 STRIP_NOPS (ost);
10393
10394 if (TREE_CODE (ost) != INTEGER_CST
10395 || tree_int_cst_sgn (ost) < 0
10396 || compare_tree_int (ost, 3) > 0)
10397 {
10398 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10399 exp, fndecl);
10400 expand_builtin_trap ();
10401 return const0_rtx;
10402 }
10403
10404 object_size_type = tree_to_shwi (ost);
10405
10406 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10407 }
10408
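/* Illustration only, not part of GCC: when earlier passes could not compute
   the size, the expansion above yields the documented "unknown" results for
   a pointer P of unknown provenance (names hypothetical):

     size_t a = __builtin_object_size (p, 0);   // (size_t) -1
     size_t b = __builtin_object_size (p, 2);   // (size_t) 0
*/
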
10409 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10410 FCODE is the BUILT_IN_* to use.
10411 Return NULL_RTX if we failed; the caller should emit a normal call,
10412 otherwise try to get the result in TARGET, if convenient (and in
10413 mode MODE if that's convenient). */
10414
10415 static rtx
10416 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10417 enum built_in_function fcode)
10418 {
10419 if (!validate_arglist (exp,
10420 POINTER_TYPE,
10421 fcode == BUILT_IN_MEMSET_CHK
10422 ? INTEGER_TYPE : POINTER_TYPE,
10423 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10424 return NULL_RTX;
10425
10426 tree dest = CALL_EXPR_ARG (exp, 0);
10427 tree src = CALL_EXPR_ARG (exp, 1);
10428 tree len = CALL_EXPR_ARG (exp, 2);
10429 tree size = CALL_EXPR_ARG (exp, 3);
10430
10431 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10432 /*str=*/NULL_TREE, size);
10433
10434 if (!tree_fits_uhwi_p (size))
10435 return NULL_RTX;
10436
10437 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10438 {
10439 /* Avoid transforming the checking call to an ordinary one when
10440 an overflow has been detected or when the call couldn't be
10441 validated because the size is not constant. */
10442 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10443 return NULL_RTX;
10444
10445 tree fn = NULL_TREE;
10446 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10447 mem{cpy,pcpy,move,set} is available. */
10448 switch (fcode)
10449 {
10450 case BUILT_IN_MEMCPY_CHK:
10451 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10452 break;
10453 case BUILT_IN_MEMPCPY_CHK:
10454 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10455 break;
10456 case BUILT_IN_MEMMOVE_CHK:
10457 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10458 break;
10459 case BUILT_IN_MEMSET_CHK:
10460 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10461 break;
10462 default:
10463 break;
10464 }
10465
10466 if (! fn)
10467 return NULL_RTX;
10468
10469 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10470 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10471 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10472 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10473 }
10474 else if (fcode == BUILT_IN_MEMSET_CHK)
10475 return NULL_RTX;
10476 else
10477 {
10478 unsigned int dest_align = get_pointer_alignment (dest);
10479
10480 /* If DEST is not a pointer type, call the normal function. */
10481 if (dest_align == 0)
10482 return NULL_RTX;
10483
10484 /* If SRC and DEST are the same (and not volatile), do nothing. */
10485 if (operand_equal_p (src, dest, 0))
10486 {
10487 tree expr;
10488
10489 if (fcode != BUILT_IN_MEMPCPY_CHK)
10490 {
10491 /* Evaluate and ignore LEN in case it has side-effects. */
10492 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10493 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10494 }
10495
10496 expr = fold_build_pointer_plus (dest, len);
10497 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10498 }
10499
10500 /* __memmove_chk special case. */
10501 if (fcode == BUILT_IN_MEMMOVE_CHK)
10502 {
10503 unsigned int src_align = get_pointer_alignment (src);
10504
10505 if (src_align == 0)
10506 return NULL_RTX;
10507
10508 /* If src is categorized for a readonly section we can use
10509 normal __memcpy_chk. */
10510 if (readonly_data_expr (src))
10511 {
10512 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10513 if (!fn)
10514 return NULL_RTX;
10515 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10516 dest, src, len, size);
10517 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10518 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10519 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10520 }
10521 }
10522 return NULL_RTX;
10523 }
10524 }
10525
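/* Illustration only, not part of GCC: a user-level sketch of the expansion
   above.  When the length is a known constant that fits the known
   destination size, the checking call is expanded like the plain built-in
   (names below are hypothetical):

     char dst[16];
     __builtin___memcpy_chk (dst, src, 8, __builtin_object_size (dst, 0));
     // expands like memcpy (dst, src, 8); a detected overflow or an
     // unknown size keeps the library call to __memcpy_chk instead.
*/
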
10526 /* Emit warning if a buffer overflow is detected at compile time. */
10527
10528 static void
10529 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10530 {
10531 /* The source string. */
10532 tree srcstr = NULL_TREE;
10533 /* The size of the destination object. */
10534 tree objsize = NULL_TREE;
10535 /* The string that is being concatenated with (as in __strcat_chk)
10536 or null if it isn't. */
10537 tree catstr = NULL_TREE;
10538 /* The maximum length of the source sequence in a bounded operation
10539 (such as __strncat_chk) or null if the operation isn't bounded
10540 (such as __strcat_chk). */
10541 tree maxread = NULL_TREE;
10542 /* The exact size of the access (such as in __strncpy_chk). */
10543 tree size = NULL_TREE;
10544
10545 switch (fcode)
10546 {
10547 case BUILT_IN_STRCPY_CHK:
10548 case BUILT_IN_STPCPY_CHK:
10549 srcstr = CALL_EXPR_ARG (exp, 1);
10550 objsize = CALL_EXPR_ARG (exp, 2);
10551 break;
10552
10553 case BUILT_IN_STRCAT_CHK:
10554 /* For __strcat_chk the warning will be emitted only if overflowing
10555 by at least strlen (dest) + 1 bytes. */
10556 catstr = CALL_EXPR_ARG (exp, 0);
10557 srcstr = CALL_EXPR_ARG (exp, 1);
10558 objsize = CALL_EXPR_ARG (exp, 2);
10559 break;
10560
10561 case BUILT_IN_STRNCAT_CHK:
10562 catstr = CALL_EXPR_ARG (exp, 0);
10563 srcstr = CALL_EXPR_ARG (exp, 1);
10564 maxread = CALL_EXPR_ARG (exp, 2);
10565 objsize = CALL_EXPR_ARG (exp, 3);
10566 break;
10567
10568 case BUILT_IN_STRNCPY_CHK:
10569 case BUILT_IN_STPNCPY_CHK:
10570 srcstr = CALL_EXPR_ARG (exp, 1);
10571 size = CALL_EXPR_ARG (exp, 2);
10572 objsize = CALL_EXPR_ARG (exp, 3);
10573 break;
10574
10575 case BUILT_IN_SNPRINTF_CHK:
10576 case BUILT_IN_VSNPRINTF_CHK:
10577 maxread = CALL_EXPR_ARG (exp, 1);
10578 objsize = CALL_EXPR_ARG (exp, 3);
10579 break;
10580 default:
10581 gcc_unreachable ();
10582 }
10583
10584 if (catstr && maxread)
10585 {
10586 /* Check __strncat_chk. There is no way to determine the length
10587 of the string to which the source string is being appended so
10588 just warn when the length of the source string is not known. */
10589 check_strncat_sizes (exp, objsize);
10590 return;
10591 }
10592
10593 /* The destination argument is the first one for all built-ins above. */
10594 tree dst = CALL_EXPR_ARG (exp, 0);
10595
10596 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10597 }
10598
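/* Illustration only, not part of GCC: a source-level case the check above
   diagnoses at compile time (names hypothetical; the exact diagnostic text
   comes from check_access):

     char buf[4];
     __builtin___strcpy_chk (buf, "overflow", __builtin_object_size (buf, 0));
     // warning: writing 9 bytes into a 4-byte destination
*/
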
10599 /* Emit warning if a buffer overflow is detected at compile time
10600 in __sprintf_chk/__vsprintf_chk calls. */
10601
10602 static void
10603 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10604 {
10605 tree size, len, fmt;
10606 const char *fmt_str;
10607 int nargs = call_expr_nargs (exp);
10608
10609 /* Verify the required arguments in the original call. */
10610
10611 if (nargs < 4)
10612 return;
10613 size = CALL_EXPR_ARG (exp, 2);
10614 fmt = CALL_EXPR_ARG (exp, 3);
10615
10616 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10617 return;
10618
10619 /* Check whether the format is a literal string constant. */
10620 fmt_str = c_getstr (fmt);
10621 if (fmt_str == NULL)
10622 return;
10623
10624 if (!init_target_chars ())
10625 return;
10626
10627 /* If the format doesn't contain % args or %%, we know its size. */
10628 if (strchr (fmt_str, target_percent) == 0)
10629 len = build_int_cstu (size_type_node, strlen (fmt_str));
10630 /* If the format is "%s" and first ... argument is a string literal,
10631 we know it too. */
10632 else if (fcode == BUILT_IN_SPRINTF_CHK
10633 && strcmp (fmt_str, target_percent_s) == 0)
10634 {
10635 tree arg;
10636
10637 if (nargs < 5)
10638 return;
10639 arg = CALL_EXPR_ARG (exp, 4);
10640 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10641 return;
10642
10643 len = c_strlen (arg, 1);
10644 if (!len || ! tree_fits_uhwi_p (len))
10645 return;
10646 }
10647 else
10648 return;
10649
10650 /* Add one for the terminating nul. */
10651 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10652
10653 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10654 /*maxread=*/NULL_TREE, len, size);
10655 }
10656
10657 /* Emit a warning if free is called with the address of a variable. */
10658
10659 static void
10660 maybe_emit_free_warning (tree exp)
10661 {
10662 if (call_expr_nargs (exp) != 1)
10663 return;
10664
10665 tree arg = CALL_EXPR_ARG (exp, 0);
10666
10667 STRIP_NOPS (arg);
10668 if (TREE_CODE (arg) != ADDR_EXPR)
10669 return;
10670
10671 arg = get_base_address (TREE_OPERAND (arg, 0));
10672 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10673 return;
10674
10675 if (SSA_VAR_P (arg))
10676 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10677 "%Kattempt to free a non-heap object %qD", exp, arg);
10678 else
10679 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10680 "%Kattempt to free a non-heap object", exp);
10681 }
10682
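/* Illustration only, not part of GCC: the warning above fires on calls
   such as

     int x;
     free (&x);   // -Wfree-nonheap-object: attempt to free a non-heap object
*/
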
10683 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10684 if possible. */
10685
10686 static tree
10687 fold_builtin_object_size (tree ptr, tree ost)
10688 {
10689 unsigned HOST_WIDE_INT bytes;
10690 int object_size_type;
10691
10692 if (!validate_arg (ptr, POINTER_TYPE)
10693 || !validate_arg (ost, INTEGER_TYPE))
10694 return NULL_TREE;
10695
10696 STRIP_NOPS (ost);
10697
10698 if (TREE_CODE (ost) != INTEGER_CST
10699 || tree_int_cst_sgn (ost) < 0
10700 || compare_tree_int (ost, 3) > 0)
10701 return NULL_TREE;
10702
10703 object_size_type = tree_to_shwi (ost);
10704
10705 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10706 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10707 and (size_t) 0 for types 2 and 3. */
10708 if (TREE_SIDE_EFFECTS (ptr))
10709 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10710
10711 if (TREE_CODE (ptr) == ADDR_EXPR)
10712 {
10713 compute_builtin_object_size (ptr, object_size_type, &bytes);
10714 if (wi::fits_to_tree_p (bytes, size_type_node))
10715 return build_int_cstu (size_type_node, bytes);
10716 }
10717 else if (TREE_CODE (ptr) == SSA_NAME)
10718 {
10719 /* If the object size is not known yet, delay folding until
10720 later. Maybe subsequent passes will help determine
10721 it. */
10722 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10723 && wi::fits_to_tree_p (bytes, size_type_node))
10724 return build_int_cstu (size_type_node, bytes);
10725 }
10726
10727 return NULL_TREE;
10728 }
10729
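/* Illustration only, not part of GCC: when the pointer refers to a known
   object, the fold above produces a compile-time constant, e.g.

     char buf[16];
     size_t n = __builtin_object_size (buf, 0);   // folds to 16
*/
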
10730 /* Builtins with folding operations that operate on "..." arguments
10731 need special handling; we need to store the arguments in a convenient
10732 data structure before attempting any folding. Fortunately there are
10733 only a few builtins that fall into this category. FNDECL is the
10734 function, EXP is the CALL_EXPR for the call. */
10735
10736 static tree
10737 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10738 {
10739 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10740 tree ret = NULL_TREE;
10741
10742 switch (fcode)
10743 {
10744 case BUILT_IN_FPCLASSIFY:
10745 ret = fold_builtin_fpclassify (loc, args, nargs);
10746 break;
10747
10748 default:
10749 break;
10750 }
10751 if (ret)
10752 {
10753 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10754 SET_EXPR_LOCATION (ret, loc);
10755 TREE_NO_WARNING (ret) = 1;
10756 return ret;
10757 }
10758 return NULL_TREE;
10759 }
10760
10761 /* Initialize format string characters in the target charset. */
10762
10763 bool
10764 init_target_chars (void)
10765 {
10766 static bool init;
10767 if (!init)
10768 {
10769 target_newline = lang_hooks.to_target_charset ('\n');
10770 target_percent = lang_hooks.to_target_charset ('%');
10771 target_c = lang_hooks.to_target_charset ('c');
10772 target_s = lang_hooks.to_target_charset ('s');
10773 if (target_newline == 0 || target_percent == 0 || target_c == 0
10774 || target_s == 0)
10775 return false;
10776
10777 target_percent_c[0] = target_percent;
10778 target_percent_c[1] = target_c;
10779 target_percent_c[2] = '\0';
10780
10781 target_percent_s[0] = target_percent;
10782 target_percent_s[1] = target_s;
10783 target_percent_s[2] = '\0';
10784
10785 target_percent_s_newline[0] = target_percent;
10786 target_percent_s_newline[1] = target_s;
10787 target_percent_s_newline[2] = target_newline;
10788 target_percent_s_newline[3] = '\0';
10789
10790 init = true;
10791 }
10792 return true;
10793 }
10794
10795 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10796 and no overflow/underflow occurred. INEXACT is true if M was not
10797 exactly calculated. TYPE is the tree type for the result. This
10798 function assumes that the caller cleared the MPFR flags and then
10799 calculated M, so that any flag set on entry here was raised by that
10800 calculation. Return NULL_TREE if any checks fail. */
10801
10802 static tree
10803 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10804 {
10805 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10806 overflow/underflow occurred. If -frounding-math, proceed iff the
10807 result of calling FUNC was exact. */
10808 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10809 && (!flag_rounding_math || !inexact))
10810 {
10811 REAL_VALUE_TYPE rr;
10812
10813 real_from_mpfr (&rr, m, type, GMP_RNDN);
10814 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10815 i.e. check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10816 but the mpfr_t is not, then we underflowed in the
10817 conversion. */
10818 if (real_isfinite (&rr)
10819 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10820 {
10821 REAL_VALUE_TYPE rmode;
10822
10823 real_convert (&rmode, TYPE_MODE (type), &rr);
10824 /* Proceed iff the specified mode can hold the value. */
10825 if (real_identical (&rmode, &rr))
10826 return build_real (type, rmode);
10827 }
10828 }
10829 return NULL_TREE;
10830 }
10831
10832 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10833 number and no overflow/underflow occurred. INEXACT is true if M
10834 was not exactly calculated. TYPE is the tree type for the result.
10835 This function assumes that the caller cleared the MPFR flags and then
10836 calculated M, so that any flag set on entry here was raised by that
10837 calculation. Return NULL_TREE if any checks fail; if
10838 FORCE_CONVERT is true, bypass the checks. */
10839
10840 static tree
10841 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10842 {
10843 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10844 overflow/underflow occurred. If -frounding-math, proceed iff the
10845 result of calling FUNC was exact. */
10846 if (force_convert
10847 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10848 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10849 && (!flag_rounding_math || !inexact)))
10850 {
10851 REAL_VALUE_TYPE re, im;
10852
10853 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10854 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10855 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10856 i.e. check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10857 but the mpfr_t is not, then we underflowed in the
10858 conversion. */
10859 if (force_convert
10860 || (real_isfinite (&re) && real_isfinite (&im)
10861 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10862 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10863 {
10864 REAL_VALUE_TYPE re_mode, im_mode;
10865
10866 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10867 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10868 /* Proceed iff the specified mode can hold the value. */
10869 if (force_convert
10870 || (real_identical (&re_mode, &re)
10871 && real_identical (&im_mode, &im)))
10872 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10873 build_real (TREE_TYPE (type), im_mode));
10874 }
10875 }
10876 return NULL_TREE;
10877 }
10878
10879 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10880 the integer pointed to by ARG_QUO and return the remainder. The
10881 type is taken from the type of ARG0 and is used for setting the
10882 precision of the calculation and results. */
10883
10884 static tree
10885 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10886 {
10887 tree const type = TREE_TYPE (arg0);
10888 tree result = NULL_TREE;
10889
10890 STRIP_NOPS (arg0);
10891 STRIP_NOPS (arg1);
10892
10893 /* To proceed, MPFR must exactly represent the target floating point
10894 format, which only happens when the target base equals two. */
10895 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10896 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10897 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10898 {
10899 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10900 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10901
10902 if (real_isfinite (ra0) && real_isfinite (ra1))
10903 {
10904 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10905 const int prec = fmt->p;
10906 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10907 tree result_rem;
10908 long integer_quo;
10909 mpfr_t m0, m1;
10910
10911 mpfr_inits2 (prec, m0, m1, NULL);
10912 mpfr_from_real (m0, ra0, GMP_RNDN);
10913 mpfr_from_real (m1, ra1, GMP_RNDN);
10914 mpfr_clear_flags ();
10915 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10916 /* Remquo is independent of the rounding mode, so pass
10917 inexact=0 to do_mpfr_ckconv(). */
10918 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10919 mpfr_clears (m0, m1, NULL);
10920 if (result_rem)
10921 {
10922 /* MPFR calculates quo in the host's long so it may
10923 return more bits in quo than the target int can hold
10924 if sizeof(host long) > sizeof(target int). This can
10925 happen even for native compilers in LP64 mode. In
10926 these cases, modulo the quo value with the largest
10927 number that the target int can hold while leaving one
10928 bit for the sign. */
10929 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10930 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10931
10932 /* Dereference the quo pointer argument. */
10933 arg_quo = build_fold_indirect_ref (arg_quo);
10934 /* Proceed iff a valid pointer type was passed in. */
10935 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10936 {
10937 /* Set the value. */
10938 tree result_quo
10939 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10940 build_int_cst (TREE_TYPE (arg_quo),
10941 integer_quo));
10942 TREE_SIDE_EFFECTS (result_quo) = 1;
10943 /* Combine the quo assignment with the rem. */
10944 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10945 result_quo, result_rem));
10946 }
10947 }
10948 }
10949 }
10950 return result;
10951 }
10952
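/* Illustration only, not part of GCC: with constant arguments the fold
   above evaluates remquo at compile time, e.g. (assuming the usual
   round-to-nearest quotient)

     int q;
     double r = remquo (7.5, 2.0, &q);   // r folds to -0.5, q is set to 4
*/
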
10953 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10954 resulting value as a tree with type TYPE. The mpfr precision is
10955 set to the precision of TYPE. We assume that this mpfr function
10956 returns zero if the result could be calculated exactly within the
10957 requested precision. In addition, the integer pointer represented
10958 by ARG_SG will be dereferenced and set to the appropriate signgam
10959 (-1,1) value. */
10960
10961 static tree
10962 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10963 {
10964 tree result = NULL_TREE;
10965
10966 STRIP_NOPS (arg);
10967
10968 /* To proceed, MPFR must exactly represent the target floating point
10969 format, which only happens when the target base equals two. Also
10970 verify ARG is a constant and that ARG_SG is an int pointer. */
10971 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10972 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10973 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10974 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10975 {
10976 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10977
10978 /* In addition to NaN and Inf, the argument cannot be zero or a
10979 negative integer. */
10980 if (real_isfinite (ra)
10981 && ra->cl != rvc_zero
10982 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10983 {
10984 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10985 const int prec = fmt->p;
10986 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10987 int inexact, sg;
10988 mpfr_t m;
10989 tree result_lg;
10990
10991 mpfr_init2 (m, prec);
10992 mpfr_from_real (m, ra, GMP_RNDN);
10993 mpfr_clear_flags ();
10994 inexact = mpfr_lgamma (m, &sg, m, rnd);
10995 result_lg = do_mpfr_ckconv (m, type, inexact);
10996 mpfr_clear (m);
10997 if (result_lg)
10998 {
10999 tree result_sg;
11000
11001 /* Dereference the arg_sg pointer argument. */
11002 arg_sg = build_fold_indirect_ref (arg_sg);
11003 /* Assign the signgam value into *arg_sg. */
11004 result_sg = fold_build2 (MODIFY_EXPR,
11005 TREE_TYPE (arg_sg), arg_sg,
11006 build_int_cst (TREE_TYPE (arg_sg), sg));
11007 TREE_SIDE_EFFECTS (result_sg) = 1;
11008 /* Combine the signgam assignment with the lgamma result. */
11009 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11010 result_sg, result_lg));
11011 }
11012 }
11013 }
11014
11015 return result;
11016 }
11017
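/* Illustration only, not part of GCC: with a constant argument the fold
   above can evaluate lgamma_r at compile time, e.g.

     int sg;
     double r = lgamma_r (1.0, &sg);   // r folds to 0.0, sg is set to 1
*/
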
11018 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11019 mpc function FUNC on it and return the resulting value as a tree
11020 with type TYPE. The mpfr precision is set to the precision of
11021 TYPE. We assume that function FUNC returns zero if the result
11022 could be calculated exactly within the requested precision. If
11023 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11024 in the arguments and/or results. */
11025
11026 tree
11027 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11028 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11029 {
11030 tree result = NULL_TREE;
11031
11032 STRIP_NOPS (arg0);
11033 STRIP_NOPS (arg1);
11034
11035 /* To proceed, MPFR must exactly represent the target floating point
11036 format, which only happens when the target base equals two. */
11037 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11038 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11039 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11040 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11041 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11042 {
11043 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11044 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11045 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11046 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11047
11048 if (do_nonfinite
11049 || (real_isfinite (re0) && real_isfinite (im0)
11050 && real_isfinite (re1) && real_isfinite (im1)))
11051 {
11052 const struct real_format *const fmt =
11053 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11054 const int prec = fmt->p;
11055 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11056 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11057 int inexact;
11058 mpc_t m0, m1;
11059
11060 mpc_init2 (m0, prec);
11061 mpc_init2 (m1, prec);
11062 mpfr_from_real (mpc_realref (m0), re0, rnd);
11063 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11064 mpfr_from_real (mpc_realref (m1), re1, rnd);
11065 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11066 mpfr_clear_flags ();
11067 inexact = func (m0, m0, m1, crnd);
11068 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11069 mpc_clear (m0);
11070 mpc_clear (m1);
11071 }
11072 }
11073
11074 return result;
11075 }
11076
11077 /* A wrapper function for builtin folding that prevents warnings for
11078 "statement without effect" and the like, caused by removing the
11079 call node before the warning is generated. */
11080
11081 tree
11082 fold_call_stmt (gcall *stmt, bool ignore)
11083 {
11084 tree ret = NULL_TREE;
11085 tree fndecl = gimple_call_fndecl (stmt);
11086 location_t loc = gimple_location (stmt);
11087 if (fndecl && fndecl_built_in_p (fndecl)
11088 && !gimple_call_va_arg_pack_p (stmt))
11089 {
11090 int nargs = gimple_call_num_args (stmt);
11091 tree *args = (nargs > 0
11092 ? gimple_call_arg_ptr (stmt, 0)
11093 : &error_mark_node);
11094
11095 if (avoid_folding_inline_builtin (fndecl))
11096 return NULL_TREE;
11097 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11098 {
11099 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11100 }
11101 else
11102 {
11103 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11104 if (ret)
11105 {
11106 /* Propagate location information from original call to
11107 expansion of builtin. Otherwise things like
11108 maybe_emit_chk_warning, that operate on the expansion
11109 of a builtin, will use the wrong location information. */
11110 if (gimple_has_location (stmt))
11111 {
11112 tree realret = ret;
11113 if (TREE_CODE (ret) == NOP_EXPR)
11114 realret = TREE_OPERAND (ret, 0);
11115 if (CAN_HAVE_LOCATION_P (realret)
11116 && !EXPR_HAS_LOCATION (realret))
11117 SET_EXPR_LOCATION (realret, loc);
11118 return realret;
11119 }
11120 return ret;
11121 }
11122 }
11123 }
11124 return NULL_TREE;
11125 }
11126
11127 /* Look up the function in builtin_decl that corresponds to DECL
11128 and set ASMSPEC as its user assembler name. DECL must be a
11129 function decl that declares a builtin. */
11130
11131 void
11132 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11133 {
11134 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11135 && asmspec != 0);
11136
11137 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11138 set_user_assembler_name (builtin, asmspec);
11139
11140 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11141 && INT_TYPE_SIZE < BITS_PER_WORD)
11142 {
11143 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11144 set_user_assembler_libfunc ("ffs", asmspec);
11145 set_optab_libfunc (ffs_optab, mode, "ffs");
11146 }
11147 }
11148
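/* Illustration only, not part of GCC: this is the hook that makes a
   redeclaration with an assembler name, such as

     extern int ffs (int) __asm__ ("my_ffs");

   take effect for a builtin, so that calls GCC emits itself (including the
   ffs optab/libfunc uses handled specially above) reference "my_ffs".  */
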
11149 /* Return true if DECL is a builtin that expands to a constant or similarly
11150 simple code. */
11151 bool
11152 is_simple_builtin (tree decl)
11153 {
11154 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11155 switch (DECL_FUNCTION_CODE (decl))
11156 {
11157 /* Builtins that expand to constants. */
11158 case BUILT_IN_CONSTANT_P:
11159 case BUILT_IN_EXPECT:
11160 case BUILT_IN_OBJECT_SIZE:
11161 case BUILT_IN_UNREACHABLE:
11162 /* Simple register moves or loads from stack. */
11163 case BUILT_IN_ASSUME_ALIGNED:
11164 case BUILT_IN_RETURN_ADDRESS:
11165 case BUILT_IN_EXTRACT_RETURN_ADDR:
11166 case BUILT_IN_FROB_RETURN_ADDR:
11167 case BUILT_IN_RETURN:
11168 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11169 case BUILT_IN_FRAME_ADDRESS:
11170 case BUILT_IN_VA_END:
11171 case BUILT_IN_STACK_SAVE:
11172 case BUILT_IN_STACK_RESTORE:
11173 /* Exception state returns or moves registers around. */
11174 case BUILT_IN_EH_FILTER:
11175 case BUILT_IN_EH_POINTER:
11176 case BUILT_IN_EH_COPY_VALUES:
11177 return true;
11178
11179 default:
11180 return false;
11181 }
11182
11183 return false;
11184 }
11185
11186 /* Return true if DECL is a builtin that is not expensive, i.e. one that is
11187 most probably expanded inline into reasonably simple code. This is a
11188 superset of is_simple_builtin. */
11189 bool
11190 is_inexpensive_builtin (tree decl)
11191 {
11192 if (!decl)
11193 return false;
11194 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11195 return true;
11196 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11197 switch (DECL_FUNCTION_CODE (decl))
11198 {
11199 case BUILT_IN_ABS:
11200 CASE_BUILT_IN_ALLOCA:
11201 case BUILT_IN_BSWAP16:
11202 case BUILT_IN_BSWAP32:
11203 case BUILT_IN_BSWAP64:
11204 case BUILT_IN_CLZ:
11205 case BUILT_IN_CLZIMAX:
11206 case BUILT_IN_CLZL:
11207 case BUILT_IN_CLZLL:
11208 case BUILT_IN_CTZ:
11209 case BUILT_IN_CTZIMAX:
11210 case BUILT_IN_CTZL:
11211 case BUILT_IN_CTZLL:
11212 case BUILT_IN_FFS:
11213 case BUILT_IN_FFSIMAX:
11214 case BUILT_IN_FFSL:
11215 case BUILT_IN_FFSLL:
11216 case BUILT_IN_IMAXABS:
11217 case BUILT_IN_FINITE:
11218 case BUILT_IN_FINITEF:
11219 case BUILT_IN_FINITEL:
11220 case BUILT_IN_FINITED32:
11221 case BUILT_IN_FINITED64:
11222 case BUILT_IN_FINITED128:
11223 case BUILT_IN_FPCLASSIFY:
11224 case BUILT_IN_ISFINITE:
11225 case BUILT_IN_ISINF_SIGN:
11226 case BUILT_IN_ISINF:
11227 case BUILT_IN_ISINFF:
11228 case BUILT_IN_ISINFL:
11229 case BUILT_IN_ISINFD32:
11230 case BUILT_IN_ISINFD64:
11231 case BUILT_IN_ISINFD128:
11232 case BUILT_IN_ISNAN:
11233 case BUILT_IN_ISNANF:
11234 case BUILT_IN_ISNANL:
11235 case BUILT_IN_ISNAND32:
11236 case BUILT_IN_ISNAND64:
11237 case BUILT_IN_ISNAND128:
11238 case BUILT_IN_ISNORMAL:
11239 case BUILT_IN_ISGREATER:
11240 case BUILT_IN_ISGREATEREQUAL:
11241 case BUILT_IN_ISLESS:
11242 case BUILT_IN_ISLESSEQUAL:
11243 case BUILT_IN_ISLESSGREATER:
11244 case BUILT_IN_ISUNORDERED:
11245 case BUILT_IN_VA_ARG_PACK:
11246 case BUILT_IN_VA_ARG_PACK_LEN:
11247 case BUILT_IN_VA_COPY:
11248 case BUILT_IN_TRAP:
11249 case BUILT_IN_SAVEREGS:
11250 case BUILT_IN_POPCOUNTL:
11251 case BUILT_IN_POPCOUNTLL:
11252 case BUILT_IN_POPCOUNTIMAX:
11253 case BUILT_IN_POPCOUNT:
11254 case BUILT_IN_PARITYL:
11255 case BUILT_IN_PARITYLL:
11256 case BUILT_IN_PARITYIMAX:
11257 case BUILT_IN_PARITY:
11258 case BUILT_IN_LABS:
11259 case BUILT_IN_LLABS:
11260 case BUILT_IN_PREFETCH:
11261 case BUILT_IN_ACC_ON_DEVICE:
11262 return true;
11263
11264 default:
11265 return is_simple_builtin (decl);
11266 }
11267
11268 return false;
11269 }
11270
11271 /* Return true if T is a constant and the value cast to a target char
11272 can be represented by a host char.
11273 Store the converted char constant in *P if so. */
11274
11275 bool
11276 target_char_cst_p (tree t, char *p)
11277 {
11278 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11279 return false;
11280
11281 *p = (char)tree_to_uhwi (t);
11282 return true;
11283 }
11284
11285 /* Return true if the builtin DECL is implemented in a standard library.
11286 Otherwise return false, which does not guarantee it is not implemented
11287 there (the list of handled builtins below may be incomplete). */
11288
11289 bool
11290 builtin_with_linkage_p (tree decl)
11291 {
11292 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11293 switch (DECL_FUNCTION_CODE (decl))
11294 {
11295 CASE_FLT_FN (BUILT_IN_ACOS):
11296 CASE_FLT_FN (BUILT_IN_ACOSH):
11297 CASE_FLT_FN (BUILT_IN_ASIN):
11298 CASE_FLT_FN (BUILT_IN_ASINH):
11299 CASE_FLT_FN (BUILT_IN_ATAN):
11300 CASE_FLT_FN (BUILT_IN_ATANH):
11301 CASE_FLT_FN (BUILT_IN_ATAN2):
11302 CASE_FLT_FN (BUILT_IN_CBRT):
11303 CASE_FLT_FN (BUILT_IN_CEIL):
11304 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11305 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11306 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11307 CASE_FLT_FN (BUILT_IN_COS):
11308 CASE_FLT_FN (BUILT_IN_COSH):
11309 CASE_FLT_FN (BUILT_IN_ERF):
11310 CASE_FLT_FN (BUILT_IN_ERFC):
11311 CASE_FLT_FN (BUILT_IN_EXP):
11312 CASE_FLT_FN (BUILT_IN_EXP2):
11313 CASE_FLT_FN (BUILT_IN_EXPM1):
11314 CASE_FLT_FN (BUILT_IN_FABS):
11315 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11316 CASE_FLT_FN (BUILT_IN_FDIM):
11317 CASE_FLT_FN (BUILT_IN_FLOOR):
11318 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11319 CASE_FLT_FN (BUILT_IN_FMA):
11320 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11321 CASE_FLT_FN (BUILT_IN_FMAX):
11322 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11323 CASE_FLT_FN (BUILT_IN_FMIN):
11324 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11325 CASE_FLT_FN (BUILT_IN_FMOD):
11326 CASE_FLT_FN (BUILT_IN_FREXP):
11327 CASE_FLT_FN (BUILT_IN_HYPOT):
11328 CASE_FLT_FN (BUILT_IN_ILOGB):
11329 CASE_FLT_FN (BUILT_IN_LDEXP):
11330 CASE_FLT_FN (BUILT_IN_LGAMMA):
11331 CASE_FLT_FN (BUILT_IN_LLRINT):
11332 CASE_FLT_FN (BUILT_IN_LLROUND):
11333 CASE_FLT_FN (BUILT_IN_LOG):
11334 CASE_FLT_FN (BUILT_IN_LOG10):
11335 CASE_FLT_FN (BUILT_IN_LOG1P):
11336 CASE_FLT_FN (BUILT_IN_LOG2):
11337 CASE_FLT_FN (BUILT_IN_LOGB):
11338 CASE_FLT_FN (BUILT_IN_LRINT):
11339 CASE_FLT_FN (BUILT_IN_LROUND):
11340 CASE_FLT_FN (BUILT_IN_MODF):
11341 CASE_FLT_FN (BUILT_IN_NAN):
11342 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11343 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11344 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11345 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11346 CASE_FLT_FN (BUILT_IN_POW):
11347 CASE_FLT_FN (BUILT_IN_REMAINDER):
11348 CASE_FLT_FN (BUILT_IN_REMQUO):
11349 CASE_FLT_FN (BUILT_IN_RINT):
11350 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11351 CASE_FLT_FN (BUILT_IN_ROUND):
11352 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11353 CASE_FLT_FN (BUILT_IN_SCALBLN):
11354 CASE_FLT_FN (BUILT_IN_SCALBN):
11355 CASE_FLT_FN (BUILT_IN_SIN):
11356 CASE_FLT_FN (BUILT_IN_SINH):
11357 CASE_FLT_FN (BUILT_IN_SINCOS):
11358 CASE_FLT_FN (BUILT_IN_SQRT):
11359 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11360 CASE_FLT_FN (BUILT_IN_TAN):
11361 CASE_FLT_FN (BUILT_IN_TANH):
11362 CASE_FLT_FN (BUILT_IN_TGAMMA):
11363 CASE_FLT_FN (BUILT_IN_TRUNC):
11364 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11365 return true;
11366 default:
11367 break;
11368 }
11369 return false;
11370 }