gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76
77 struct target_builtins default_target_builtins;
78 #if SWITCHABLE_TARGET
79 struct target_builtins *this_target_builtins = &default_target_builtins;
80 #endif
81
82 /* Define the names of the builtin function types and codes. */
83 const char *const built_in_class_names[BUILT_IN_LAST]
84 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
85
86 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
87 const char * built_in_names[(int) END_BUILTINS] =
88 {
89 #include "builtins.def"
90 };
91
 92 /* Set up an array of builtin_info_type, making sure each element's decl is
93 initialized to NULL_TREE. */
94 builtin_info_type builtin_info[(int)END_BUILTINS];
95
96 /* Non-zero if __builtin_constant_p should be folded right away. */
97 bool force_folding_builtin_constant_p;
98
99 static int target_char_cast (tree, char *);
100 static rtx get_memory_rtx (tree, tree);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx result_vector (int, rtx);
104 static void expand_builtin_prefetch (tree);
105 static rtx expand_builtin_apply_args (void);
106 static rtx expand_builtin_apply_args_1 (void);
107 static rtx expand_builtin_apply (rtx, rtx, rtx);
108 static void expand_builtin_return (rtx);
109 static enum type_class type_to_class (tree);
110 static rtx expand_builtin_classify_type (tree);
111 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
112 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
113 static rtx expand_builtin_interclass_mathfn (tree, rtx);
114 static rtx expand_builtin_sincos (tree);
115 static rtx expand_builtin_cexpi (tree, rtx);
116 static rtx expand_builtin_int_roundingfn (tree, rtx);
117 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
118 static rtx expand_builtin_next_arg (void);
119 static rtx expand_builtin_va_start (tree);
120 static rtx expand_builtin_va_end (tree);
121 static rtx expand_builtin_va_copy (tree);
122 static rtx inline_expand_builtin_string_cmp (tree, rtx);
123 static rtx expand_builtin_strcmp (tree, rtx);
124 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
125 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
126 static rtx expand_builtin_memchr (tree, rtx);
127 static rtx expand_builtin_memcpy (tree, rtx);
128 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
129 rtx target, tree exp,
130 memop_ret retmode,
131 bool might_overlap);
132 static rtx expand_builtin_memmove (tree, rtx);
133 static rtx expand_builtin_mempcpy (tree, rtx);
134 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
135 static rtx expand_builtin_strcat (tree);
136 static rtx expand_builtin_strcpy (tree, rtx);
137 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
138 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
139 static rtx expand_builtin_stpncpy (tree, rtx);
140 static rtx expand_builtin_strncat (tree, rtx);
141 static rtx expand_builtin_strncpy (tree, rtx);
142 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
143 static rtx expand_builtin_memset (tree, rtx, machine_mode);
144 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
145 static rtx expand_builtin_bzero (tree);
146 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
147 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
148 static rtx expand_builtin_alloca (tree);
149 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
150 static rtx expand_builtin_frame_address (tree, tree);
151 static tree stabilize_va_list_loc (location_t, tree, int);
152 static rtx expand_builtin_expect (tree, rtx);
153 static rtx expand_builtin_expect_with_probability (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_classify_type (tree);
156 static tree fold_builtin_strlen (location_t, tree, tree);
157 static tree fold_builtin_inf (location_t, tree, int);
158 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
159 static bool validate_arg (const_tree, enum tree_code code);
160 static rtx expand_builtin_fabs (tree, rtx, rtx);
161 static rtx expand_builtin_signbit (tree, rtx);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_isascii (location_t, tree);
164 static tree fold_builtin_toascii (location_t, tree);
165 static tree fold_builtin_isdigit (location_t, tree);
166 static tree fold_builtin_fabs (location_t, tree, tree);
167 static tree fold_builtin_abs (location_t, tree, tree);
168 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
169 enum tree_code);
170 static tree fold_builtin_varargs (location_t, tree, tree*, int);
171
172 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
173 static tree fold_builtin_strspn (location_t, tree, tree, tree);
174 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
175
176 static rtx expand_builtin_object_size (tree);
177 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 enum built_in_function);
179 static void maybe_emit_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_free_warning (tree);
182 static tree fold_builtin_object_size (tree, tree);
183
184 unsigned HOST_WIDE_INT target_newline;
185 unsigned HOST_WIDE_INT target_percent;
186 static unsigned HOST_WIDE_INT target_c;
187 static unsigned HOST_WIDE_INT target_s;
188 char target_percent_c[3];
189 char target_percent_s[3];
190 char target_percent_s_newline[4];
191 static tree do_mpfr_remquo (tree, tree, tree);
192 static tree do_mpfr_lgamma_r (tree, tree, tree);
193 static void expand_builtin_sync_synchronize (void);
194
 195 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
196
197 static bool
198 is_builtin_name (const char *name)
199 {
200 if (strncmp (name, "__builtin_", 10) == 0)
201 return true;
202 if (strncmp (name, "__sync_", 7) == 0)
203 return true;
204 if (strncmp (name, "__atomic_", 9) == 0)
205 return true;
206 return false;
207 }
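/* Illustrative sketch (based on the prefix checks above, not in the upstream
   comments): given those strncmp tests,

     is_builtin_name ("__builtin_memcpy")      returns true
     is_builtin_name ("__sync_fetch_and_add")  returns true
     is_builtin_name ("__atomic_load_n")       returns true
     is_builtin_name ("memcpy")                returns false

   Only the prefix is inspected; the rest of the name is not validated.  */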
208
209 /* Return true if NODE should be considered for inline expansion regardless
 210 of the optimization level. This is the case whenever a function is invoked with
211 its "internal" name, which normally contains the prefix "__builtin". */
212
213 bool
214 called_as_built_in (tree node)
215 {
216 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
217 we want the name used to call the function, not the name it
218 will have. */
219 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
220 return is_builtin_name (name);
221 }
222
223 /* Compute values M and N such that M divides (address of EXP - N) and such
 224 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
 225 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
 226 *ALIGNP and any bit-offset to *BITPOSP.
227
228 Note that the address (and thus the alignment) computed here is based
229 on the address to which a symbol resolves, whereas DECL_ALIGN is based
230 on the address at which an object is actually located. These two
231 addresses are not always the same. For example, on ARM targets,
232 the address &foo of a Thumb function foo() has the lowest bit set,
233 whereas foo() itself starts on an even address.
234
235 If ADDR_P is true we are taking the address of the memory reference EXP
236 and thus cannot rely on the access taking place. */
237
238 static bool
239 get_object_alignment_2 (tree exp, unsigned int *alignp,
240 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
241 {
242 poly_int64 bitsize, bitpos;
243 tree offset;
244 machine_mode mode;
245 int unsignedp, reversep, volatilep;
246 unsigned int align = BITS_PER_UNIT;
247 bool known_alignment = false;
248
249 /* Get the innermost object and the constant (bitpos) and possibly
250 variable (offset) offset of the access. */
251 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
252 &unsignedp, &reversep, &volatilep);
253
254 /* Extract alignment information from the innermost object and
255 possibly adjust bitpos and offset. */
256 if (TREE_CODE (exp) == FUNCTION_DECL)
257 {
258 /* Function addresses can encode extra information besides their
259 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
260 allows the low bit to be used as a virtual bit, we know
261 that the address itself must be at least 2-byte aligned. */
262 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
263 align = 2 * BITS_PER_UNIT;
264 }
265 else if (TREE_CODE (exp) == LABEL_DECL)
266 ;
267 else if (TREE_CODE (exp) == CONST_DECL)
268 {
269 /* The alignment of a CONST_DECL is determined by its initializer. */
270 exp = DECL_INITIAL (exp);
271 align = TYPE_ALIGN (TREE_TYPE (exp));
272 if (CONSTANT_CLASS_P (exp))
273 align = targetm.constant_alignment (exp, align);
274
275 known_alignment = true;
276 }
277 else if (DECL_P (exp))
278 {
279 align = DECL_ALIGN (exp);
280 known_alignment = true;
281 }
282 else if (TREE_CODE (exp) == INDIRECT_REF
283 || TREE_CODE (exp) == MEM_REF
284 || TREE_CODE (exp) == TARGET_MEM_REF)
285 {
286 tree addr = TREE_OPERAND (exp, 0);
287 unsigned ptr_align;
288 unsigned HOST_WIDE_INT ptr_bitpos;
289 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
290
 291 /* If the address is explicitly aligned, handle that. */
292 if (TREE_CODE (addr) == BIT_AND_EXPR
293 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
294 {
295 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
296 ptr_bitmask *= BITS_PER_UNIT;
297 align = least_bit_hwi (ptr_bitmask);
298 addr = TREE_OPERAND (addr, 0);
299 }
300
301 known_alignment
302 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
303 align = MAX (ptr_align, align);
304
305 /* Re-apply explicit alignment to the bitpos. */
306 ptr_bitpos &= ptr_bitmask;
307
308 /* The alignment of the pointer operand in a TARGET_MEM_REF
309 has to take the variable offset parts into account. */
310 if (TREE_CODE (exp) == TARGET_MEM_REF)
311 {
312 if (TMR_INDEX (exp))
313 {
314 unsigned HOST_WIDE_INT step = 1;
315 if (TMR_STEP (exp))
316 step = TREE_INT_CST_LOW (TMR_STEP (exp));
317 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
318 }
319 if (TMR_INDEX2 (exp))
320 align = BITS_PER_UNIT;
321 known_alignment = false;
322 }
323
324 /* When EXP is an actual memory reference then we can use
325 TYPE_ALIGN of a pointer indirection to derive alignment.
326 Do so only if get_pointer_alignment_1 did not reveal absolute
327 alignment knowledge and if using that alignment would
328 improve the situation. */
329 unsigned int talign;
330 if (!addr_p && !known_alignment
331 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
332 && talign > align)
333 align = talign;
334 else
335 {
336 /* Else adjust bitpos accordingly. */
337 bitpos += ptr_bitpos;
338 if (TREE_CODE (exp) == MEM_REF
339 || TREE_CODE (exp) == TARGET_MEM_REF)
340 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
341 }
342 }
343 else if (TREE_CODE (exp) == STRING_CST)
344 {
345 /* STRING_CST are the only constant objects we allow to be not
346 wrapped inside a CONST_DECL. */
347 align = TYPE_ALIGN (TREE_TYPE (exp));
348 if (CONSTANT_CLASS_P (exp))
349 align = targetm.constant_alignment (exp, align);
350
351 known_alignment = true;
352 }
353
354 /* If there is a non-constant offset part extract the maximum
355 alignment that can prevail. */
356 if (offset)
357 {
358 unsigned int trailing_zeros = tree_ctz (offset);
359 if (trailing_zeros < HOST_BITS_PER_INT)
360 {
361 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
362 if (inner)
363 align = MIN (align, inner);
364 }
365 }
366
367 /* Account for the alignment of runtime coefficients, so that the constant
368 bitpos is guaranteed to be accurate. */
369 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
370 if (alt_align != 0 && alt_align < align)
371 {
372 align = alt_align;
373 known_alignment = false;
374 }
375
376 *alignp = align;
377 *bitposp = bitpos.coeffs[0] & (align - 1);
378 return known_alignment;
379 }
380
381 /* For a memory reference expression EXP compute values M and N such that M
382 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 383 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
 384 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
385
386 bool
387 get_object_alignment_1 (tree exp, unsigned int *alignp,
388 unsigned HOST_WIDE_INT *bitposp)
389 {
390 return get_object_alignment_2 (exp, alignp, bitposp, false);
391 }
392
393 /* Return the alignment in bits of EXP, an object. */
394
395 unsigned int
396 get_object_alignment (tree exp)
397 {
398 unsigned HOST_WIDE_INT bitpos = 0;
399 unsigned int align;
400
401 get_object_alignment_1 (exp, &align, &bitpos);
402
403 /* align and bitpos now specify known low bits of the pointer.
404 ptr & (align - 1) == bitpos. */
405
406 if (bitpos != 0)
407 align = least_bit_hwi (bitpos);
408 return align;
409 }
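/* Worked example (a sketch, not from the upstream comments): if
   get_object_alignment_1 reports align == 64 and bitpos == 16 (both in
   bits), the object is known to lie 16 bits past a 64-bit boundary, so only
   16 bits (2 bytes) of alignment are actually guaranteed;
   least_bit_hwi (16) == 16 is what is returned.  With bitpos == 0 the ALIGN
   value is returned unchanged.  */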
410
411 /* For a pointer valued expression EXP compute values M and N such that M
412 divides (EXP - N) and such that N < M. If these numbers can be determined,
 413 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
414 the results are just a conservative approximation.
415
416 If EXP is not a pointer, false is returned too. */
417
418 bool
419 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
420 unsigned HOST_WIDE_INT *bitposp)
421 {
422 STRIP_NOPS (exp);
423
424 if (TREE_CODE (exp) == ADDR_EXPR)
425 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
426 alignp, bitposp, true);
427 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
428 {
429 unsigned int align;
430 unsigned HOST_WIDE_INT bitpos;
431 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
432 &align, &bitpos);
433 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
434 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
435 else
436 {
437 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
438 if (trailing_zeros < HOST_BITS_PER_INT)
439 {
440 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
441 if (inner)
442 align = MIN (align, inner);
443 }
444 }
445 *alignp = align;
446 *bitposp = bitpos & (align - 1);
447 return res;
448 }
449 else if (TREE_CODE (exp) == SSA_NAME
450 && POINTER_TYPE_P (TREE_TYPE (exp)))
451 {
452 unsigned int ptr_align, ptr_misalign;
453 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454
455 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
456 {
457 *bitposp = ptr_misalign * BITS_PER_UNIT;
458 *alignp = ptr_align * BITS_PER_UNIT;
459 /* Make sure to return a sensible alignment when the multiplication
460 by BITS_PER_UNIT overflowed. */
461 if (*alignp == 0)
462 *alignp = 1u << (HOST_BITS_PER_INT - 1);
463 /* We cannot really tell whether this result is an approximation. */
464 return false;
465 }
466 else
467 {
468 *bitposp = 0;
469 *alignp = BITS_PER_UNIT;
470 return false;
471 }
472 }
473 else if (TREE_CODE (exp) == INTEGER_CST)
474 {
475 *alignp = BIGGEST_ALIGNMENT;
476 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
477 & (BIGGEST_ALIGNMENT - 1));
478 return true;
479 }
480
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
484 }
485
486 /* Return the alignment in bits of EXP, a pointer valued expression.
487 The alignment returned is, by default, the alignment of the thing that
488 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
489
490 Otherwise, look at the expression to see if we can do better, i.e., if the
491 expression is actually pointing at an object whose alignment is tighter. */
492
493 unsigned int
494 get_pointer_alignment (tree exp)
495 {
496 unsigned HOST_WIDE_INT bitpos = 0;
497 unsigned int align;
498
499 get_pointer_alignment_1 (exp, &align, &bitpos);
500
501 /* align and bitpos now specify known low bits of the pointer.
502 ptr & (align - 1) == bitpos. */
503
504 if (bitpos != 0)
505 align = least_bit_hwi (bitpos);
506
507 return align;
508 }
509
510 /* Return the number of leading non-zero elements in the sequence
511 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
512 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
513
514 unsigned
515 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
516 {
517 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
518
519 unsigned n;
520
521 if (eltsize == 1)
522 {
523 /* Optimize the common case of plain char. */
524 for (n = 0; n < maxelts; n++)
525 {
526 const char *elt = (const char*) ptr + n;
527 if (!*elt)
528 break;
529 }
530 }
531 else
532 {
533 for (n = 0; n < maxelts; n++)
534 {
535 const char *elt = (const char*) ptr + n * eltsize;
536 if (!memcmp (elt, "\0\0\0\0", eltsize))
537 break;
538 }
539 }
540 return n;
541 }
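/* Illustrative example (a sketch, not from the upstream comments): for the
   bytes "ab\0cd" with ELTSIZE == 1 and MAXELTS == 5, string_length returns
   2, the number of leading non-zero elements.  With ELTSIZE == 4 each
   element is instead compared against four zero bytes, matching
   wide-character strings.  */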
542
543 /* For a call at LOC to a function FN that expects a string in the argument
 544 ARG, issue a diagnostic due to it being called with an argument
 545 declared at DECL that is a character array with no terminating NUL. */
546
547 void
548 warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
549 {
550 if (TREE_NO_WARNING (arg))
551 return;
552
553 loc = expansion_point_location_if_in_system_header (loc);
554
555 if (warning_at (loc, OPT_Wstringop_overflow_,
556 "%qs argument missing terminating nul", fn))
557 {
558 inform (DECL_SOURCE_LOCATION (decl),
559 "referenced argument declared here");
560 TREE_NO_WARNING (arg) = 1;
561 }
562 }
563
564 /* For a call EXPR (which may be null) that expects a string argument
565 and SRC as the argument, returns false if SRC is a character array
566 with no terminating NUL. When nonnull, BOUND is the number of
567 characters in which to expect the terminating NUL.
568 When EXPR is nonnull also issues a warning. */
569
570 bool
571 check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */)
572 {
573 tree size;
574 bool exact;
575 tree nonstr = unterminated_array (src, &size, &exact);
576 if (!nonstr)
577 return true;
578
579 /* NONSTR refers to the non-nul terminated constant array and SIZE
580 is the constant size of the array in bytes. EXACT is true when
581 SIZE is exact. */
582
583 if (bound)
584 {
585 wide_int min, max;
586 if (TREE_CODE (bound) == INTEGER_CST)
587 min = max = wi::to_wide (bound);
588 else
589 {
590 value_range_kind rng = get_range_info (bound, &min, &max);
591 if (rng != VR_RANGE)
592 return true;
593 }
594
595 if (wi::leu_p (min, wi::to_wide (size)))
596 return true;
597 }
598
599 if (expr && !TREE_NO_WARNING (expr))
600 {
601 tree fndecl = get_callee_fndecl (expr);
602 const char *fname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
603 warn_string_no_nul (EXPR_LOCATION (expr), fname, src, nonstr);
604 }
605
606 return false;
607 }
608
609 /* If EXP refers to an unterminated constant character array return
610 the declaration of the object of which the array is a member or
611 element and if SIZE is not null, set *SIZE to the size of
612 the unterminated array and set *EXACT if the size is exact or
613 clear it otherwise. Otherwise return null. */
614
615 tree
616 unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
617 {
618 /* C_STRLEN will return NULL and set DECL in the info
 619 structure if EXP references an unterminated array. */
620 c_strlen_data lendata = { };
621 tree len = c_strlen (exp, 1, &lendata);
622 if (len == NULL_TREE && lendata.minlen && lendata.decl)
623 {
624 if (size)
625 {
626 len = lendata.minlen;
627 if (lendata.off)
628 {
629 /* Constant offsets are already accounted for in LENDATA.MINLEN,
630 but not in a SSA_NAME + CST expression. */
631 if (TREE_CODE (lendata.off) == INTEGER_CST)
632 *exact = true;
633 else if (TREE_CODE (lendata.off) == PLUS_EXPR
634 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
635 {
636 /* Subtract the offset from the size of the array. */
637 *exact = false;
638 tree temp = TREE_OPERAND (lendata.off, 1);
639 temp = fold_convert (ssizetype, temp);
640 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
641 }
642 else
643 *exact = false;
644 }
645 else
646 *exact = true;
647
648 *size = len;
649 }
650 return lendata.decl;
651 }
652
653 return NULL_TREE;
654 }
655
656 /* Compute the length of a null-terminated character string or wide
657 character string handling character sizes of 1, 2, and 4 bytes.
658 TREE_STRING_LENGTH is not the right way because it evaluates to
659 the size of the character array in bytes (as opposed to characters)
660 and because it can contain a zero byte in the middle.
661
662 ONLY_VALUE should be nonzero if the result is not going to be emitted
663 into the instruction stream and zero if it is going to be expanded.
664 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
665 is returned, otherwise NULL, since
666 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
667 evaluate the side-effects.
668
669 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
670 accesses. Note that this implies the result is not going to be emitted
671 into the instruction stream.
672
673 Additional information about the string accessed may be recorded
674 in DATA. For example, if ARG references an unterminated string,
675 then the declaration will be stored in the DECL field. If the
676 length of the unterminated string can be determined, it'll be
677 stored in the LEN field. Note this length could well be different
678 than what a C strlen call would return.
679
680 ELTSIZE is 1 for normal single byte character strings, and 2 or
 681 4 for wide character strings. ELTSIZE is by default 1.
682
683 The value returned is of type `ssizetype'. */
684
685 tree
686 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
687 {
688 /* If we were not passed a DATA pointer, then get one to a local
689 structure. That avoids having to check DATA for NULL before
690 each time we want to use it. */
691 c_strlen_data local_strlen_data = { };
692 if (!data)
693 data = &local_strlen_data;
694
695 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
696
697 tree src = STRIP_NOPS (arg);
698 if (TREE_CODE (src) == COND_EXPR
699 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
700 {
701 tree len1, len2;
702
703 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
704 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
705 if (tree_int_cst_equal (len1, len2))
706 return len1;
707 }
708
709 if (TREE_CODE (src) == COMPOUND_EXPR
710 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
711 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
712
713 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
714
715 /* Offset from the beginning of the string in bytes. */
716 tree byteoff;
717 tree memsize;
718 tree decl;
719 src = string_constant (src, &byteoff, &memsize, &decl);
720 if (src == 0)
721 return NULL_TREE;
722
723 /* Determine the size of the string element. */
724 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
725 return NULL_TREE;
726
727 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
728 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
729 in case the latter is less than the size of the array, such as when
730 SRC refers to a short string literal used to initialize a large array.
731 In that case, the elements of the array after the terminating NUL are
732 all NUL. */
733 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
734 strelts = strelts / eltsize;
735
736 if (!tree_fits_uhwi_p (memsize))
737 return NULL_TREE;
738
739 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
740
741 /* PTR can point to the byte representation of any string type, including
742 char* and wchar_t*. */
743 const char *ptr = TREE_STRING_POINTER (src);
744
745 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
746 {
747 /* The code below works only for single byte character types. */
748 if (eltsize != 1)
749 return NULL_TREE;
750
751 /* If the string has an internal NUL character followed by any
752 non-NUL characters (e.g., "foo\0bar"), we can't compute
753 the offset to the following NUL if we don't know where to
754 start searching for it. */
755 unsigned len = string_length (ptr, eltsize, strelts);
756
757 /* Return when an embedded null character is found or none at all.
758 In the latter case, set the DECL/LEN field in the DATA structure
759 so that callers may examine them. */
760 if (len + 1 < strelts)
761 return NULL_TREE;
762 else if (len >= maxelts)
763 {
764 data->decl = decl;
765 data->off = byteoff;
766 data->minlen = ssize_int (len);
767 return NULL_TREE;
768 }
769
770 /* For empty strings the result should be zero. */
771 if (len == 0)
772 return ssize_int (0);
773
774 /* We don't know the starting offset, but we do know that the string
775 has no internal zero bytes. If the offset falls within the bounds
776 of the string subtract the offset from the length of the string,
777 and return that. Otherwise the length is zero. Take care to
778 use SAVE_EXPR in case the OFFSET has side-effects. */
779 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
780 : byteoff;
781 offsave = fold_convert_loc (loc, sizetype, offsave);
782 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
783 size_int (len));
784 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
785 offsave);
786 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
787 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
788 build_zero_cst (ssizetype));
789 }
790
791 /* Offset from the beginning of the string in elements. */
792 HOST_WIDE_INT eltoff;
793
794 /* We have a known offset into the string. Start searching there for
795 a null character if we can represent it as a single HOST_WIDE_INT. */
796 if (byteoff == 0)
797 eltoff = 0;
798 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
799 eltoff = -1;
800 else
801 eltoff = tree_to_uhwi (byteoff) / eltsize;
802
803 /* If the offset is known to be out of bounds, warn, and call strlen at
804 runtime. */
805 if (eltoff < 0 || eltoff >= maxelts)
806 {
807 /* Suppress multiple warnings for propagated constant strings. */
808 if (only_value != 2
809 && !TREE_NO_WARNING (arg)
810 && warning_at (loc, OPT_Warray_bounds,
811 "offset %qwi outside bounds of constant string",
812 eltoff))
813 {
814 if (decl)
815 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
816 TREE_NO_WARNING (arg) = 1;
817 }
818 return NULL_TREE;
819 }
820
821 /* If eltoff is larger than strelts but less than maxelts the
822 string length is zero, since the excess memory will be zero. */
823 if (eltoff > strelts)
824 return ssize_int (0);
825
826 /* Use strlen to search for the first zero byte. Since any strings
827 constructed with build_string will have nulls appended, we win even
828 if we get handed something like (char[4])"abcd".
829
830 Since ELTOFF is our starting index into the string, no further
831 calculation is needed. */
832 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
833 strelts - eltoff);
834
835 /* Don't know what to return if there was no zero termination.
836 Ideally this would turn into a gcc_checking_assert over time.
837 Set DECL/LEN so callers can examine them. */
838 if (len >= maxelts - eltoff)
839 {
840 data->decl = decl;
841 data->off = byteoff;
842 data->minlen = ssize_int (len);
843 return NULL_TREE;
844 }
845
846 return ssize_int (len);
847 }
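/* Illustrative examples (a sketch, not from the upstream comments): for ARG
   pointing at the string constant "hello", c_strlen returns ssize_int (5).
   For a constant offset such as &"foo\0bar"[1] it returns ssize_int (2),
   since the search starts at element 1.  For a variable offset into a
   string with no embedded NULs, the code above instead builds the COND_EXPR
   (offset <= len ? len - offset : 0).  */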
848
849 /* Return a constant integer corresponding to target reading
850 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
 851 NULL_TERMINATED_P, reading stops after the '\0' character and all further
 852 ones are assumed to be zero; otherwise it reads as many characters
853 as needed. */
854
855 rtx
856 c_readstr (const char *str, scalar_int_mode mode,
857 bool null_terminated_p/*=true*/)
858 {
859 HOST_WIDE_INT ch;
860 unsigned int i, j;
861 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
862
863 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
864 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
865 / HOST_BITS_PER_WIDE_INT;
866
867 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
868 for (i = 0; i < len; i++)
869 tmp[i] = 0;
870
871 ch = 1;
872 for (i = 0; i < GET_MODE_SIZE (mode); i++)
873 {
874 j = i;
875 if (WORDS_BIG_ENDIAN)
876 j = GET_MODE_SIZE (mode) - i - 1;
877 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
878 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
879 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
880 j *= BITS_PER_UNIT;
881
882 if (ch || !null_terminated_p)
883 ch = (unsigned char) str[i];
884 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
885 }
886
887 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
888 return immed_wide_int_const (c, mode);
889 }
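/* Illustrative example (a sketch, assuming a little-endian target; not from
   the upstream comments): c_readstr ("ab", SImode) yields the constant
   0x00006261, i.e. 'a' (0x61) in the low byte, 'b' (0x62) above it, and
   zeros beyond the terminating NUL because CH stays 0 once a NUL has been
   read (when NULL_TERMINATED_P).  */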
890
891 /* Cast a target constant CST to target CHAR and if that value fits into
 892 host char type, return zero and put that value into the variable pointed to by
893 P. */
894
895 static int
896 target_char_cast (tree cst, char *p)
897 {
898 unsigned HOST_WIDE_INT val, hostval;
899
900 if (TREE_CODE (cst) != INTEGER_CST
901 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
902 return 1;
903
904 /* Do not care if it fits or not right here. */
905 val = TREE_INT_CST_LOW (cst);
906
907 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
908 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
909
910 hostval = val;
911 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
912 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
913
914 if (val != hostval)
915 return 1;
916
917 *p = hostval;
918 return 0;
919 }
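/* Illustrative example (a sketch, assuming an ASCII target; not from the
   upstream comments): for an INTEGER_CST of value 65 ('A'), this stores 65
   in *P and returns 0.  It returns 1 only when CST is not an INTEGER_CST or
   when the masked target character value cannot be represented in a host
   char.  */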
920
921 /* Similar to save_expr, but assumes that arbitrary code is not executed
922 in between the multiple evaluations. In particular, we assume that a
923 non-addressable local variable will not be modified. */
924
925 static tree
926 builtin_save_expr (tree exp)
927 {
928 if (TREE_CODE (exp) == SSA_NAME
929 || (TREE_ADDRESSABLE (exp) == 0
930 && (TREE_CODE (exp) == PARM_DECL
931 || (VAR_P (exp) && !TREE_STATIC (exp)))))
932 return exp;
933
934 return save_expr (exp);
935 }
936
937 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
938 times to get the address of either a higher stack frame, or a return
939 address located within it (depending on FNDECL_CODE). */
940
941 static rtx
942 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
943 {
944 int i;
945 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
946 if (tem == NULL_RTX)
947 {
948 /* For a zero count with __builtin_return_address, we don't care what
949 frame address we return, because target-specific definitions will
950 override us. Therefore frame pointer elimination is OK, and using
951 the soft frame pointer is OK.
952
953 For a nonzero count, or a zero count with __builtin_frame_address,
954 we require a stable offset from the current frame pointer to the
955 previous one, so we must use the hard frame pointer, and
956 we must disable frame pointer elimination. */
957 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
958 tem = frame_pointer_rtx;
959 else
960 {
961 tem = hard_frame_pointer_rtx;
962
963 /* Tell reload not to eliminate the frame pointer. */
964 crtl->accesses_prior_frames = 1;
965 }
966 }
967
968 if (count > 0)
969 SETUP_FRAME_ADDRESSES ();
970
971 /* On the SPARC, the return address is not in the frame, it is in a
972 register. There is no way to access it off of the current frame
973 pointer, but it can be accessed off the previous frame pointer by
974 reading the value from the register window save area. */
975 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
976 count--;
977
978 /* Scan back COUNT frames to the specified frame. */
979 for (i = 0; i < count; i++)
980 {
981 /* Assume the dynamic chain pointer is in the word that the
982 frame address points to, unless otherwise specified. */
983 tem = DYNAMIC_CHAIN_ADDRESS (tem);
984 tem = memory_address (Pmode, tem);
985 tem = gen_frame_mem (Pmode, tem);
986 tem = copy_to_reg (tem);
987 }
988
989 /* For __builtin_frame_address, return what we've got. But, on
990 the SPARC for example, we may have to add a bias. */
991 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
992 return FRAME_ADDR_RTX (tem);
993
994 /* For __builtin_return_address, get the return address from that frame. */
995 #ifdef RETURN_ADDR_RTX
996 tem = RETURN_ADDR_RTX (count, tem);
997 #else
998 tem = memory_address (Pmode,
999 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
1000 tem = gen_frame_mem (Pmode, tem);
1001 #endif
1002 return tem;
1003 }
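/* Usage sketch (not from the upstream comments): this helper backs
   source-level calls such as

     void *ra = __builtin_return_address (0);
     void *fa = __builtin_frame_address (1);

   A zero COUNT with BUILT_IN_RETURN_ADDRESS may use the soft frame pointer;
   any other combination switches to the hard frame pointer and sets
   crtl->accesses_prior_frames so that it is not eliminated.  */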
1004
1005 /* Alias set used for setjmp buffer. */
1006 static alias_set_type setjmp_alias_set = -1;
1007
1008 /* Construct the leading half of a __builtin_setjmp call. Control will
1009 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1010 exception handling code. */
1011
1012 void
1013 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
1014 {
1015 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1016 rtx stack_save;
1017 rtx mem;
1018
1019 if (setjmp_alias_set == -1)
1020 setjmp_alias_set = new_alias_set ();
1021
1022 buf_addr = convert_memory_address (Pmode, buf_addr);
1023
1024 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
1025
1026 /* We store the frame pointer and the address of receiver_label in
1027 the buffer and use the rest of it for the stack save area, which
1028 is machine-dependent. */
1029
1030 mem = gen_rtx_MEM (Pmode, buf_addr);
1031 set_mem_alias_set (mem, setjmp_alias_set);
1032 emit_move_insn (mem, hard_frame_pointer_rtx);
1033
1034 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
 1035 GET_MODE_SIZE (Pmode)));
1036 set_mem_alias_set (mem, setjmp_alias_set);
1037
1038 emit_move_insn (validize_mem (mem),
1039 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
1040
1041 stack_save = gen_rtx_MEM (sa_mode,
1042 plus_constant (Pmode, buf_addr,
1043 2 * GET_MODE_SIZE (Pmode)));
1044 set_mem_alias_set (stack_save, setjmp_alias_set);
1045 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1046
1047 /* If there is further processing to do, do it. */
1048 if (targetm.have_builtin_setjmp_setup ())
1049 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1050
1051 /* We have a nonlocal label. */
1052 cfun->has_nonlocal_label = 1;
1053 }
1054
1055 /* Construct the trailing part of a __builtin_setjmp call. This is
1056 also called directly by the SJLJ exception handling code.
 1057 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
1058
1059 void
1060 expand_builtin_setjmp_receiver (rtx receiver_label)
1061 {
1062 rtx chain;
1063
1064 /* Mark the FP as used when we get here, so we have to make sure it's
1065 marked as used by this function. */
1066 emit_use (hard_frame_pointer_rtx);
1067
1068 /* Mark the static chain as clobbered here so life information
1069 doesn't get messed up for it. */
1070 chain = rtx_for_static_chain (current_function_decl, true);
1071 if (chain && REG_P (chain))
1072 emit_clobber (chain);
1073
1074 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1075 {
1076 /* If the argument pointer can be eliminated in favor of the
1077 frame pointer, we don't need to restore it. We assume here
1078 that if such an elimination is present, it can always be used.
1079 This is the case on all known machines; if we don't make this
1080 assumption, we do unnecessary saving on many machines. */
1081 size_t i;
1082 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1083
1084 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1085 if (elim_regs[i].from == ARG_POINTER_REGNUM
1086 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1087 break;
1088
1089 if (i == ARRAY_SIZE (elim_regs))
1090 {
1091 /* Now restore our arg pointer from the address at which it
1092 was saved in our stack frame. */
1093 emit_move_insn (crtl->args.internal_arg_pointer,
1094 copy_to_reg (get_arg_pointer_save_area ()));
1095 }
1096 }
1097
1098 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1099 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1100 else if (targetm.have_nonlocal_goto_receiver ())
1101 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1102 else
1103 { /* Nothing */ }
1104
1105 /* We must not allow the code we just generated to be reordered by
1106 scheduling. Specifically, the update of the frame pointer must
1107 happen immediately, not later. */
1108 emit_insn (gen_blockage ());
1109 }
1110
1111 /* __builtin_longjmp is passed a pointer to an array of five words (not
1112 all will be used on all machines). It operates similarly to the C
1113 library function of the same name, but is more efficient. Much of
1114 the code below is copied from the handling of non-local gotos. */
1115
1116 static void
1117 expand_builtin_longjmp (rtx buf_addr, rtx value)
1118 {
1119 rtx fp, lab, stack;
1120 rtx_insn *insn, *last;
1121 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1122
 1123 /* DRAP is needed for stack realignment if longjmp is expanded in the
 1124 current function. */
1125 if (SUPPORTS_STACK_ALIGNMENT)
1126 crtl->need_drap = true;
1127
1128 if (setjmp_alias_set == -1)
1129 setjmp_alias_set = new_alias_set ();
1130
1131 buf_addr = convert_memory_address (Pmode, buf_addr);
1132
1133 buf_addr = force_reg (Pmode, buf_addr);
1134
 1135 /* We require that the user pass a second argument of 1, because
1136 that is what builtin_setjmp will return. */
1137 gcc_assert (value == const1_rtx);
1138
1139 last = get_last_insn ();
1140 if (targetm.have_builtin_longjmp ())
1141 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1142 else
1143 {
1144 fp = gen_rtx_MEM (Pmode, buf_addr);
1145 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1146 GET_MODE_SIZE (Pmode)));
1147
1148 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1149 2 * GET_MODE_SIZE (Pmode)));
1150 set_mem_alias_set (fp, setjmp_alias_set);
1151 set_mem_alias_set (lab, setjmp_alias_set);
1152 set_mem_alias_set (stack, setjmp_alias_set);
1153
1154 /* Pick up FP, label, and SP from the block and jump. This code is
1155 from expand_goto in stmt.c; see there for detailed comments. */
1156 if (targetm.have_nonlocal_goto ())
1157 /* We have to pass a value to the nonlocal_goto pattern that will
1158 get copied into the static_chain pointer, but it does not matter
1159 what that value is, because builtin_setjmp does not use it. */
1160 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1161 else
1162 {
1163 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1164 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1165
1166 lab = copy_to_reg (lab);
1167
1168 /* Restore the frame pointer and stack pointer. We must use a
1169 temporary since the setjmp buffer may be a local. */
1170 fp = copy_to_reg (fp);
1171 emit_stack_restore (SAVE_NONLOCAL, stack);
1172
1173 /* Ensure the frame pointer move is not optimized. */
1174 emit_insn (gen_blockage ());
1175 emit_clobber (hard_frame_pointer_rtx);
1176 emit_clobber (frame_pointer_rtx);
1177 emit_move_insn (hard_frame_pointer_rtx, fp);
1178
1179 emit_use (hard_frame_pointer_rtx);
1180 emit_use (stack_pointer_rtx);
1181 emit_indirect_jump (lab);
1182 }
1183 }
1184
1185 /* Search backwards and mark the jump insn as a non-local goto.
1186 Note that this precludes the use of __builtin_longjmp to a
1187 __builtin_setjmp target in the same function. However, we've
1188 already cautioned the user that these functions are for
1189 internal exception handling use only. */
1190 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1191 {
1192 gcc_assert (insn != last);
1193
1194 if (JUMP_P (insn))
1195 {
1196 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1197 break;
1198 }
1199 else if (CALL_P (insn))
1200 break;
1201 }
1202 }
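/* Layout note (a sketch derived from the setjmp/longjmp expanders above,
   not from the upstream comments): the __builtin_setjmp buffer is used in
   Pmode-sized words starting at BUF_ADDR as

     word 0    saved hard frame pointer
     word 1    address of the receiver label
     word 2+   nonlocal stack save area (sa_mode)  */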
1203
1204 static inline bool
1205 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1206 {
1207 return (iter->i < iter->n);
1208 }
1209
1210 /* This function validates the types of a function call argument list
1211 against a specified list of tree_codes. If the last specifier is a 0,
1212 that represents an ellipsis, otherwise the last specifier must be a
1213 VOID_TYPE. */
1214
1215 static bool
1216 validate_arglist (const_tree callexpr, ...)
1217 {
1218 enum tree_code code;
 1219 bool res = false;
1220 va_list ap;
1221 const_call_expr_arg_iterator iter;
1222 const_tree arg;
1223
1224 va_start (ap, callexpr);
1225 init_const_call_expr_arg_iterator (callexpr, &iter);
1226
1227 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1228 tree fn = CALL_EXPR_FN (callexpr);
1229 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1230
1231 for (unsigned argno = 1; ; ++argno)
1232 {
1233 code = (enum tree_code) va_arg (ap, int);
1234
1235 switch (code)
1236 {
1237 case 0:
 1238 /* This signifies an ellipsis; any further arguments are all ok. */
1239 res = true;
1240 goto end;
1241 case VOID_TYPE:
1242 /* This signifies an endlink, if no arguments remain, return
1243 true, otherwise return false. */
1244 res = !more_const_call_expr_args_p (&iter);
1245 goto end;
1246 case POINTER_TYPE:
1247 /* The actual argument must be nonnull when either the whole
1248 called function has been declared nonnull, or when the formal
1249 argument corresponding to the actual argument has been. */
1250 if (argmap
1251 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1252 {
1253 arg = next_const_call_expr_arg (&iter);
1254 if (!validate_arg (arg, code) || integer_zerop (arg))
1255 goto end;
1256 break;
1257 }
1258 /* FALLTHRU */
1259 default:
1260 /* If no parameters remain or the parameter's code does not
1261 match the specified code, return false. Otherwise continue
1262 checking any remaining arguments. */
1263 arg = next_const_call_expr_arg (&iter);
1264 if (!validate_arg (arg, code))
1265 goto end;
1266 break;
1267 }
1268 }
1269
1270 /* We need gotos here since we can only have one VA_CLOSE in a
1271 function. */
1272 end: ;
1273 va_end (ap);
1274
1275 BITMAP_FREE (argmap);
1276
1277 return res;
1278 }
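/* Usage sketch (not from the upstream comments): a typical expander checks
   the call's shape first, e.g.

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
                            VOID_TYPE))
       return NULL_RTX;

   which accepts exactly three arguments of those tree-code classes and,
   through the POINTER_TYPE case above, also rejects literal null pointers
   for parameters declared nonnull.  */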
1279
1280 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1281 and the address of the save area. */
1282
1283 static rtx
1284 expand_builtin_nonlocal_goto (tree exp)
1285 {
1286 tree t_label, t_save_area;
1287 rtx r_label, r_save_area, r_fp, r_sp;
1288 rtx_insn *insn;
1289
1290 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1291 return NULL_RTX;
1292
1293 t_label = CALL_EXPR_ARG (exp, 0);
1294 t_save_area = CALL_EXPR_ARG (exp, 1);
1295
1296 r_label = expand_normal (t_label);
1297 r_label = convert_memory_address (Pmode, r_label);
1298 r_save_area = expand_normal (t_save_area);
1299 r_save_area = convert_memory_address (Pmode, r_save_area);
1300 /* Copy the address of the save location to a register just in case it was
1301 based on the frame pointer. */
1302 r_save_area = copy_to_reg (r_save_area);
1303 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1304 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1305 plus_constant (Pmode, r_save_area,
1306 GET_MODE_SIZE (Pmode)));
1307
1308 crtl->has_nonlocal_goto = 1;
1309
1310 /* ??? We no longer need to pass the static chain value, afaik. */
1311 if (targetm.have_nonlocal_goto ())
1312 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1313 else
1314 {
1315 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1316 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1317
1318 r_label = copy_to_reg (r_label);
1319
1320 /* Restore the frame pointer and stack pointer. We must use a
1321 temporary since the setjmp buffer may be a local. */
1322 r_fp = copy_to_reg (r_fp);
1323 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1324
1325 /* Ensure the frame pointer move is not optimized. */
1326 emit_insn (gen_blockage ());
1327 emit_clobber (hard_frame_pointer_rtx);
1328 emit_clobber (frame_pointer_rtx);
1329 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1330
1331 /* USE of hard_frame_pointer_rtx added for consistency;
1332 not clear if really needed. */
1333 emit_use (hard_frame_pointer_rtx);
1334 emit_use (stack_pointer_rtx);
1335
1336 /* If the architecture is using a GP register, we must
1337 conservatively assume that the target function makes use of it.
1338 The prologue of functions with nonlocal gotos must therefore
1339 initialize the GP register to the appropriate value, and we
1340 must then make sure that this value is live at the point
1341 of the jump. (Note that this doesn't necessarily apply
1342 to targets with a nonlocal_goto pattern; they are free
1343 to implement it in their own way. Note also that this is
1344 a no-op if the GP register is a global invariant.) */
1345 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1346 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1347 emit_use (pic_offset_table_rtx);
1348
1349 emit_indirect_jump (r_label);
1350 }
1351
1352 /* Search backwards to the jump insn and mark it as a
1353 non-local goto. */
1354 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1355 {
1356 if (JUMP_P (insn))
1357 {
1358 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1359 break;
1360 }
1361 else if (CALL_P (insn))
1362 break;
1363 }
1364
1365 return const0_rtx;
1366 }
1367
1368 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1369 (not all will be used on all machines) that was passed to __builtin_setjmp.
1370 It updates the stack pointer in that block to the current value. This is
1371 also called directly by the SJLJ exception handling code. */
1372
1373 void
1374 expand_builtin_update_setjmp_buf (rtx buf_addr)
1375 {
1376 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1377 buf_addr = convert_memory_address (Pmode, buf_addr);
1378 rtx stack_save
1379 = gen_rtx_MEM (sa_mode,
1380 memory_address
1381 (sa_mode,
1382 plus_constant (Pmode, buf_addr,
1383 2 * GET_MODE_SIZE (Pmode))));
1384
1385 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1386 }
1387
1388 /* Expand a call to __builtin_prefetch. For a target that does not support
1389 data prefetch, evaluate the memory address argument in case it has side
1390 effects. */
1391
1392 static void
1393 expand_builtin_prefetch (tree exp)
1394 {
1395 tree arg0, arg1, arg2;
1396 int nargs;
1397 rtx op0, op1, op2;
1398
1399 if (!validate_arglist (exp, POINTER_TYPE, 0))
1400 return;
1401
1402 arg0 = CALL_EXPR_ARG (exp, 0);
1403
1404 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1405 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1406 locality). */
1407 nargs = call_expr_nargs (exp);
1408 if (nargs > 1)
1409 arg1 = CALL_EXPR_ARG (exp, 1);
1410 else
1411 arg1 = integer_zero_node;
1412 if (nargs > 2)
1413 arg2 = CALL_EXPR_ARG (exp, 2);
1414 else
1415 arg2 = integer_three_node;
1416
1417 /* Argument 0 is an address. */
1418 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1419
1420 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1421 if (TREE_CODE (arg1) != INTEGER_CST)
1422 {
1423 error ("second argument to %<__builtin_prefetch%> must be a constant");
1424 arg1 = integer_zero_node;
1425 }
1426 op1 = expand_normal (arg1);
1427 /* Argument 1 must be either zero or one. */
1428 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1429 {
1430 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1431 " using zero");
1432 op1 = const0_rtx;
1433 }
1434
1435 /* Argument 2 (locality) must be a compile-time constant int. */
1436 if (TREE_CODE (arg2) != INTEGER_CST)
1437 {
1438 error ("third argument to %<__builtin_prefetch%> must be a constant");
1439 arg2 = integer_zero_node;
1440 }
1441 op2 = expand_normal (arg2);
1442 /* Argument 2 must be 0, 1, 2, or 3. */
1443 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1444 {
1445 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1446 op2 = const0_rtx;
1447 }
1448
1449 if (targetm.have_prefetch ())
1450 {
1451 class expand_operand ops[3];
1452
1453 create_address_operand (&ops[0], op0);
1454 create_integer_operand (&ops[1], INTVAL (op1));
1455 create_integer_operand (&ops[2], INTVAL (op2));
1456 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1457 return;
1458 }
1459
1460 /* Don't do anything with direct references to volatile memory, but
1461 generate code to handle other side effects. */
1462 if (!MEM_P (op0) && side_effects_p (op0))
1463 emit_insn (op0);
1464 }
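/* Usage sketch (not from the upstream comments): the checks above
   correspond to source-level calls such as

     __builtin_prefetch (p, 1, 3);    (write access, high temporal locality)
     __builtin_prefetch (p);          (rw defaults to 0, locality to 3)

   Both optional arguments must be integer constants; on targets without a
   prefetch pattern only side effects of the address expression are kept.  */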
1465
1466 /* Get a MEM rtx for expression EXP which is the address of an operand
1467 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1468 the maximum length of the block of memory that might be accessed or
1469 NULL if unknown. */
1470
1471 static rtx
1472 get_memory_rtx (tree exp, tree len)
1473 {
1474 tree orig_exp = exp;
1475 rtx addr, mem;
1476
 1477 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
 1478 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1479 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1480 exp = TREE_OPERAND (exp, 0);
1481
1482 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1483 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1484
1485 /* Get an expression we can use to find the attributes to assign to MEM.
1486 First remove any nops. */
1487 while (CONVERT_EXPR_P (exp)
1488 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1489 exp = TREE_OPERAND (exp, 0);
1490
1491 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1492 (as builtin stringops may alias with anything). */
1493 exp = fold_build2 (MEM_REF,
1494 build_array_type (char_type_node,
1495 build_range_type (sizetype,
1496 size_one_node, len)),
1497 exp, build_int_cst (ptr_type_node, 0));
1498
1499 /* If the MEM_REF has no acceptable address, try to get the base object
1500 from the original address we got, and build an all-aliasing
1501 unknown-sized access to that one. */
1502 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1503 set_mem_attributes (mem, exp, 0);
1504 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1505 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1506 0))))
1507 {
1508 exp = build_fold_addr_expr (exp);
1509 exp = fold_build2 (MEM_REF,
1510 build_array_type (char_type_node,
1511 build_range_type (sizetype,
1512 size_zero_node,
1513 NULL)),
1514 exp, build_int_cst (ptr_type_node, 0));
1515 set_mem_attributes (mem, exp, 0);
1516 }
1517 set_mem_alias_set (mem, 0);
1518 return mem;
1519 }
1520 \f
1521 /* Built-in functions to perform an untyped call and return. */
1522
1523 #define apply_args_mode \
1524 (this_target_builtins->x_apply_args_mode)
1525 #define apply_result_mode \
1526 (this_target_builtins->x_apply_result_mode)
1527
1528 /* Return the size required for the block returned by __builtin_apply_args,
1529 and initialize apply_args_mode. */
1530
1531 static int
1532 apply_args_size (void)
1533 {
1534 static int size = -1;
1535 int align;
1536 unsigned int regno;
1537
1538 /* The values computed by this function never change. */
1539 if (size < 0)
1540 {
1541 /* The first value is the incoming arg-pointer. */
1542 size = GET_MODE_SIZE (Pmode);
1543
1544 /* The second value is the structure value address unless this is
1545 passed as an "invisible" first argument. */
1546 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1547 size += GET_MODE_SIZE (Pmode);
1548
1549 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1550 if (FUNCTION_ARG_REGNO_P (regno))
1551 {
1552 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1553
1554 gcc_assert (mode != VOIDmode);
1555
1556 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1557 if (size % align != 0)
1558 size = CEIL (size, align) * align;
1559 size += GET_MODE_SIZE (mode);
1560 apply_args_mode[regno] = mode;
1561 }
1562 else
1563 {
1564 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1565 }
1566 }
1567 return size;
1568 }
1569
1570 /* Return the size required for the block returned by __builtin_apply,
1571 and initialize apply_result_mode. */
1572
1573 static int
1574 apply_result_size (void)
1575 {
1576 static int size = -1;
1577 int align, regno;
1578
1579 /* The values computed by this function never change. */
1580 if (size < 0)
1581 {
1582 size = 0;
1583
1584 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1585 if (targetm.calls.function_value_regno_p (regno))
1586 {
1587 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1588
1589 gcc_assert (mode != VOIDmode);
1590
1591 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1592 if (size % align != 0)
1593 size = CEIL (size, align) * align;
1594 size += GET_MODE_SIZE (mode);
1595 apply_result_mode[regno] = mode;
1596 }
1597 else
1598 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1599
1600 /* Allow targets that use untyped_call and untyped_return to override
1601 the size so that machine-specific information can be stored here. */
1602 #ifdef APPLY_RESULT_SIZE
1603 size = APPLY_RESULT_SIZE;
1604 #endif
1605 }
1606 return size;
1607 }
1608
1609 /* Create a vector describing the result block RESULT. If SAVEP is true,
1610 the result block is used to save the values; otherwise it is used to
1611 restore the values. */
1612
1613 static rtx
1614 result_vector (int savep, rtx result)
1615 {
1616 int regno, size, align, nelts;
1617 fixed_size_mode mode;
1618 rtx reg, mem;
1619 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1620
1621 size = nelts = 0;
1622 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1623 if ((mode = apply_result_mode[regno]) != VOIDmode)
1624 {
1625 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1626 if (size % align != 0)
1627 size = CEIL (size, align) * align;
1628 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1629 mem = adjust_address (result, mode, size);
1630 savevec[nelts++] = (savep
1631 ? gen_rtx_SET (mem, reg)
1632 : gen_rtx_SET (reg, mem));
1633 size += GET_MODE_SIZE (mode);
1634 }
1635 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1636 }
1637
1638 /* Save the state required to perform an untyped call with the same
1639 arguments as were passed to the current function. */
1640
1641 static rtx
1642 expand_builtin_apply_args_1 (void)
1643 {
1644 rtx registers, tem;
1645 int size, align, regno;
1646 fixed_size_mode mode;
1647 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1648
1649 /* Create a block where the arg-pointer, structure value address,
1650 and argument registers can be saved. */
1651 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1652
1653 /* Walk past the arg-pointer and structure value address. */
1654 size = GET_MODE_SIZE (Pmode);
1655 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1656 size += GET_MODE_SIZE (Pmode);
1657
1658 /* Save each register used in calling a function to the block. */
1659 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1660 if ((mode = apply_args_mode[regno]) != VOIDmode)
1661 {
1662 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1663 if (size % align != 0)
1664 size = CEIL (size, align) * align;
1665
1666 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1667
1668 emit_move_insn (adjust_address (registers, mode, size), tem);
1669 size += GET_MODE_SIZE (mode);
1670 }
1671
1672 /* Save the arg pointer to the block. */
1673 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1674 /* We need the arg pointer as the caller actually passed the arguments
1675 to us, not as we might have pretended they were passed. Make sure it's
1676 a valid operand, as emit_move_insn isn't expected to handle a PLUS. */
1677 if (STACK_GROWS_DOWNWARD)
1678 tem
1679 = force_operand (plus_constant (Pmode, tem,
1680 crtl->args.pretend_args_size),
1681 NULL_RTX);
1682 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1683
1684 size = GET_MODE_SIZE (Pmode);
1685
1686 /* Save the structure value address unless this is passed as an
1687 "invisible" first argument. */
1688 if (struct_incoming_value)
1689 emit_move_insn (adjust_address (registers, Pmode, size),
1690 copy_to_reg (struct_incoming_value));
1691
1692 /* Return the address of the block. */
1693 return copy_addr_to_reg (XEXP (registers, 0));
1694 }
1695
1696 /* __builtin_apply_args returns a block of memory allocated on
1697 the stack into which is stored the arg pointer, structure
1698 value address, static chain, and all the registers that might
1699 possibly be used in performing a function call. The code is
1700 moved to the start of the function so the incoming values are
1701 saved. */
1702
1703 static rtx
1704 expand_builtin_apply_args (void)
1705 {
1706 /* Don't do __builtin_apply_args more than once in a function.
1707 Save the result of the first call and reuse it. */
1708 if (apply_args_value != 0)
1709 return apply_args_value;
1710 {
1711 /* When this function is called, it means that registers must be
1712 saved on entry to this function. So we migrate the
1713 call to the first insn of this function. */
1714 rtx temp;
1715
1716 start_sequence ();
1717 temp = expand_builtin_apply_args_1 ();
1718 rtx_insn *seq = get_insns ();
1719 end_sequence ();
1720
1721 apply_args_value = temp;
1722
1723 /* Put the insns after the NOTE that starts the function.
1724 If this is inside a start_sequence, make the outer-level insn
1725 chain current, so the code is placed at the start of the
1726 function. If internal_arg_pointer is a non-virtual pseudo,
1727 it needs to be placed after the function that initializes
1728 that pseudo. */
1729 push_topmost_sequence ();
1730 if (REG_P (crtl->args.internal_arg_pointer)
1731 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1732 emit_insn_before (seq, parm_birth_insn);
1733 else
1734 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1735 pop_topmost_sequence ();
1736 return temp;
1737 }
1738 }
1739
1740 /* Perform an untyped call and save the state required to perform an
1741 untyped return of whatever value was returned by the given function. */
1742
1743 static rtx
1744 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1745 {
1746 int size, align, regno;
1747 fixed_size_mode mode;
1748 rtx incoming_args, result, reg, dest, src;
1749 rtx_call_insn *call_insn;
1750 rtx old_stack_level = 0;
1751 rtx call_fusage = 0;
1752 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1753
1754 arguments = convert_memory_address (Pmode, arguments);
1755
1756 /* Create a block where the return registers can be saved. */
1757 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1758
1759 /* Fetch the arg pointer from the ARGUMENTS block. */
1760 incoming_args = gen_reg_rtx (Pmode);
1761 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1762 if (!STACK_GROWS_DOWNWARD)
1763 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1764 incoming_args, 0, OPTAB_LIB_WIDEN);
1765
1766 /* Push a new argument block and copy the arguments. Do not allow
1767 the (potential) memcpy call below to interfere with our stack
1768 manipulations. */
1769 do_pending_stack_adjust ();
1770 NO_DEFER_POP;
1771
1772 /* Save the stack with nonlocal if available. */
1773 if (targetm.have_save_stack_nonlocal ())
1774 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1775 else
1776 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1777
1778 /* Allocate a block of memory onto the stack and copy the memory
1779 arguments to the outgoing arguments address. We can pass TRUE
1780 as the 4th argument because we just saved the stack pointer
1781 and will restore it right after the call. */
1782 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1783
1784 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1785 may have already set current_function_calls_alloca to true.
1786 current_function_calls_alloca won't be set if argsize is zero,
1787 so we have to guarantee need_drap is true here. */
1788 if (SUPPORTS_STACK_ALIGNMENT)
1789 crtl->need_drap = true;
1790
1791 dest = virtual_outgoing_args_rtx;
1792 if (!STACK_GROWS_DOWNWARD)
1793 {
1794 if (CONST_INT_P (argsize))
1795 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1796 else
1797 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1798 }
1799 dest = gen_rtx_MEM (BLKmode, dest);
1800 set_mem_align (dest, PARM_BOUNDARY);
1801 src = gen_rtx_MEM (BLKmode, incoming_args);
1802 set_mem_align (src, PARM_BOUNDARY);
1803 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1804
1805 /* Refer to the argument block. */
1806 apply_args_size ();
1807 arguments = gen_rtx_MEM (BLKmode, arguments);
1808 set_mem_align (arguments, PARM_BOUNDARY);
1809
1810 /* Walk past the arg-pointer and structure value address. */
1811 size = GET_MODE_SIZE (Pmode);
1812 if (struct_value)
1813 size += GET_MODE_SIZE (Pmode);
1814
1815 /* Restore each of the registers previously saved. Make USE insns
1816 for each of these registers for use in making the call. */
1817 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1818 if ((mode = apply_args_mode[regno]) != VOIDmode)
1819 {
1820 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1821 if (size % align != 0)
1822 size = CEIL (size, align) * align;
1823 reg = gen_rtx_REG (mode, regno);
1824 emit_move_insn (reg, adjust_address (arguments, mode, size));
1825 use_reg (&call_fusage, reg);
1826 size += GET_MODE_SIZE (mode);
1827 }
1828
1829 /* Restore the structure value address unless this is passed as an
1830 "invisible" first argument. */
1831 size = GET_MODE_SIZE (Pmode);
1832 if (struct_value)
1833 {
1834 rtx value = gen_reg_rtx (Pmode);
1835 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1836 emit_move_insn (struct_value, value);
1837 if (REG_P (struct_value))
1838 use_reg (&call_fusage, struct_value);
1839 }
1840
1841 /* All arguments and registers used for the call are set up by now! */
1842 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1843
1844 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1845 and we don't want to load it into a register as an optimization,
1846 because prepare_call_address already did it if it should be done. */
1847 if (GET_CODE (function) != SYMBOL_REF)
1848 function = memory_address (FUNCTION_MODE, function);
1849
1850 /* Generate the actual call instruction and save the return value. */
1851 if (targetm.have_untyped_call ())
1852 {
1853 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1854 emit_call_insn (targetm.gen_untyped_call (mem, result,
1855 result_vector (1, result)));
1856 }
1857 else if (targetm.have_call_value ())
1858 {
1859 rtx valreg = 0;
1860
1861 /* Locate the unique return register. It is not possible to
1862 express a call that sets more than one return register using
1863 call_value; use untyped_call for that. In fact, untyped_call
1864 only needs to save the return registers in the given block. */
1865 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1866 if ((mode = apply_result_mode[regno]) != VOIDmode)
1867 {
1868 gcc_assert (!valreg); /* have_untyped_call required. */
1869
1870 valreg = gen_rtx_REG (mode, regno);
1871 }
1872
1873 emit_insn (targetm.gen_call_value (valreg,
1874 gen_rtx_MEM (FUNCTION_MODE, function),
1875 const0_rtx, NULL_RTX, const0_rtx));
1876
1877 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1878 }
1879 else
1880 gcc_unreachable ();
1881
1882 /* Find the CALL insn we just emitted, and attach the register usage
1883 information. */
1884 call_insn = last_call_insn ();
1885 add_function_usage_to (call_insn, call_fusage);
1886
1887 /* Restore the stack. */
1888 if (targetm.have_save_stack_nonlocal ())
1889 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1890 else
1891 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1892 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1893
1894 OK_DEFER_POP;
1895
1896 /* Return the address of the result block. */
1897 result = copy_addr_to_reg (XEXP (result, 0));
1898 return convert_memory_address (ptr_mode, result);
1899 }
1900
1901 /* Perform an untyped return. */
1902
1903 static void
1904 expand_builtin_return (rtx result)
1905 {
1906 int size, align, regno;
1907 fixed_size_mode mode;
1908 rtx reg;
1909 rtx_insn *call_fusage = 0;
1910
1911 result = convert_memory_address (Pmode, result);
1912
1913 apply_result_size ();
1914 result = gen_rtx_MEM (BLKmode, result);
1915
1916 if (targetm.have_untyped_return ())
1917 {
1918 rtx vector = result_vector (0, result);
1919 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1920 emit_barrier ();
1921 return;
1922 }
1923
1924 /* Restore the return value and note that each value is used. */
1925 size = 0;
1926 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1927 if ((mode = apply_result_mode[regno]) != VOIDmode)
1928 {
1929 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1930 if (size % align != 0)
1931 size = CEIL (size, align) * align;
1932 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1933 emit_move_insn (reg, adjust_address (result, mode, size));
1934
1935 push_to_sequence (call_fusage);
1936 emit_use (reg);
1937 call_fusage = get_insns ();
1938 end_sequence ();
1939 size += GET_MODE_SIZE (mode);
1940 }
1941
1942 /* Put the USE insns before the return. */
1943 emit_insn (call_fusage);
1944
1945 /* Return whatever values were restored by jumping directly to the end
1946 of the function. */
1947 expand_naked_return ();
1948 }
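/* Illustrative usage sketch of the extension implemented by the three
   expanders above; it is not part of this file, and the 64-byte argument
   size is a made-up value for the example:

     void *forward (void (*fn) ())
     {
       void *args = __builtin_apply_args ();        // save incoming regs
       void *ret  = __builtin_apply (fn, args, 64); // untyped call to FN
       __builtin_return (ret);                      // untyped return
     }

   __builtin_apply_args is expanded by expand_builtin_apply_args,
   __builtin_apply by expand_builtin_apply, and __builtin_return by
   expand_builtin_return. */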
1949
1950 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1951
1952 static enum type_class
1953 type_to_class (tree type)
1954 {
1955 switch (TREE_CODE (type))
1956 {
1957 case VOID_TYPE: return void_type_class;
1958 case INTEGER_TYPE: return integer_type_class;
1959 case ENUMERAL_TYPE: return enumeral_type_class;
1960 case BOOLEAN_TYPE: return boolean_type_class;
1961 case POINTER_TYPE: return pointer_type_class;
1962 case REFERENCE_TYPE: return reference_type_class;
1963 case OFFSET_TYPE: return offset_type_class;
1964 case REAL_TYPE: return real_type_class;
1965 case COMPLEX_TYPE: return complex_type_class;
1966 case FUNCTION_TYPE: return function_type_class;
1967 case METHOD_TYPE: return method_type_class;
1968 case RECORD_TYPE: return record_type_class;
1969 case UNION_TYPE:
1970 case QUAL_UNION_TYPE: return union_type_class;
1971 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1972 ? string_type_class : array_type_class);
1973 case LANG_TYPE: return lang_type_class;
1974 default: return no_type_class;
1975 }
1976 }
1977
1978 /* Expand a call EXP to __builtin_classify_type. */
1979
1980 static rtx
1981 expand_builtin_classify_type (tree exp)
1982 {
1983 if (call_expr_nargs (exp))
1984 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1985 return GEN_INT (no_type_class);
1986 }
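/* Illustrative sketch, not part of this file: __builtin_classify_type
   simply reports the class computed by type_to_class above, e.g.

     int i = __builtin_classify_type (42);         // integer_type_class
     int r = __builtin_classify_type (3.14);       // real_type_class
     int p = __builtin_classify_type ((void *) 0); // pointer_type_class
*/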
1987
1988 /* This helper macro, meant to be used in mathfn_built_in below, determines
1989 which among a set of builtin math functions is appropriate for a given type
1990 mode. The `F' (float) and `L' (long double) are automatically generated
1991 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1992 types, there are additional types that are considered with 'F32', 'F64',
1993 'F128', etc. suffixes. */
1994 #define CASE_MATHFN(MATHFN) \
1995 CASE_CFN_##MATHFN: \
1996 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1997 fcodel = BUILT_IN_##MATHFN##L ; break;
1998 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1999 types. */
2000 #define CASE_MATHFN_FLOATN(MATHFN) \
2001 CASE_CFN_##MATHFN: \
2002 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2003 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2004 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2005 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2006 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2007 break;
2008 /* Similar to above, but appends _R after any F/L suffix. */
2009 #define CASE_MATHFN_REENT(MATHFN) \
2010 case CFN_BUILT_IN_##MATHFN##_R: \
2011 case CFN_BUILT_IN_##MATHFN##F_R: \
2012 case CFN_BUILT_IN_##MATHFN##L_R: \
2013 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2014 fcodel = BUILT_IN_##MATHFN##L_R ; break;
2015
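/* Illustrative expansion sketch (not part of this file): the switch entry
   CASE_MATHFN (SQRT) produces, roughly,

     CASE_CFN_SQRT:                  // labels covering CFN_SQRT and the
                                     // BUILT_IN_SQRT{,F,L} variants
       fcode  = BUILT_IN_SQRT;
       fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL;
       break;

   so one entry maps every variant of a math function to its double, float
   and long double codes; CASE_MATHFN_FLOATN additionally fills in the
   _Float<N> and _Float<N>x codes. */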
2016 /* Return a function equivalent to FN but operating on floating-point
2017 values of type TYPE, or END_BUILTINS if no such function exists.
2018 This is purely an operation on function codes; it does not guarantee
2019 that the target actually has an implementation of the function. */
2020
2021 static built_in_function
2022 mathfn_built_in_2 (tree type, combined_fn fn)
2023 {
2024 tree mtype;
2025 built_in_function fcode, fcodef, fcodel;
2026 built_in_function fcodef16 = END_BUILTINS;
2027 built_in_function fcodef32 = END_BUILTINS;
2028 built_in_function fcodef64 = END_BUILTINS;
2029 built_in_function fcodef128 = END_BUILTINS;
2030 built_in_function fcodef32x = END_BUILTINS;
2031 built_in_function fcodef64x = END_BUILTINS;
2032 built_in_function fcodef128x = END_BUILTINS;
2033
2034 switch (fn)
2035 {
2036 CASE_MATHFN (ACOS)
2037 CASE_MATHFN (ACOSH)
2038 CASE_MATHFN (ASIN)
2039 CASE_MATHFN (ASINH)
2040 CASE_MATHFN (ATAN)
2041 CASE_MATHFN (ATAN2)
2042 CASE_MATHFN (ATANH)
2043 CASE_MATHFN (CBRT)
2044 CASE_MATHFN_FLOATN (CEIL)
2045 CASE_MATHFN (CEXPI)
2046 CASE_MATHFN_FLOATN (COPYSIGN)
2047 CASE_MATHFN (COS)
2048 CASE_MATHFN (COSH)
2049 CASE_MATHFN (DREM)
2050 CASE_MATHFN (ERF)
2051 CASE_MATHFN (ERFC)
2052 CASE_MATHFN (EXP)
2053 CASE_MATHFN (EXP10)
2054 CASE_MATHFN (EXP2)
2055 CASE_MATHFN (EXPM1)
2056 CASE_MATHFN (FABS)
2057 CASE_MATHFN (FDIM)
2058 CASE_MATHFN_FLOATN (FLOOR)
2059 CASE_MATHFN_FLOATN (FMA)
2060 CASE_MATHFN_FLOATN (FMAX)
2061 CASE_MATHFN_FLOATN (FMIN)
2062 CASE_MATHFN (FMOD)
2063 CASE_MATHFN (FREXP)
2064 CASE_MATHFN (GAMMA)
2065 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2066 CASE_MATHFN (HUGE_VAL)
2067 CASE_MATHFN (HYPOT)
2068 CASE_MATHFN (ILOGB)
2069 CASE_MATHFN (ICEIL)
2070 CASE_MATHFN (IFLOOR)
2071 CASE_MATHFN (INF)
2072 CASE_MATHFN (IRINT)
2073 CASE_MATHFN (IROUND)
2074 CASE_MATHFN (ISINF)
2075 CASE_MATHFN (J0)
2076 CASE_MATHFN (J1)
2077 CASE_MATHFN (JN)
2078 CASE_MATHFN (LCEIL)
2079 CASE_MATHFN (LDEXP)
2080 CASE_MATHFN (LFLOOR)
2081 CASE_MATHFN (LGAMMA)
2082 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2083 CASE_MATHFN (LLCEIL)
2084 CASE_MATHFN (LLFLOOR)
2085 CASE_MATHFN (LLRINT)
2086 CASE_MATHFN (LLROUND)
2087 CASE_MATHFN (LOG)
2088 CASE_MATHFN (LOG10)
2089 CASE_MATHFN (LOG1P)
2090 CASE_MATHFN (LOG2)
2091 CASE_MATHFN (LOGB)
2092 CASE_MATHFN (LRINT)
2093 CASE_MATHFN (LROUND)
2094 CASE_MATHFN (MODF)
2095 CASE_MATHFN (NAN)
2096 CASE_MATHFN (NANS)
2097 CASE_MATHFN_FLOATN (NEARBYINT)
2098 CASE_MATHFN (NEXTAFTER)
2099 CASE_MATHFN (NEXTTOWARD)
2100 CASE_MATHFN (POW)
2101 CASE_MATHFN (POWI)
2102 CASE_MATHFN (POW10)
2103 CASE_MATHFN (REMAINDER)
2104 CASE_MATHFN (REMQUO)
2105 CASE_MATHFN_FLOATN (RINT)
2106 CASE_MATHFN_FLOATN (ROUND)
2107 CASE_MATHFN_FLOATN (ROUNDEVEN)
2108 CASE_MATHFN (SCALB)
2109 CASE_MATHFN (SCALBLN)
2110 CASE_MATHFN (SCALBN)
2111 CASE_MATHFN (SIGNBIT)
2112 CASE_MATHFN (SIGNIFICAND)
2113 CASE_MATHFN (SIN)
2114 CASE_MATHFN (SINCOS)
2115 CASE_MATHFN (SINH)
2116 CASE_MATHFN_FLOATN (SQRT)
2117 CASE_MATHFN (TAN)
2118 CASE_MATHFN (TANH)
2119 CASE_MATHFN (TGAMMA)
2120 CASE_MATHFN_FLOATN (TRUNC)
2121 CASE_MATHFN (Y0)
2122 CASE_MATHFN (Y1)
2123 CASE_MATHFN (YN)
2124
2125 default:
2126 return END_BUILTINS;
2127 }
2128
2129 mtype = TYPE_MAIN_VARIANT (type);
2130 if (mtype == double_type_node)
2131 return fcode;
2132 else if (mtype == float_type_node)
2133 return fcodef;
2134 else if (mtype == long_double_type_node)
2135 return fcodel;
2136 else if (mtype == float16_type_node)
2137 return fcodef16;
2138 else if (mtype == float32_type_node)
2139 return fcodef32;
2140 else if (mtype == float64_type_node)
2141 return fcodef64;
2142 else if (mtype == float128_type_node)
2143 return fcodef128;
2144 else if (mtype == float32x_type_node)
2145 return fcodef32x;
2146 else if (mtype == float64x_type_node)
2147 return fcodef64x;
2148 else if (mtype == float128x_type_node)
2149 return fcodef128x;
2150 else
2151 return END_BUILTINS;
2152 }
2153
2154 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2155 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2156 otherwise use the explicit declaration. If we can't do the conversion,
2157 return null. */
2158
2159 static tree
2160 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2161 {
2162 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2163 if (fcode2 == END_BUILTINS)
2164 return NULL_TREE;
2165
2166 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2167 return NULL_TREE;
2168
2169 return builtin_decl_explicit (fcode2);
2170 }
2171
2172 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2173
2174 tree
2175 mathfn_built_in (tree type, combined_fn fn)
2176 {
2177 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2178 }
2179
2180 /* Like mathfn_built_in_1, but take a built_in_function and
2181 always use the implicit builtin declarations. */
2182
2183 tree
2184 mathfn_built_in (tree type, enum built_in_function fn)
2185 {
2186 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2187 }
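/* Usage sketch (illustrative): narrowing sqrt ((double) f) to sqrtf (f)
   would ask for the float variant like so; the surrounding logic is a
   made-up example:

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);
     if (fn)  // NULL_TREE when no implicit sqrtf declaration is available
       ... build a call to FN instead ...
*/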
2188
2189 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2190 return its code, otherwise return IFN_LAST. Note that this function
2191 only tests whether the function is defined in internal-fn.def, not whether
2192 it is actually available on the target. */
2193
2194 internal_fn
2195 associated_internal_fn (tree fndecl)
2196 {
2197 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2198 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2199 switch (DECL_FUNCTION_CODE (fndecl))
2200 {
2201 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2202 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2203 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2204 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2205 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2206 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2207 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2208 #include "internal-fn.def"
2209
2210 CASE_FLT_FN (BUILT_IN_POW10):
2211 return IFN_EXP10;
2212
2213 CASE_FLT_FN (BUILT_IN_DREM):
2214 return IFN_REMAINDER;
2215
2216 CASE_FLT_FN (BUILT_IN_SCALBN):
2217 CASE_FLT_FN (BUILT_IN_SCALBLN):
2218 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2219 return IFN_LDEXP;
2220 return IFN_LAST;
2221
2222 default:
2223 return IFN_LAST;
2224 }
2225 }
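/* Usage sketch (illustrative): for the float variant of sqrt,

     tree decl = builtin_decl_explicit (BUILT_IN_SQRTF);
     internal_fn ifn = associated_internal_fn (decl);  // IFN_SQRT

   holds regardless of whether the target actually provides a sqrt
   pattern; use replacement_internal_fn below for that check. */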
2226
2227 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2228 on the current target by a call to an internal function, return the
2229 code of that internal function, otherwise return IFN_LAST. The caller
2230 is responsible for ensuring that any side-effects of the built-in
2231 call are dealt with correctly. E.g. if CALL sets errno, the caller
2232 must decide that the errno result isn't needed or make it available
2233 in some other way. */
2234
2235 internal_fn
2236 replacement_internal_fn (gcall *call)
2237 {
2238 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2239 {
2240 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2241 if (ifn != IFN_LAST)
2242 {
2243 tree_pair types = direct_internal_fn_types (ifn, call);
2244 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2245 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2246 return ifn;
2247 }
2248 }
2249 return IFN_LAST;
2250 }
2251
2252 /* Expand a call to the builtin ternary math functions (fma).
2253 Return NULL_RTX if a normal call should be emitted rather than expanding the
2254 function in-line. EXP is the expression that is a call to the builtin
2255 function; if convenient, the result should be placed in TARGET.
2256 SUBTARGET may be used as the target for computing one of EXP's
2257 operands. */
2258
2259 static rtx
2260 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2261 {
2262 optab builtin_optab;
2263 rtx op0, op1, op2, result;
2264 rtx_insn *insns;
2265 tree fndecl = get_callee_fndecl (exp);
2266 tree arg0, arg1, arg2;
2267 machine_mode mode;
2268
2269 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2270 return NULL_RTX;
2271
2272 arg0 = CALL_EXPR_ARG (exp, 0);
2273 arg1 = CALL_EXPR_ARG (exp, 1);
2274 arg2 = CALL_EXPR_ARG (exp, 2);
2275
2276 switch (DECL_FUNCTION_CODE (fndecl))
2277 {
2278 CASE_FLT_FN (BUILT_IN_FMA):
2279 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2280 builtin_optab = fma_optab; break;
2281 default:
2282 gcc_unreachable ();
2283 }
2284
2285 /* Make a suitable register to place result in. */
2286 mode = TYPE_MODE (TREE_TYPE (exp));
2287
2288 /* Before working hard, check whether the instruction is available. */
2289 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2290 return NULL_RTX;
2291
2292 result = gen_reg_rtx (mode);
2293
2294 /* Always stabilize the argument list. */
2295 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2296 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2297 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2298
2299 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2300 op1 = expand_normal (arg1);
2301 op2 = expand_normal (arg2);
2302
2303 start_sequence ();
2304
2305 /* Compute into RESULT.
2306 Set RESULT to wherever the result comes back. */
2307 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2308 result, 0);
2309
2310 /* If we were unable to expand via the builtin, stop the sequence
2311 (without outputting the insns) and call to the library function
2312 with the stabilized argument list. */
2313 if (result == 0)
2314 {
2315 end_sequence ();
2316 return expand_call (exp, target, target == const0_rtx);
2317 }
2318
2319 /* Output the entire sequence. */
2320 insns = get_insns ();
2321 end_sequence ();
2322 emit_insn (insns);
2323
2324 return result;
2325 }
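/* Source-level sketch (illustrative) of a call handled by the expander
   above when the target has an fma pattern for the result mode:

     double r = __builtin_fma (a, b, c);  // one fused multiply-add insn
                                          // if fma_optab has a DFmode
                                          // handler, else a libm call
*/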
2326
2327 /* Expand a call to the builtin sin and cos math functions.
2328 Return NULL_RTX if a normal call should be emitted rather than expanding the
2329 function in-line. EXP is the expression that is a call to the builtin
2330 function; if convenient, the result should be placed in TARGET.
2331 SUBTARGET may be used as the target for computing one of EXP's
2332 operands. */
2333
2334 static rtx
2335 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2336 {
2337 optab builtin_optab;
2338 rtx op0;
2339 rtx_insn *insns;
2340 tree fndecl = get_callee_fndecl (exp);
2341 machine_mode mode;
2342 tree arg;
2343
2344 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2345 return NULL_RTX;
2346
2347 arg = CALL_EXPR_ARG (exp, 0);
2348
2349 switch (DECL_FUNCTION_CODE (fndecl))
2350 {
2351 CASE_FLT_FN (BUILT_IN_SIN):
2352 CASE_FLT_FN (BUILT_IN_COS):
2353 builtin_optab = sincos_optab; break;
2354 default:
2355 gcc_unreachable ();
2356 }
2357
2358 /* Make a suitable register to place result in. */
2359 mode = TYPE_MODE (TREE_TYPE (exp));
2360
2361 /* Check if the sincos insn is available, otherwise fall back
2362 to the sin or cos insn. */
2363 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2364 switch (DECL_FUNCTION_CODE (fndecl))
2365 {
2366 CASE_FLT_FN (BUILT_IN_SIN):
2367 builtin_optab = sin_optab; break;
2368 CASE_FLT_FN (BUILT_IN_COS):
2369 builtin_optab = cos_optab; break;
2370 default:
2371 gcc_unreachable ();
2372 }
2373
2374 /* Before working hard, check whether the instruction is available. */
2375 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2376 {
2377 rtx result = gen_reg_rtx (mode);
2378
2379 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2380 need to expand the argument again. This way, we will not perform
2381 side-effects more than once. */
2382 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2383
2384 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2385
2386 start_sequence ();
2387
2388 /* Compute into RESULT.
2389 Set RESULT to wherever the result comes back. */
2390 if (builtin_optab == sincos_optab)
2391 {
2392 int ok;
2393
2394 switch (DECL_FUNCTION_CODE (fndecl))
2395 {
2396 CASE_FLT_FN (BUILT_IN_SIN):
2397 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2398 break;
2399 CASE_FLT_FN (BUILT_IN_COS):
2400 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2401 break;
2402 default:
2403 gcc_unreachable ();
2404 }
2405 gcc_assert (ok);
2406 }
2407 else
2408 result = expand_unop (mode, builtin_optab, op0, result, 0);
2409
2410 if (result != 0)
2411 {
2412 /* Output the entire sequence. */
2413 insns = get_insns ();
2414 end_sequence ();
2415 emit_insn (insns);
2416 return result;
2417 }
2418
2419 /* If we were unable to expand via the builtin, stop the sequence
2420 (without outputting the insns) and call to the library function
2421 with the stabilized argument list. */
2422 end_sequence ();
2423 }
2424
2425 return expand_call (exp, target, target == const0_rtx);
2426 }
2427
2428 /* Given an interclass math builtin decl FNDECL and its argument ARG
2429 return an RTL instruction code that implements the functionality.
2430 If that isn't possible or available return CODE_FOR_nothing. */
2431
2432 static enum insn_code
2433 interclass_mathfn_icode (tree arg, tree fndecl)
2434 {
2435 bool errno_set = false;
2436 optab builtin_optab = unknown_optab;
2437 machine_mode mode;
2438
2439 switch (DECL_FUNCTION_CODE (fndecl))
2440 {
2441 CASE_FLT_FN (BUILT_IN_ILOGB):
2442 errno_set = true; builtin_optab = ilogb_optab; break;
2443 CASE_FLT_FN (BUILT_IN_ISINF):
2444 builtin_optab = isinf_optab; break;
2445 case BUILT_IN_ISNORMAL:
2446 case BUILT_IN_ISFINITE:
2447 CASE_FLT_FN (BUILT_IN_FINITE):
2448 case BUILT_IN_FINITED32:
2449 case BUILT_IN_FINITED64:
2450 case BUILT_IN_FINITED128:
2451 case BUILT_IN_ISINFD32:
2452 case BUILT_IN_ISINFD64:
2453 case BUILT_IN_ISINFD128:
2454 /* These builtins have no optabs (yet). */
2455 break;
2456 default:
2457 gcc_unreachable ();
2458 }
2459
2460 /* There's no easy way to detect the case we need to set EDOM. */
2461 if (flag_errno_math && errno_set)
2462 return CODE_FOR_nothing;
2463
2464 /* Optab mode depends on the mode of the input argument. */
2465 mode = TYPE_MODE (TREE_TYPE (arg));
2466
2467 if (builtin_optab)
2468 return optab_handler (builtin_optab, mode);
2469 return CODE_FOR_nothing;
2470 }
2471
2472 /* Expand a call to one of the builtin math functions that operate on
2473 a floating-point argument and output an integer result (ilogb, isinf,
2474 isnan, etc).
2475 Return 0 if a normal call should be emitted rather than expanding the
2476 function in-line. EXP is the expression that is a call to the builtin
2477 function; if convenient, the result should be placed in TARGET. */
2478
2479 static rtx
2480 expand_builtin_interclass_mathfn (tree exp, rtx target)
2481 {
2482 enum insn_code icode = CODE_FOR_nothing;
2483 rtx op0;
2484 tree fndecl = get_callee_fndecl (exp);
2485 machine_mode mode;
2486 tree arg;
2487
2488 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2489 return NULL_RTX;
2490
2491 arg = CALL_EXPR_ARG (exp, 0);
2492 icode = interclass_mathfn_icode (arg, fndecl);
2493 mode = TYPE_MODE (TREE_TYPE (arg));
2494
2495 if (icode != CODE_FOR_nothing)
2496 {
2497 class expand_operand ops[1];
2498 rtx_insn *last = get_last_insn ();
2499 tree orig_arg = arg;
2500
2501 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2502 need to expand the argument again. This way, we will not perform
2503 side-effects more than once. */
2504 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2505
2506 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2507
2508 if (mode != GET_MODE (op0))
2509 op0 = convert_to_mode (mode, op0, 0);
2510
2511 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2512 if (maybe_legitimize_operands (icode, 0, 1, ops)
2513 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2514 return ops[0].value;
2515
2516 delete_insns_since (last);
2517 CALL_EXPR_ARG (exp, 0) = orig_arg;
2518 }
2519
2520 return NULL_RTX;
2521 }
2522
2523 /* Expand a call to the builtin sincos math function.
2524 Return NULL_RTX if a normal call should be emitted rather than expanding the
2525 function in-line. EXP is the expression that is a call to the builtin
2526 function. */
2527
2528 static rtx
2529 expand_builtin_sincos (tree exp)
2530 {
2531 rtx op0, op1, op2, target1, target2;
2532 machine_mode mode;
2533 tree arg, sinp, cosp;
2534 int result;
2535 location_t loc = EXPR_LOCATION (exp);
2536 tree alias_type, alias_off;
2537
2538 if (!validate_arglist (exp, REAL_TYPE,
2539 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2540 return NULL_RTX;
2541
2542 arg = CALL_EXPR_ARG (exp, 0);
2543 sinp = CALL_EXPR_ARG (exp, 1);
2544 cosp = CALL_EXPR_ARG (exp, 2);
2545
2546 /* Make a suitable register to place result in. */
2547 mode = TYPE_MODE (TREE_TYPE (arg));
2548
2549 /* Check if sincos insn is available, otherwise emit the call. */
2550 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2551 return NULL_RTX;
2552
2553 target1 = gen_reg_rtx (mode);
2554 target2 = gen_reg_rtx (mode);
2555
2556 op0 = expand_normal (arg);
2557 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2558 alias_off = build_int_cst (alias_type, 0);
2559 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2560 sinp, alias_off));
2561 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2562 cosp, alias_off));
2563
2564 /* Compute into target1 and target2.
2565 Set TARGET to wherever the result comes back. */
2566 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2567 gcc_assert (result);
2568
2569 /* Move target1 and target2 to the memory locations indicated
2570 by op1 and op2. */
2571 emit_move_insn (op1, target1);
2572 emit_move_insn (op2, target2);
2573
2574 return const0_rtx;
2575 }
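/* Source-level sketch (illustrative): when sincos_optab has a handler for
   the argument's mode, the call below becomes a single sincos insn whose
   two results are stored through the pointer arguments:

     double s, c;
     sincos (x, &s, &c);   // or __builtin_sincos (x, &s, &c)
*/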
2576
2577 /* Expand a call to the internal cexpi builtin to the sincos math function.
2578 EXP is the expression that is a call to the builtin function; if convenient,
2579 the result should be placed in TARGET. */
2580
2581 static rtx
2582 expand_builtin_cexpi (tree exp, rtx target)
2583 {
2584 tree fndecl = get_callee_fndecl (exp);
2585 tree arg, type;
2586 machine_mode mode;
2587 rtx op0, op1, op2;
2588 location_t loc = EXPR_LOCATION (exp);
2589
2590 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2591 return NULL_RTX;
2592
2593 arg = CALL_EXPR_ARG (exp, 0);
2594 type = TREE_TYPE (arg);
2595 mode = TYPE_MODE (TREE_TYPE (arg));
2596
2597 /* Try expanding via a sincos optab, fall back to emitting a libcall
2598 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2599 is only generated from sincos or cexp, or if we have either of them. */
2600 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2601 {
2602 op1 = gen_reg_rtx (mode);
2603 op2 = gen_reg_rtx (mode);
2604
2605 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2606
2607 /* Compute into op1 and op2. */
2608 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2609 }
2610 else if (targetm.libc_has_function (function_sincos))
2611 {
2612 tree call, fn = NULL_TREE;
2613 tree top1, top2;
2614 rtx op1a, op2a;
2615
2616 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2617 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2618 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2619 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2621 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2622 else
2623 gcc_unreachable ();
2624
2625 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2626 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2627 op1a = copy_addr_to_reg (XEXP (op1, 0));
2628 op2a = copy_addr_to_reg (XEXP (op2, 0));
2629 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2630 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2631
2632 /* Make sure not to fold the sincos call again. */
2633 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2634 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2635 call, 3, arg, top1, top2));
2636 }
2637 else
2638 {
2639 tree call, fn = NULL_TREE, narg;
2640 tree ctype = build_complex_type (type);
2641
2642 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2643 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2644 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2645 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2646 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2647 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2648 else
2649 gcc_unreachable ();
2650
2651 /* If we don't have a decl for cexp create one. This is the
2652 friendliest fallback if the user calls __builtin_cexpi
2653 when the target lacks full C99 function support. */
2654 if (fn == NULL_TREE)
2655 {
2656 tree fntype;
2657 const char *name = NULL;
2658
2659 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2660 name = "cexpf";
2661 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2662 name = "cexp";
2663 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2664 name = "cexpl";
2665
2666 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2667 fn = build_fn_decl (name, fntype);
2668 }
2669
2670 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2671 build_real (type, dconst0), arg);
2672
2673 /* Make sure not to fold the cexp call again. */
2674 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2675 return expand_expr (build_call_nary (ctype, call, 1, narg),
2676 target, VOIDmode, EXPAND_NORMAL);
2677 }
2678
2679 /* Now build the proper return type. */
2680 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2681 make_tree (TREE_TYPE (arg), op2),
2682 make_tree (TREE_TYPE (arg), op1)),
2683 target, VOIDmode, EXPAND_NORMAL);
2684 }
2685
2686 /* Conveniently construct a function call expression. FNDECL names the
2687 function to be called, N is the number of arguments, and the "..."
2688 parameters are the argument expressions. Unlike build_call_expr
2689 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2690
2691 static tree
2692 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2693 {
2694 va_list ap;
2695 tree fntype = TREE_TYPE (fndecl);
2696 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2697
2698 va_start (ap, n);
2699 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2700 va_end (ap);
2701 SET_EXPR_LOCATION (fn, loc);
2702 return fn;
2703 }
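/* Usage sketch (illustrative); FNDECL, ARG0 and ARG1 are placeholders:

     tree call = build_call_nofold_loc (loc, fndecl, 2, arg0, arg1);
     // CALL is always a CALL_EXPR; no folding is attempted, so it is
     // safe to use when re-folding the call must be avoided.
*/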
2704
2705 /* Expand a call to one of the builtin rounding functions gcc defines
2706 as an extension (lfloor and lceil). As these are gcc extensions we
2707 do not need to worry about setting errno to EDOM.
2708 If expanding via optab fails, lower expression to (int)(floor(x)).
2709 EXP is the expression that is a call to the builtin function;
2710 if convenient, the result should be placed in TARGET. */
2711
2712 static rtx
2713 expand_builtin_int_roundingfn (tree exp, rtx target)
2714 {
2715 convert_optab builtin_optab;
2716 rtx op0, tmp;
2717 rtx_insn *insns;
2718 tree fndecl = get_callee_fndecl (exp);
2719 enum built_in_function fallback_fn;
2720 tree fallback_fndecl;
2721 machine_mode mode;
2722 tree arg;
2723
2724 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2725 return NULL_RTX;
2726
2727 arg = CALL_EXPR_ARG (exp, 0);
2728
2729 switch (DECL_FUNCTION_CODE (fndecl))
2730 {
2731 CASE_FLT_FN (BUILT_IN_ICEIL):
2732 CASE_FLT_FN (BUILT_IN_LCEIL):
2733 CASE_FLT_FN (BUILT_IN_LLCEIL):
2734 builtin_optab = lceil_optab;
2735 fallback_fn = BUILT_IN_CEIL;
2736 break;
2737
2738 CASE_FLT_FN (BUILT_IN_IFLOOR):
2739 CASE_FLT_FN (BUILT_IN_LFLOOR):
2740 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2741 builtin_optab = lfloor_optab;
2742 fallback_fn = BUILT_IN_FLOOR;
2743 break;
2744
2745 default:
2746 gcc_unreachable ();
2747 }
2748
2749 /* Make a suitable register to place result in. */
2750 mode = TYPE_MODE (TREE_TYPE (exp));
2751
2752 target = gen_reg_rtx (mode);
2753
2754 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2755 need to expand the argument again. This way, we will not perform
2756 side-effects more than once. */
2757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2758
2759 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2760
2761 start_sequence ();
2762
2763 /* Compute into TARGET. */
2764 if (expand_sfix_optab (target, op0, builtin_optab))
2765 {
2766 /* Output the entire sequence. */
2767 insns = get_insns ();
2768 end_sequence ();
2769 emit_insn (insns);
2770 return target;
2771 }
2772
2773 /* If we were unable to expand via the builtin, stop the sequence
2774 (without outputting the insns). */
2775 end_sequence ();
2776
2777 /* Fall back to floating point rounding optab. */
2778 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2779
2780 /* For non-C99 targets we may end up without a fallback fndecl here
2781 if the user called __builtin_lfloor directly. In this case emit
2782 a call to the floor/ceil variants nevertheless. This should result
2783 in the best user experience for targets lacking full C99 support. */
2784 if (fallback_fndecl == NULL_TREE)
2785 {
2786 tree fntype;
2787 const char *name = NULL;
2788
2789 switch (DECL_FUNCTION_CODE (fndecl))
2790 {
2791 case BUILT_IN_ICEIL:
2792 case BUILT_IN_LCEIL:
2793 case BUILT_IN_LLCEIL:
2794 name = "ceil";
2795 break;
2796 case BUILT_IN_ICEILF:
2797 case BUILT_IN_LCEILF:
2798 case BUILT_IN_LLCEILF:
2799 name = "ceilf";
2800 break;
2801 case BUILT_IN_ICEILL:
2802 case BUILT_IN_LCEILL:
2803 case BUILT_IN_LLCEILL:
2804 name = "ceill";
2805 break;
2806 case BUILT_IN_IFLOOR:
2807 case BUILT_IN_LFLOOR:
2808 case BUILT_IN_LLFLOOR:
2809 name = "floor";
2810 break;
2811 case BUILT_IN_IFLOORF:
2812 case BUILT_IN_LFLOORF:
2813 case BUILT_IN_LLFLOORF:
2814 name = "floorf";
2815 break;
2816 case BUILT_IN_IFLOORL:
2817 case BUILT_IN_LFLOORL:
2818 case BUILT_IN_LLFLOORL:
2819 name = "floorl";
2820 break;
2821 default:
2822 gcc_unreachable ();
2823 }
2824
2825 fntype = build_function_type_list (TREE_TYPE (arg),
2826 TREE_TYPE (arg), NULL_TREE);
2827 fallback_fndecl = build_fn_decl (name, fntype);
2828 }
2829
2830 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2831
2832 tmp = expand_normal (exp);
2833 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2834
2835 /* Truncate the result of floating point optab to integer
2836 via expand_fix (). */
2837 target = gen_reg_rtx (mode);
2838 expand_fix (target, tmp, 0);
2839
2840 return target;
2841 }
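/* Source-level sketch (illustrative) of the two paths taken above:

     long l = __builtin_lfloor (x);
     // preferred: a single lfloor insn via lfloor_optab
     // fallback:  l = (long) floor (x), i.e. a call to floor (or a
     //            hand-built "floor" decl on non-C99 targets) followed by
     //            an ordinary float-to-integer conversion via expand_fix
*/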
2842
2843 /* Expand a call to one of the builtin math functions doing integer
2844 conversion (lrint).
2845 Return 0 if a normal call should be emitted rather than expanding the
2846 function in-line. EXP is the expression that is a call to the builtin
2847 function; if convenient, the result should be placed in TARGET. */
2848
2849 static rtx
2850 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2851 {
2852 convert_optab builtin_optab;
2853 rtx op0;
2854 rtx_insn *insns;
2855 tree fndecl = get_callee_fndecl (exp);
2856 tree arg;
2857 machine_mode mode;
2858 enum built_in_function fallback_fn = BUILT_IN_NONE;
2859
2860 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2861 return NULL_RTX;
2862
2863 arg = CALL_EXPR_ARG (exp, 0);
2864
2865 switch (DECL_FUNCTION_CODE (fndecl))
2866 {
2867 CASE_FLT_FN (BUILT_IN_IRINT):
2868 fallback_fn = BUILT_IN_LRINT;
2869 gcc_fallthrough ();
2870 CASE_FLT_FN (BUILT_IN_LRINT):
2871 CASE_FLT_FN (BUILT_IN_LLRINT):
2872 builtin_optab = lrint_optab;
2873 break;
2874
2875 CASE_FLT_FN (BUILT_IN_IROUND):
2876 fallback_fn = BUILT_IN_LROUND;
2877 gcc_fallthrough ();
2878 CASE_FLT_FN (BUILT_IN_LROUND):
2879 CASE_FLT_FN (BUILT_IN_LLROUND):
2880 builtin_optab = lround_optab;
2881 break;
2882
2883 default:
2884 gcc_unreachable ();
2885 }
2886
2887 /* There's no easy way to detect the case we need to set EDOM. */
2888 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2889 return NULL_RTX;
2890
2891 /* Make a suitable register to place result in. */
2892 mode = TYPE_MODE (TREE_TYPE (exp));
2893
2894 /* Try the optab directly when errno handling isn't required. */
2895 if (!flag_errno_math)
2896 {
2897 rtx result = gen_reg_rtx (mode);
2898
2899 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2900 need to expand the argument again. This way, we will not perform
2901 side-effects more the once. */
2902 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2903
2904 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2905
2906 start_sequence ();
2907
2908 if (expand_sfix_optab (result, op0, builtin_optab))
2909 {
2910 /* Output the entire sequence. */
2911 insns = get_insns ();
2912 end_sequence ();
2913 emit_insn (insns);
2914 return result;
2915 }
2916
2917 /* If we were unable to expand via the builtin, stop the sequence
2918 (without outputting the insns) and call to the library function
2919 with the stabilized argument list. */
2920 end_sequence ();
2921 }
2922
2923 if (fallback_fn != BUILT_IN_NONE)
2924 {
2925 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2926 targets, (int) round (x) should never be transformed into
2927 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2928 a call to lround in the hope that the target provides at least some
2929 C99 functions. This should result in the best user experience on
2930 targets lacking full C99 support. */
2931 tree fallback_fndecl = mathfn_built_in_1
2932 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2933
2934 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2935 fallback_fndecl, 1, arg);
2936
2937 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2938 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2939 return convert_to_mode (mode, target, 0);
2940 }
2941
2942 return expand_call (exp, target, target == const0_rtx);
2943 }
2944
2945 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2946 a normal call should be emitted rather than expanding the function
2947 in-line. EXP is the expression that is a call to the builtin
2948 function; if convenient, the result should be placed in TARGET. */
2949
2950 static rtx
2951 expand_builtin_powi (tree exp, rtx target)
2952 {
2953 tree arg0, arg1;
2954 rtx op0, op1;
2955 machine_mode mode;
2956 machine_mode mode2;
2957
2958 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
2960
2961 arg0 = CALL_EXPR_ARG (exp, 0);
2962 arg1 = CALL_EXPR_ARG (exp, 1);
2963 mode = TYPE_MODE (TREE_TYPE (exp));
2964
2965 /* Emit a libcall to libgcc. */
2966
2967 /* Mode of the 2nd argument must match that of an int. */
2968 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2969
2970 if (target == NULL_RTX)
2971 target = gen_reg_rtx (mode);
2972
2973 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2974 if (GET_MODE (op0) != mode)
2975 op0 = convert_to_mode (mode, op0, 0);
2976 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2977 if (GET_MODE (op1) != mode2)
2978 op1 = convert_to_mode (mode2, op1, 0);
2979
2980 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2981 target, LCT_CONST, mode,
2982 op0, mode, op1, mode2);
2983
2984 return target;
2985 }
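/* Source-level sketch (illustrative): __builtin_powi is always lowered to
   a libgcc call here, along the lines of

     double y = __builtin_powi (x, n);  // emitted as a call to the powi
                                        // libfunc for DFmode (__powidf2
                                        // in libgcc)
*/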
2986
2987 /* Expand expression EXP which is a call to the strlen builtin. Return
2988 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2989 try to get the result in TARGET, if convenient. */
2990
2991 static rtx
2992 expand_builtin_strlen (tree exp, rtx target,
2993 machine_mode target_mode)
2994 {
2995 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2996 return NULL_RTX;
2997
2998 class expand_operand ops[4];
2999 rtx pat;
3000 tree len;
3001 tree src = CALL_EXPR_ARG (exp, 0);
3002 rtx src_reg;
3003 rtx_insn *before_strlen;
3004 machine_mode insn_mode;
3005 enum insn_code icode = CODE_FOR_nothing;
3006 unsigned int align;
3007
3008 /* If the length can be computed at compile-time, return it. */
3009 len = c_strlen (src, 0);
3010 if (len)
3011 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3012
3013 /* If the length can be computed at compile-time and is a constant
3014 integer, but there are side-effects in src, evaluate
3015 src for side-effects, then return len.
3016 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3017 can be optimized into: i++; x = 3; */
3018 len = c_strlen (src, 1);
3019 if (len && TREE_CODE (len) == INTEGER_CST)
3020 {
3021 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3022 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3023 }
3024
3025 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3026
3027 /* If SRC is not a pointer type, don't do this operation inline. */
3028 if (align == 0)
3029 return NULL_RTX;
3030
3031 /* Bail out if we can't compute strlen in the right mode. */
3032 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3033 {
3034 icode = optab_handler (strlen_optab, insn_mode);
3035 if (icode != CODE_FOR_nothing)
3036 break;
3037 }
3038 if (insn_mode == VOIDmode)
3039 return NULL_RTX;
3040
3041 /* Make a place to hold the source address. We will not expand
3042 the actual source until we are sure that the expansion will
3043 not fail -- there are trees that cannot be expanded twice. */
3044 src_reg = gen_reg_rtx (Pmode);
3045
3046 /* Mark the beginning of the strlen sequence so we can emit the
3047 source operand later. */
3048 before_strlen = get_last_insn ();
3049
3050 create_output_operand (&ops[0], target, insn_mode);
3051 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3052 create_integer_operand (&ops[2], 0);
3053 create_integer_operand (&ops[3], align);
3054 if (!maybe_expand_insn (icode, 4, ops))
3055 return NULL_RTX;
3056
3057 /* Check to see if the argument was declared attribute nonstring
3058 and if so, issue a warning since at this point it's not known
3059 to be nul-terminated. */
3060 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3061
3062 /* Now that we are assured of success, expand the source. */
3063 start_sequence ();
3064 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3065 if (pat != src_reg)
3066 {
3067 #ifdef POINTERS_EXTEND_UNSIGNED
3068 if (GET_MODE (pat) != Pmode)
3069 pat = convert_to_mode (Pmode, pat,
3070 POINTERS_EXTEND_UNSIGNED);
3071 #endif
3072 emit_move_insn (src_reg, pat);
3073 }
3074 pat = get_insns ();
3075 end_sequence ();
3076
3077 if (before_strlen)
3078 emit_insn_after (pat, before_strlen);
3079 else
3080 emit_insn_before (pat, get_insns ());
3081
3082 /* Return the value in the proper mode for this function. */
3083 if (GET_MODE (ops[0].value) == target_mode)
3084 target = ops[0].value;
3085 else if (target != 0)
3086 convert_move (target, ops[0].value, 0);
3087 else
3088 target = convert_to_mode (target_mode, ops[0].value, 0);
3089
3090 return target;
3091 }
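/* Source-level sketch (illustrative) of the compile-time cases handled
   above before any strlen insn is considered:

     size_t a = strlen ("hello");                   // folded to 5
     size_t b = strlen (i++ ? "xfoo" + 1 : "bar");  // becomes i++; b = 3;
*/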
3092
3093 /* Expand call EXP to the strnlen built-in, returning the result
3094 and setting it in TARGET if convenient; return NULL_RTX on failure. */
3095
3096 static rtx
3097 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3098 {
3099 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3100 return NULL_RTX;
3101
3102 tree src = CALL_EXPR_ARG (exp, 0);
3103 tree bound = CALL_EXPR_ARG (exp, 1);
3104
3105 if (!bound)
3106 return NULL_RTX;
3107
3108 location_t loc = UNKNOWN_LOCATION;
3109 if (EXPR_HAS_LOCATION (exp))
3110 loc = EXPR_LOCATION (exp);
3111
3112 tree maxobjsize = max_object_size ();
3113 tree func = get_callee_fndecl (exp);
3114
3115 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3116 so these conversions aren't necessary. */
3117 c_strlen_data lendata = { };
3118 tree len = c_strlen (src, 0, &lendata, 1);
3119 if (len)
3120 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3121
3122 if (TREE_CODE (bound) == INTEGER_CST)
3123 {
3124 if (!TREE_NO_WARNING (exp)
3125 && tree_int_cst_lt (maxobjsize, bound)
3126 && warning_at (loc, OPT_Wstringop_overflow_,
3127 "%K%qD specified bound %E "
3128 "exceeds maximum object size %E",
3129 exp, func, bound, maxobjsize))
3130 TREE_NO_WARNING (exp) = true;
3131
3132 bool exact = true;
3133 if (!len || TREE_CODE (len) != INTEGER_CST)
3134 {
3135 /* Clear EXACT if LEN may be less than SRC suggests,
3136 such as in
3137 strnlen (&a[i], sizeof a)
3138 where the value of i is unknown. Unless i's value is
3139 zero, the call is unsafe because the bound is greater. */
3140 lendata.decl = unterminated_array (src, &len, &exact);
3141 if (!lendata.decl)
3142 return NULL_RTX;
3143 }
3144
3145 if (lendata.decl && (tree_int_cst_lt (len, bound) || !exact))
3146 {
3147 location_t warnloc
3148 = expansion_point_location_if_in_system_header (loc);
3149
3150 if (!TREE_NO_WARNING (exp)
3151 && warning_at (warnloc, OPT_Wstringop_overflow_,
3152 exact
3153 ? G_("%K%qD specified bound %E exceeds the size "
3154 "%E of unterminated array")
3155 : G_("%K%qD specified bound %E may exceed the "
3156 "size of at most %E of unterminated array"),
3157 exp, func, bound, len))
3158 {
3159 inform (DECL_SOURCE_LOCATION (lendata.decl),
3160 "referenced argument declared here");
3161 TREE_NO_WARNING (exp) = true;
3162 }
3163 return NULL_RTX;
3164 }
3165
3166 if (!len)
3167 return NULL_RTX;
3168
3169 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3170 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3171 }
3172
3173 if (TREE_CODE (bound) != SSA_NAME)
3174 return NULL_RTX;
3175
3176 wide_int min, max;
3177 enum value_range_kind rng = get_range_info (bound, &min, &max);
3178 if (rng != VR_RANGE)
3179 return NULL_RTX;
3180
3181 if (!TREE_NO_WARNING (exp)
3182 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3183 && warning_at (loc, OPT_Wstringop_overflow_,
3184 "%K%qD specified bound [%wu, %wu] "
3185 "exceeds maximum object size %E",
3186 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3187 TREE_NO_WARNING (exp) = true;
3188
3189 bool exact = true;
3190 if (!len || TREE_CODE (len) != INTEGER_CST)
3191 {
3192 lendata.decl = unterminated_array (src, &len, &exact);
3193 if (!lendata.decl)
3194 return NULL_RTX;
3195 }
3196
3197 if (lendata.decl
3198 && !TREE_NO_WARNING (exp)
3199 && (wi::ltu_p (wi::to_wide (len), min)
3200 || !exact))
3201 {
3202 location_t warnloc
3203 = expansion_point_location_if_in_system_header (loc);
3204
3205 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3206 exact
3207 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3208 "the size %E of unterminated array")
3209 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3210 "the size of at most %E of unterminated array"),
3211 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3212 {
3213 inform (DECL_SOURCE_LOCATION (lendata.decl),
3214 "referenced argument declared here");
3215 TREE_NO_WARNING (exp) = true;
3216 }
3217 }
3218
3219 if (lendata.decl)
3220 return NULL_RTX;
3221
3222 if (wi::gtu_p (min, wi::to_wide (len)))
3223 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3224
3225 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3226 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3227 }
3228
3229 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3230 bytes from constant string DATA + OFFSET and return it as target
3231 constant. */
3232
3233 static rtx
3234 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3235 scalar_int_mode mode)
3236 {
3237 const char *str = (const char *) data;
3238
3239 gcc_assert (offset >= 0
3240 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3241 <= strlen (str) + 1));
3242
3243 return c_readstr (str + offset, mode);
3244 }
3245
3246 /* LEN specifies the length of the block for a memcpy/memset operation.
3247 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3248 In some cases we can make a very likely guess at the maximum size, which
3249 we then store in PROBABLE_MAX_SIZE. */
3250
3251 static void
3252 determine_block_size (tree len, rtx len_rtx,
3253 unsigned HOST_WIDE_INT *min_size,
3254 unsigned HOST_WIDE_INT *max_size,
3255 unsigned HOST_WIDE_INT *probable_max_size)
3256 {
3257 if (CONST_INT_P (len_rtx))
3258 {
3259 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3260 return;
3261 }
3262 else
3263 {
3264 wide_int min, max;
3265 enum value_range_kind range_type = VR_UNDEFINED;
3266
3267 /* Determine bounds from the type. */
3268 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3269 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3270 else
3271 *min_size = 0;
3272 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3273 *probable_max_size = *max_size
3274 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3275 else
3276 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3277
3278 if (TREE_CODE (len) == SSA_NAME)
3279 range_type = get_range_info (len, &min, &max);
3280 if (range_type == VR_RANGE)
3281 {
3282 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3283 *min_size = min.to_uhwi ();
3284 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3285 *probable_max_size = *max_size = max.to_uhwi ();
3286 }
3287 else if (range_type == VR_ANTI_RANGE)
3288 {
3289 /* An anti-range 0...N lets us raise the minimal size to N+1. */
3290 if (min == 0)
3291 {
3292 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3293 *min_size = max.to_uhwi () + 1;
3294 }
3295 /* Code like
3296
3297 int n;
3298 if (n < 100)
3299 memcpy (a, b, n)
3300
3301 produces an anti-range allowing negative values of N. We can still
3302 use the information and guess that N is not negative.
3303 */
3304 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3305 *probable_max_size = min.to_uhwi () - 1;
3306 }
3307 }
3308 gcc_checking_assert (*max_size <=
3309 (unsigned HOST_WIDE_INT)
3310 GET_MODE_MASK (GET_MODE (len_rtx)));
3311 }
3312
3313 /* Try to verify that the sizes and lengths of the arguments to a string
3314 manipulation function given by EXP are within valid bounds and that
3315 the operation does not lead to buffer overflow or read past the end.
3316 Arguments other than EXP may be null. When non-null, the arguments
3317 have the following meaning:
3318 DST is the destination of a copy call or NULL otherwise.
3319 SRC is the source of a copy call or NULL otherwise.
3320 DSTWRITE is the number of bytes written into the destination obtained
3321 from the user-supplied size argument to the function (such as in
3322 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3323 MAXREAD is the user-supplied bound on the length of the source sequence
3324 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3325 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3326 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3327 expression EXP is a string function call (as opposed to a memory call
3328 like memcpy). As an exception, SRCSTR can also be an integer denoting
3329 the precomputed size of the source string or object (for functions like
3330 memcpy).
3331 DSTSIZE is the size of the destination object specified by the last
3332 argument to the _chk builtins, typically resulting from the expansion
3333 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3334 DSTSIZE)).
3335
3336 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3337 SIZE_MAX.
3338
3339 If the call is successfully verified as safe return true, otherwise
3340 return false. */
3341
3342 bool
3343 check_access (tree exp, tree, tree, tree dstwrite,
3344 tree maxread, tree srcstr, tree dstsize)
3345 {
3346 int opt = OPT_Wstringop_overflow_;
3347
3348 /* The size of the largest object is half the address space, or
3349 PTRDIFF_MAX. (This is way too permissive.) */
3350 tree maxobjsize = max_object_size ();
3351
3352 /* Either the length of the source string for string functions or
3353 the size of the source object for raw memory functions. */
3354 tree slen = NULL_TREE;
3355
3356 tree range[2] = { NULL_TREE, NULL_TREE };
3357
3358 /* Set to true when the exact number of bytes written by a string
3359 function like strcpy is not known and the only thing that is
3360 known is that it must be at least one (for the terminating nul). */
3361 bool at_least_one = false;
3362 if (srcstr)
3363 {
3364 /* SRCSTR is normally a pointer to string but as a special case
3365 it can be an integer denoting the length of a string. */
3366 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3367 {
3368 /* Try to determine the range of lengths the source string
3369 refers to. If it can be determined and is less than
3370 the upper bound given by MAXREAD add one to it for
3371 the terminating nul. Otherwise, set it to one for
3372 the same reason, or to MAXREAD as appropriate. */
3373 c_strlen_data lendata = { };
3374 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3375 range[0] = lendata.minlen;
3376 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
3377 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3378 {
3379 if (maxread && tree_int_cst_le (maxread, range[0]))
3380 range[0] = range[1] = maxread;
3381 else
3382 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3383 range[0], size_one_node);
3384
3385 if (maxread && tree_int_cst_le (maxread, range[1]))
3386 range[1] = maxread;
3387 else if (!integer_all_onesp (range[1]))
3388 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3389 range[1], size_one_node);
3390
3391 slen = range[0];
3392 }
3393 else
3394 {
3395 at_least_one = true;
3396 slen = size_one_node;
3397 }
3398 }
3399 else
3400 slen = srcstr;
3401 }
3402
3403 if (!dstwrite && !maxread)
3404 {
3405 /* When the only available piece of data is the object size
3406 there is nothing to do. */
3407 if (!slen)
3408 return true;
3409
3410 /* Otherwise, when the length of the source sequence is known
3411 (as with strlen), set DSTWRITE to it. */
3412 if (!range[0])
3413 dstwrite = slen;
3414 }
3415
3416 if (!dstsize)
3417 dstsize = maxobjsize;
3418
3419 if (dstwrite)
3420 get_size_range (dstwrite, range);
3421
3422 tree func = get_callee_fndecl (exp);
3423
3424 /* First check the number of bytes to be written against the maximum
3425 object size. */
3426 if (range[0]
3427 && TREE_CODE (range[0]) == INTEGER_CST
3428 && tree_int_cst_lt (maxobjsize, range[0]))
3429 {
3430 if (TREE_NO_WARNING (exp))
3431 return false;
3432
3433 location_t loc = tree_nonartificial_location (exp);
3434 loc = expansion_point_location_if_in_system_header (loc);
3435
3436 bool warned;
3437 if (range[0] == range[1])
3438 warned = (func
3439 ? warning_at (loc, opt,
3440 "%K%qD specified size %E "
3441 "exceeds maximum object size %E",
3442 exp, func, range[0], maxobjsize)
3443 : warning_at (loc, opt,
3444 "%Kspecified size %E "
3445 "exceeds maximum object size %E",
3446 exp, range[0], maxobjsize));
3447 else
3448 warned = (func
3449 ? warning_at (loc, opt,
3450 "%K%qD specified size between %E and %E "
3451 "exceeds maximum object size %E",
3452 exp, func,
3453 range[0], range[1], maxobjsize)
3454 : warning_at (loc, opt,
3455 "%Kspecified size between %E and %E "
3456 "exceeds maximum object size %E",
3457 exp, range[0], range[1], maxobjsize));
3458 if (warned)
3459 TREE_NO_WARNING (exp) = true;
3460
3461 return false;
3462 }
3463
3464 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3465 constant, and in range of unsigned HOST_WIDE_INT. */
3466 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3467
3468 /* Next check the number of bytes to be written against the destination
3469 object size. */
3470 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3471 {
3472 if (range[0]
3473 && TREE_CODE (range[0]) == INTEGER_CST
3474 && ((tree_fits_uhwi_p (dstsize)
3475 && tree_int_cst_lt (dstsize, range[0]))
3476 || (dstwrite
3477 && tree_fits_uhwi_p (dstwrite)
3478 && tree_int_cst_lt (dstwrite, range[0]))))
3479 {
3480 if (TREE_NO_WARNING (exp))
3481 return false;
3482
3483 location_t loc = tree_nonartificial_location (exp);
3484 loc = expansion_point_location_if_in_system_header (loc);
3485
3486 bool warned = false;
3487 if (dstwrite == slen && at_least_one)
3488 {
3489 /* This is a call to strcpy with a destination of 0 size
3490 and a source of unknown length. The call will write
3491 at least one byte past the end of the destination. */
3492 warned = (func
3493 ? warning_at (loc, opt,
3494 "%K%qD writing %E or more bytes into "
3495 "a region of size %E overflows "
3496 "the destination",
3497 exp, func, range[0], dstsize)
3498 : warning_at (loc, opt,
3499 "%Kwriting %E or more bytes into "
3500 "a region of size %E overflows "
3501 "the destination",
3502 exp, range[0], dstsize));
3503 }
3504 else if (tree_int_cst_equal (range[0], range[1]))
3505 warned = (func
3506 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3507 "%K%qD writing %E byte into a region "
3508 "of size %E overflows the destination",
3509 "%K%qD writing %E bytes into a region "
3510 "of size %E overflows the destination",
3511 exp, func, range[0], dstsize)
3512 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3513 "%Kwriting %E byte into a region "
3514 "of size %E overflows the destination",
3515 "%Kwriting %E bytes into a region "
3516 "of size %E overflows the destination",
3517 exp, range[0], dstsize));
3518 else if (tree_int_cst_sign_bit (range[1]))
3519 {
3520 /* Avoid printing the upper bound if it's invalid. */
3521 warned = (func
3522 ? warning_at (loc, opt,
3523 "%K%qD writing %E or more bytes into "
3524 "a region of size %E overflows "
3525 "the destination",
3526 exp, func, range[0], dstsize)
3527 : warning_at (loc, opt,
3528 "%Kwriting %E or more bytes into "
3529 "a region of size %E overflows "
3530 "the destination",
3531 exp, range[0], dstsize));
3532 }
3533 else
3534 warned = (func
3535 ? warning_at (loc, opt,
3536 "%K%qD writing between %E and %E bytes "
3537 "into a region of size %E overflows "
3538 "the destination",
3539 exp, func, range[0], range[1],
3540 dstsize)
3541 : warning_at (loc, opt,
3542 "%Kwriting between %E and %E bytes "
3543 "into a region of size %E overflows "
3544 "the destination",
3545 exp, range[0], range[1],
3546 dstsize));
3547 if (warned)
3548 TREE_NO_WARNING (exp) = true;
3549
3550 /* Return error when an overflow has been detected. */
3551 return false;
3552 }
3553 }
3554
3555 /* Check the maximum length of the source sequence against the size
3556 of the destination object if known, or against the maximum size
3557 of an object. */
3558 if (maxread)
3559 {
3560 get_size_range (maxread, range);
3561 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3562 {
3563 location_t loc = tree_nonartificial_location (exp);
3564 loc = expansion_point_location_if_in_system_header (loc);
3565
3566 if (tree_int_cst_lt (maxobjsize, range[0]))
3567 {
3568 if (TREE_NO_WARNING (exp))
3569 return false;
3570
3571 bool warned = false;
3572
3573 /* Warn about crazy big sizes first since that's more
3574 likely to be meaningful than saying that the bound
3575 is greater than the object size if both are big. */
3576 if (range[0] == range[1])
3577 warned = (func
3578 ? warning_at (loc, opt,
3579 "%K%qD specified bound %E "
3580 "exceeds maximum object size %E",
3581 exp, func, range[0], maxobjsize)
3582 : warning_at (loc, opt,
3583 "%Kspecified bound %E "
3584 "exceeds maximum object size %E",
3585 exp, range[0], maxobjsize));
3586 else
3587 warned = (func
3588 ? warning_at (loc, opt,
3589 "%K%qD specified bound between "
3590 "%E and %E exceeds maximum object "
3591 "size %E",
3592 exp, func,
3593 range[0], range[1], maxobjsize)
3594 : warning_at (loc, opt,
3595 "%Kspecified bound between "
3596 "%E and %E exceeds maximum object "
3597 "size %E",
3598 exp, range[0], range[1], maxobjsize));
3599 if (warned)
3600 TREE_NO_WARNING (exp) = true;
3601
3602 return false;
3603 }
3604
3605 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3606 {
3607 if (TREE_NO_WARNING (exp))
3608 return false;
3609
3610 bool warned = false;
3611
3612 if (tree_int_cst_equal (range[0], range[1]))
3613 warned = (func
3614 ? warning_at (loc, opt,
3615 "%K%qD specified bound %E "
3616 "exceeds destination size %E",
3617 exp, func,
3618 range[0], dstsize)
3619 : warning_at (loc, opt,
3620 "%Kspecified bound %E "
3621 "exceeds destination size %E",
3622 exp, range[0], dstsize));
3623 else
3624 warned = (func
3625 ? warning_at (loc, opt,
3626 "%K%qD specified bound between %E "
3627 "and %E exceeds destination size %E",
3628 exp, func,
3629 range[0], range[1], dstsize)
3630 : warning_at (loc, opt,
3631 "%Kspecified bound between %E "
3632 "and %E exceeds destination size %E",
3633 exp,
3634 range[0], range[1], dstsize));
3635 if (warned)
3636 TREE_NO_WARNING (exp) = true;
3637
3638 return false;
3639 }
3640 }
3641 }
3642
3643 /* Check for reading past the end of SRC. */
3644 if (slen
3645 && slen == srcstr
3646 && dstwrite && range[0]
3647 && tree_int_cst_lt (slen, range[0]))
3648 {
3649 if (TREE_NO_WARNING (exp))
3650 return false;
3651
3652 bool warned = false;
3653 location_t loc = tree_nonartificial_location (exp);
3654 loc = expansion_point_location_if_in_system_header (loc);
3655
3656 if (tree_int_cst_equal (range[0], range[1]))
3657 warned = (func
3658 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3659 "%K%qD reading %E byte from a region of size %E",
3660 "%K%qD reading %E bytes from a region of size %E",
3661 exp, func, range[0], slen)
3662 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3663 "%Kreading %E byte from a region of size %E",
3664 "%Kreading %E bytes from a region of size %E",
3665 exp, range[0], slen));
3666 else if (tree_int_cst_sign_bit (range[1]))
3667 {
3668 /* Avoid printing the upper bound if it's invalid. */
3669 warned = (func
3670 ? warning_at (loc, opt,
3671 "%K%qD reading %E or more bytes from a region "
3672 "of size %E",
3673 exp, func, range[0], slen)
3674 : warning_at (loc, opt,
3675 "%Kreading %E or more bytes from a region "
3676 "of size %E",
3677 exp, range[0], slen));
3678 }
3679 else
3680 warned = (func
3681 ? warning_at (loc, opt,
3682 "%K%qD reading between %E and %E bytes from "
3683 "a region of size %E",
3684 exp, func, range[0], range[1], slen)
3685 : warning_at (loc, opt,
3686 "%Kreading between %E and %E bytes from "
3687 "a region of size %E",
3688 exp, range[0], range[1], slen));
3689 if (warned)
3690 TREE_NO_WARNING (exp) = true;
3691
3692 return false;
3693 }
3694
3695 return true;
3696 }
3697
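/* A sketch of how the checks above fire in practice, for a
   hypothetical call (not part of GCC):

     char d[4];
     ...
     strncpy (d, s, 8);

   The strncpy expander reaches check_access with DSTWRITE == 8 and
   DSTSIZE == 4, so the "writing 8 bytes into a region of size 4"
   diagnostic above is issued and check_access returns false.  */
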
3698 /* If STMT is a call to an allocation function, returns the constant
3699 size of the object allocated by the call represented as sizetype.
3700 If nonnull, sets RNG1[] to the range of the size. */
3701
3702 tree
3703 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
3704 const vr_values *rvals /* = NULL */)
3705 {
3706 if (!stmt)
3707 return NULL_TREE;
3708
3709 tree allocfntype;
3710 if (tree fndecl = gimple_call_fndecl (stmt))
3711 allocfntype = TREE_TYPE (fndecl);
3712 else
3713 allocfntype = gimple_call_fntype (stmt);
3714
3715 if (!allocfntype)
3716 return NULL_TREE;
3717
3718 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
3719 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
3720 if (!at)
3721 {
3722 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
3723 return NULL_TREE;
3724
3725 argidx1 = 0;
3726 }
3727
3728 unsigned nargs = gimple_call_num_args (stmt);
3729
3730 if (argidx1 == UINT_MAX)
3731 {
3732 tree atval = TREE_VALUE (at);
3733 if (!atval)
3734 return NULL_TREE;
3735
3736 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3737 if (nargs <= argidx1)
3738 return NULL_TREE;
3739
3740 atval = TREE_CHAIN (atval);
3741 if (atval)
3742 {
3743 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3744 if (nargs <= argidx2)
3745 return NULL_TREE;
3746 }
3747 }
3748
3749 tree size = gimple_call_arg (stmt, argidx1);
3750
3751 wide_int rng1_buf[2];
3752 /* If RNG1 is not set, use the buffer. */
3753 if (!rng1)
3754 rng1 = rng1_buf;
3755
3756 if (!get_range (size, rng1, rvals))
3757 return NULL_TREE;
3758
3759 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
3760 return fold_convert (sizetype, size);
3761
3762 /* To handle ranges do the math in wide_int and return the product
3763 of the upper bounds as a constant. Ignore anti-ranges. */
3764 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
3765 wide_int rng2[2];
3766 if (!get_range (n, rng2, rvals))
3767 return NULL_TREE;
3768
3769 /* Extend to the maximum precision to avoid overflow. */
3770 const int prec = ADDR_MAX_PRECISION;
3771 rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
3772 rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
3773 rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
3774 rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);
3775
3776 /* Compute products of both bounds for the caller but return the lesser
3777 of SIZE_MAX and the product of the upper bounds as a constant. */
3778 rng1[0] = rng1[0] * rng2[0];
3779 rng1[1] = rng1[1] * rng2[1];
3780 tree size_max = TYPE_MAX_VALUE (sizetype);
3781 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
3782 {
3783 rng1[1] = wi::to_wide (size_max);
3784 return size_max;
3785 }
3786
3787 return wide_int_to_tree (sizetype, rng1[1]);
3788 }
3789
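/* For illustration, with the hypothetical declaration (not part of
   GCC)

     void *my_alloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));

   a call my_alloc (n, 8) where N is known to be in [1, 16] makes
   gimple_call_alloc_size multiply the bounds of both arguments and
   return 128 (the product of the upper bounds) as a sizetype
   constant, or SIZE_MAX if the product would exceed it.  */
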
3790 /* Helper for compute_objsize. Returns the constant size of DEST
3791 if it refers to a variable or field, and sets *PDECL to the DECL
3792 and *POFF to zero. Returns null for other nodes. */
3793
3794 static tree
3795 addr_decl_size (tree dest, tree *pdecl, tree *poff)
3796 {
3797 if (TREE_CODE (dest) == ADDR_EXPR)
3798 dest = TREE_OPERAND (dest, 0);
3799
3800 if (DECL_P (dest))
3801 {
3802 *pdecl = dest;
3803 *poff = integer_zero_node;
3804 if (tree size = DECL_SIZE_UNIT (dest))
3805 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3806 }
3807
3808 if (TREE_CODE (dest) == COMPONENT_REF)
3809 {
3810 *pdecl = TREE_OPERAND (dest, 1);
3811 *poff = integer_zero_node;
3812 /* Only return constant sizes for now while callers depend on it. */
3813 if (tree size = component_ref_size (dest))
3814 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3815 }
3816
3817 return NULL_TREE;
3818 }
3819
3820 /* Helper to compute the size of the object referenced by the DEST
3821 expression which must have pointer type, using Object Size type
3822 OSTYPE (only the least significant 2 bits are used).
3823 Returns an estimate of the size of the object represented as
3824 a sizetype constant if successful or NULL when the size cannot
3825 be determined.
3826 When the referenced object involves a non-constant offset in some
3827 range the returned value represents the largest size given the
3828 smallest non-negative offset in the range.
3829 If nonnull, sets *PDECL to the decl of the referenced subobject
3830 if it can be determined, or to null otherwise. Likewise, when
3831 POFF is nonnull *POFF is set to the offset into *PDECL.
3832
3833 The function is intended for diagnostics and should not be used
3834 to influence code generation or optimization. */
3835
3836 tree
3837 compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */,
3838 tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
3839 {
3840 tree dummy_decl = NULL_TREE;
3841 if (!pdecl)
3842 pdecl = &dummy_decl;
3843
3844 tree dummy_off = NULL_TREE;
3845 if (!poff)
3846 poff = &dummy_off;
3847
3848 /* Only the two least significant bits are meaningful. */
3849 ostype &= 3;
3850
3851 if (ostype)
3852 /* Except for overly permissive calls to memcpy and other raw
3853 memory functions with zero OSTYPE, detect the size from simple
3854 DECLs first, in order to set *PDECL and *POFF more reliably
3855 than compute_builtin_object_size does. */
3856 if (tree size = addr_decl_size (dest, pdecl, poff))
3857 return size;
3858
3859 unsigned HOST_WIDE_INT size;
3860 if (compute_builtin_object_size (dest, ostype, &size, pdecl, poff))
3861 return build_int_cst (sizetype, size);
3862
3863 if (TREE_CODE (dest) == SSA_NAME)
3864 {
3865 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3866 if (is_gimple_call (stmt))
3867 {
3868 /* If STMT is a call to an allocation function get the size
3869 from its argument(s). If successful, also set *PDECL to
3870 DEST for the caller to include in diagnostics. */
3871 if (tree size = gimple_call_alloc_size (stmt))
3872 {
3873 *pdecl = dest;
3874 *poff = integer_zero_node;
3875 return size;
3876 }
3877 return NULL_TREE;
3878 }
3879
3880 if (!is_gimple_assign (stmt))
3881 return NULL_TREE;
3882
3883 dest = gimple_assign_rhs1 (stmt);
3884
3885 tree_code code = gimple_assign_rhs_code (stmt);
3886 if (code == POINTER_PLUS_EXPR)
3887 {
3888 /* compute_builtin_object_size fails for addresses with
3889 non-constant offsets. Try to determine the range of
3890 such an offset here and use it to adjust the constant
3891 size. */
3892 tree off = gimple_assign_rhs2 (stmt);
3893 if (TREE_CODE (off) == INTEGER_CST)
3894 {
3895 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
3896 {
3897 wide_int wioff = wi::to_wide (off);
3898 wide_int wisiz = wi::to_wide (size);
3899
3900 /* Ignore negative offsets for now. For others,
3901 use the lower bound as the most optimistic
3902 estimate of the (remaining) size. */
3903 if (wi::neg_p (wioff))
3904 ;
3905 else
3906 {
3907 if (*poff)
3908 {
3909 *poff = fold_convert (ptrdiff_type_node, *poff);
3910 off = fold_convert (ptrdiff_type_node, *poff);
3911 *poff = size_binop (PLUS_EXPR, *poff, off);
3912 }
3913 else
3914 *poff = off;
3915 if (wi::ltu_p (wioff, wisiz))
3916 return wide_int_to_tree (TREE_TYPE (size),
3917 wi::sub (wisiz, wioff));
3918 return size_zero_node;
3919 }
3920 }
3921 }
3922 else if (TREE_CODE (off) == SSA_NAME
3923 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3924 {
3925 wide_int min, max;
3926 enum value_range_kind rng = get_range_info (off, &min, &max);
3927
3928 if (rng == VR_RANGE)
3929 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
3930 {
3931 wide_int wisiz = wi::to_wide (size);
3932
3933 /* Ignore negative offsets for now. For others,
3934 use the lower bound as the most optimistic
3935 estimate of the (remaining) size. */
3936 if (wi::neg_p (min) || wi::neg_p (max))
3937 ;
3938 else
3939 {
3940 /* FIXME: For now, since the offset is non-constant,
3941 clear *POFF to keep it from being "misused."
3942 Eventually *POFF will need to become a range that
3943 can be properly added to the outer offset if it
3944 too is one. */
3945 *poff = NULL_TREE;
3946 if (wi::ltu_p (min, wisiz))
3947 return wide_int_to_tree (TREE_TYPE (size),
3948 wi::sub (wisiz, min));
3949 return size_zero_node;
3950 }
3951 }
3952 }
3953 }
3954 else if (code != ADDR_EXPR)
3955 return NULL_TREE;
3956 }
3957
3958 /* Unless computing the largest size (for memcpy and other raw memory
3959 functions), try to determine the size of the object from its type. */
3960 if (!ostype)
3961 return NULL_TREE;
3962
3963 if (TREE_CODE (dest) == ARRAY_REF
3964 || TREE_CODE (dest) == MEM_REF)
3965 {
3966 tree ref = TREE_OPERAND (dest, 0);
3967 tree reftype = TREE_TYPE (ref);
3968 if (TREE_CODE (dest) == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
3969 {
3970 /* Give up for MEM_REFs of vector types; those may be synthesized
3971 from multiple assignments to consecutive data members. See PR
3972 93200.
3973 FIXME: Deal with this more generally, e.g., by marking up such
3974 MEM_REFs at the time they're created. */
3975 reftype = TREE_TYPE (reftype);
3976 if (TREE_CODE (reftype) == VECTOR_TYPE)
3977 return NULL_TREE;
3978 }
3979 tree off = TREE_OPERAND (dest, 1);
3980 if (tree size = compute_objsize (ref, ostype, pdecl, poff))
3981 {
3982 /* If the declaration of the destination object is known
3983 to have zero size, return zero. */
3984 if (integer_zerop (size)
3985 && *pdecl && DECL_P (*pdecl)
3986 && *poff && integer_zerop (*poff))
3987 return size_zero_node;
3988
3989 /* A valid offset into a declared object cannot be negative.
3990 A zero size with a zero "inner" offset is still zero size
3991 regardless of the "other" offset OFF. */
3992 if (*poff
3993 && ((integer_zerop (*poff) && integer_zerop (size))
3994 || (TREE_CODE (*poff) == INTEGER_CST
3995 && tree_int_cst_sgn (*poff) < 0)))
3996 return size_zero_node;
3997
3998 wide_int offrng[2];
3999 if (!get_range (off, offrng, rvals))
4000 return NULL_TREE;
4001
4002 /* Convert to the same precision to keep wide_int from "helpfully"
4003 crashing whenever it sees other arguments. */
4004 const unsigned sizprec = TYPE_PRECISION (sizetype);
4005 offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4006 offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
4007
4008 /* Adjust SIZE either up or down by the sum of *POFF and OFF
4009 above. */
4010 if (TREE_CODE (dest) == ARRAY_REF)
4011 {
4012 tree lowbnd = array_ref_low_bound (dest);
4013 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4014 {
4015 /* Adjust the offset by the low bound of the array
4016 domain (normally zero but 1 in Fortran). */
4017 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4018 offrng[0] -= lb;
4019 offrng[1] -= lb;
4020 }
4021
4022 /* Convert the array index into a byte offset. */
4023 tree eltype = TREE_TYPE (dest);
4024 tree tpsize = TYPE_SIZE_UNIT (eltype);
4025 if (tpsize && TREE_CODE (tpsize) == INTEGER_CST)
4026 {
4027 wide_int wsz = wi::to_wide (tpsize, offrng->get_precision ());
4028 offrng[0] *= wsz;
4029 offrng[1] *= wsz;
4030 }
4031 else
4032 return NULL_TREE;
4033 }
4034
4035 wide_int wisize = wi::to_wide (size);
4036
4037 if (!*poff)
4038 {
4039 /* If the "inner" offset is unknown and the "outer" offset
4040 is either negative or less than SIZE, return the size
4041 minus the offset. This may be overly optimistic in
4042 the first case if the inner offset happens to be less
4043 than the absolute value of the outer offset. */
4044 if (wi::neg_p (offrng[0]))
4045 return size;
4046 if (wi::ltu_p (offrng[0], wisize))
4047 return build_int_cst (sizetype, (wisize - offrng[0]).to_uhwi ());
4048 return size_zero_node;
4049 }
4050
4051 /* Convert to the same precision to keep wide_int from "helpfully"
4052 crashing whenever it sees other arguments. */
4053 offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4054 offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
4055
4056 tree dstoff = *poff;
4057 if (integer_zerop (*poff))
4058 *poff = off;
4059 else if (!integer_zerop (off))
4060 {
4061 *poff = fold_convert (ptrdiff_type_node, *poff);
4062 off = fold_convert (ptrdiff_type_node, off);
4063 *poff = size_binop (PLUS_EXPR, *poff, off);
4064 }
4065
4066 if (!wi::neg_p (offrng[0]))
4067 {
4068 if (TREE_CODE (size) != INTEGER_CST)
4069 return NULL_TREE;
4070
4071 /* Return the difference between the size and the offset
4072 or zero if the offset is greater. */
4073 wide_int wisize = wi::to_wide (size, sizprec);
4074 if (wi::ltu_p (wisize, offrng[0]))
4075 return size_zero_node;
4076
4077 return wide_int_to_tree (sizetype, wisize - offrng[0]);
4078 }
4079
4080 wide_int dstoffrng[2];
4081 if (TREE_CODE (dstoff) == INTEGER_CST)
4082 dstoffrng[0] = dstoffrng[1] = wi::to_wide (dstoff);
4083 else if (TREE_CODE (dstoff) == SSA_NAME)
4084 {
4085 enum value_range_kind rng
4086 = get_range_info (dstoff, dstoffrng, dstoffrng + 1);
4087 if (rng != VR_RANGE)
4088 return NULL_TREE;
4089 }
4090 else
4091 return NULL_TREE;
4092
4093 dstoffrng[0] = wide_int::from (dstoffrng[0], sizprec, SIGNED);
4094 dstoffrng[1] = wide_int::from (dstoffrng[1], sizprec, SIGNED);
4095
4096 if (!wi::neg_p (dstoffrng[0]))
4097 wisize += dstoffrng[0];
4098
4099 offrng[1] += dstoffrng[1];
4100 if (wi::neg_p (offrng[1]))
4101 return size_zero_node;
4102
4103 return wide_int_to_tree (sizetype, wisize);
4104 }
4105
4106 return NULL_TREE;
4107 }
4108
4109 /* Try simple DECLs not handled above. */
4110 if (tree size = addr_decl_size (dest, pdecl, poff))
4111 return size;
4112
4113 tree type = TREE_TYPE (dest);
4114 if (TREE_CODE (type) == POINTER_TYPE)
4115 type = TREE_TYPE (type);
4116
4117 type = TYPE_MAIN_VARIANT (type);
4118 if (TREE_CODE (dest) == ADDR_EXPR)
4119 dest = TREE_OPERAND (dest, 0);
4120
4121 if (TREE_CODE (type) == ARRAY_TYPE
4122 && !array_at_struct_end_p (dest))
4123 {
4124 if (tree size = TYPE_SIZE_UNIT (type))
4125 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
4126 }
4127
4128 return NULL_TREE;
4129 }
4130
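/* A small example of the POINTER_PLUS_EXPR handling above, using
   hypothetical user code:

     char buf[32];
     char *p = buf + i;   /* I an SSA_NAME with range [4, 12].  */

   compute_objsize (p, 1) determines the size of BUF (32), subtracts
   the smallest non-negative offset in the range (4), and returns 28
   as the most optimistic estimate of the remaining space.  */
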
4131 /* Helper to determine and check the sizes of the source and the destination
4132 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
4133 call expression, DEST is the destination argument, SRC is the source
4134 argument or null, and SIZE is the number of bytes. Use Object Size type-0
4135 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
4136 (no overflow or invalid sizes), false otherwise. */
4137
4138 static bool
4139 check_memop_access (tree exp, tree dest, tree src, tree size)
4140 {
4141 /* For functions like memset and memcpy that operate on raw memory
4142 try to determine the size of the largest source and destination
4143 object using type-0 Object Size regardless of the object size
4144 type specified by the option. */
4145 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
4146 tree dstsize = compute_objsize (dest, 0);
4147
4148 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
4149 srcsize, dstsize);
4150 }
4151
4152 /* Validate memchr arguments without performing any expansion.
4153 Return NULL_RTX. */
4154
4155 static rtx
4156 expand_builtin_memchr (tree exp, rtx)
4157 {
4158 if (!validate_arglist (exp,
4159 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4160 return NULL_RTX;
4161
4162 tree arg1 = CALL_EXPR_ARG (exp, 0);
4163 tree len = CALL_EXPR_ARG (exp, 2);
4164
4165 /* Diagnose calls where the specified length exceeds the size
4166 of the object. */
4167 if (warn_stringop_overflow)
4168 {
4169 tree size = compute_objsize (arg1, 0);
4170 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4171 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4172 }
4173
4174 return NULL_RTX;
4175 }
4176
4177 /* Expand a call EXP to the memcpy builtin.
4178 Return NULL_RTX if we failed; the caller should emit a normal call;
4179 otherwise try to get the result in TARGET, if convenient (and in
4180 mode MODE if that's convenient). */
4181
4182 static rtx
4183 expand_builtin_memcpy (tree exp, rtx target)
4184 {
4185 if (!validate_arglist (exp,
4186 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4187 return NULL_RTX;
4188
4189 tree dest = CALL_EXPR_ARG (exp, 0);
4190 tree src = CALL_EXPR_ARG (exp, 1);
4191 tree len = CALL_EXPR_ARG (exp, 2);
4192
4193 check_memop_access (exp, dest, src, len);
4194
4195 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4196 /*retmode=*/ RETURN_BEGIN, false);
4197 }
4198
4199 /* Check a call EXP to the memmove built-in for validity.
4200 Return NULL_RTX on both success and failure. */
4201
4202 static rtx
4203 expand_builtin_memmove (tree exp, rtx target)
4204 {
4205 if (!validate_arglist (exp,
4206 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4207 return NULL_RTX;
4208
4209 tree dest = CALL_EXPR_ARG (exp, 0);
4210 tree src = CALL_EXPR_ARG (exp, 1);
4211 tree len = CALL_EXPR_ARG (exp, 2);
4212
4213 check_memop_access (exp, dest, src, len);
4214
4215 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4216 /*retmode=*/ RETURN_BEGIN, true);
4217 }
4218
4219 /* Expand a call EXP to the mempcpy builtin.
4220 Return NULL_RTX if we failed; the caller should emit a normal call,
4221 otherwise try to get the result in TARGET, if convenient (and in
4222 mode MODE if that's convenient). */
4223
4224 static rtx
4225 expand_builtin_mempcpy (tree exp, rtx target)
4226 {
4227 if (!validate_arglist (exp,
4228 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4229 return NULL_RTX;
4230
4231 tree dest = CALL_EXPR_ARG (exp, 0);
4232 tree src = CALL_EXPR_ARG (exp, 1);
4233 tree len = CALL_EXPR_ARG (exp, 2);
4234
4235 /* Policy does not generally allow using compute_objsize (which
4236 is used internally by check_memop_access) to change code generation
4237 or drive optimization decisions.
4238
4239 In this instance it is safe because the code we generate has
4240 the same semantics regardless of the return value of
4241 check_memop_access. Exactly the same amount of data is copied
4242 and the return value is exactly the same in both cases.
4243
4244 Furthermore, check_memop_access always uses mode 0 for the call to
4245 compute_objsize, so the imprecise nature of compute_objsize is
4246 avoided. */
4247
4248 /* Avoid expanding mempcpy into memcpy when the call is determined
4249 to overflow the buffer. This also prevents the same overflow
4250 from being diagnosed again when expanding memcpy. */
4251 if (!check_memop_access (exp, dest, src, len))
4252 return NULL_RTX;
4253
4254 return expand_builtin_mempcpy_args (dest, src, len,
4255 target, exp, /*retmode=*/ RETURN_END);
4256 }
4257
4258 /* Helper function to do the actual work for expand of memory copy family
4259 functions (memcpy, mempcpy, stpcpy). Expansion should copy LEN bytes
4260 of memory from SRC to DEST and assign the result to TARGET if convenient.
4261 The return value is based on the RETMODE argument.
4262
4263 static rtx
4264 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
4265 rtx target, tree exp, memop_ret retmode,
4266 bool might_overlap)
4267 {
4268 const char *src_str;
4269 unsigned int src_align = get_pointer_alignment (src);
4270 unsigned int dest_align = get_pointer_alignment (dest);
4271 rtx dest_mem, src_mem, dest_addr, len_rtx;
4272 HOST_WIDE_INT expected_size = -1;
4273 unsigned int expected_align = 0;
4274 unsigned HOST_WIDE_INT min_size;
4275 unsigned HOST_WIDE_INT max_size;
4276 unsigned HOST_WIDE_INT probable_max_size;
4277
4278 bool is_move_done;
4279
4280 /* If DEST is not a pointer type, call the normal function. */
4281 if (dest_align == 0)
4282 return NULL_RTX;
4283
4284 /* If either SRC is not a pointer type, don't do this
4285 operation in-line. */
4286 if (src_align == 0)
4287 return NULL_RTX;
4288
4289 if (currently_expanding_gimple_stmt)
4290 stringop_block_profile (currently_expanding_gimple_stmt,
4291 &expected_align, &expected_size);
4292
4293 if (expected_align < dest_align)
4294 expected_align = dest_align;
4295 dest_mem = get_memory_rtx (dest, len);
4296 set_mem_align (dest_mem, dest_align);
4297 len_rtx = expand_normal (len);
4298 determine_block_size (len, len_rtx, &min_size, &max_size,
4299 &probable_max_size);
4300 src_str = c_getstr (src);
4301
4302 /* If SRC is a string constant and block move would be done by
4303 pieces, we can avoid loading the string from memory and instead
4304 store only the computed constants. This works in the overlap
4305 (memmove) case as well because store_by_pieces just generates a
4306 series of stores of constants from the string constant returned
4307 by c_getstr(). */
4308 if (src_str
4309 && CONST_INT_P (len_rtx)
4310 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
4311 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
4312 CONST_CAST (char *, src_str),
4313 dest_align, false))
4314 {
4315 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
4316 builtin_memcpy_read_str,
4317 CONST_CAST (char *, src_str),
4318 dest_align, false, retmode);
4319 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4320 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4321 return dest_mem;
4322 }
4323
4324 src_mem = get_memory_rtx (src, len);
4325 set_mem_align (src_mem, src_align);
4326
4327 /* Copy word part most expediently. */
4328 enum block_op_methods method = BLOCK_OP_NORMAL;
4329 if (CALL_EXPR_TAILCALL (exp)
4330 && (retmode == RETURN_BEGIN || target == const0_rtx))
4331 method = BLOCK_OP_TAILCALL;
4332 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
4333 && retmode == RETURN_END
4334 && !might_overlap
4335 && target != const0_rtx);
4336 if (use_mempcpy_call)
4337 method = BLOCK_OP_NO_LIBCALL_RET;
4338 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
4339 expected_align, expected_size,
4340 min_size, max_size, probable_max_size,
4341 use_mempcpy_call, &is_move_done, might_overlap);
4342
4343 /* Bail out when a mempcpy call would be expanded as a libcall and
4344 the target provides a fast implementation of the mempcpy
4345 routine. */
4346 if (!is_move_done)
4347 return NULL_RTX;
4348
4349 if (dest_addr == pc_rtx)
4350 return NULL_RTX;
4351
4352 if (dest_addr == 0)
4353 {
4354 dest_addr = force_operand (XEXP (dest_mem, 0), target);
4355 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4356 }
4357
4358 if (retmode != RETURN_BEGIN && target != const0_rtx)
4359 {
4360 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
4361 /* For stpcpy, return a pointer to the last byte written. */
4362 if (retmode == RETURN_END_MINUS_ONE)
4363 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
4364 }
4365
4366 return dest_addr;
4367 }
4368
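/* By way of example (not part of the expander itself): a call such as

     memcpy (buf, "abcd", 5);

   takes the store-by-pieces path above, provided the destination
   alignment permits it: the five bytes (including the terminating
   NUL) are emitted as immediate stores obtained through
   builtin_memcpy_read_str, so the string constant is never loaded
   from memory at run time.  */
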
4369 static rtx
4370 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
4371 rtx target, tree orig_exp, memop_ret retmode)
4372 {
4373 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
4374 retmode, false);
4375 }
4376
4377 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
4378 we failed; the caller should emit a normal call. Otherwise try to
4379 get the result in TARGET, if convenient.
4380 The return value is based on the RETMODE argument. */
4381
4382 static rtx
4383 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
4384 {
4385 class expand_operand ops[3];
4386 rtx dest_mem;
4387 rtx src_mem;
4388
4389 if (!targetm.have_movstr ())
4390 return NULL_RTX;
4391
4392 dest_mem = get_memory_rtx (dest, NULL);
4393 src_mem = get_memory_rtx (src, NULL);
4394 if (retmode == RETURN_BEGIN)
4395 {
4396 target = force_reg (Pmode, XEXP (dest_mem, 0));
4397 dest_mem = replace_equiv_address (dest_mem, target);
4398 }
4399
4400 create_output_operand (&ops[0],
4401 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4402 create_fixed_operand (&ops[1], dest_mem);
4403 create_fixed_operand (&ops[2], src_mem);
4404 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4405 return NULL_RTX;
4406
4407 if (retmode != RETURN_BEGIN && target != const0_rtx)
4408 {
4409 target = ops[0].value;
4410 /* movstr is supposed to set end to the address of the NUL
4411 terminator. If the caller requested a mempcpy-like return value,
4412 adjust it. */
4413 if (retmode == RETURN_END)
4414 {
4415 rtx tem = plus_constant (GET_MODE (target),
4416 gen_lowpart (GET_MODE (target), target), 1);
4417 emit_move_insn (target, force_operand (tem, NULL_RTX));
4418 }
4419 }
4420 return target;
4421 }
4422
4423 /* Do some very basic size validation of a call to the strcat builtin
4424 given by EXP. Return NULL_RTX to have the built-in expand to a call
4425 to the library function. */
4426
4427 static rtx
4428 expand_builtin_strcat (tree exp)
4429 {
4430 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4431 || !warn_stringop_overflow)
4432 return NULL_RTX;
4433
4434 tree dest = CALL_EXPR_ARG (exp, 0);
4435 tree src = CALL_EXPR_ARG (exp, 1);
4436
4437 /* Detect unterminated source (only). */
4438 if (!check_nul_terminated_array (exp, src))
4439 return NULL_RTX;
4440
4441 /* There is no way here to determine the length of the string in
4442 the destination to which the SRC string is being appended so
4443 just diagnose cases when the source string is longer than
4444 the destination object. */
4445
4446 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4447
4448 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4449 destsize);
4450
4451 return NULL_RTX;
4452 }
4453
4454 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4455 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4456 try to get the result in TARGET, if convenient (and in mode MODE if that's
4457 convenient). */
4458
4459 static rtx
4460 expand_builtin_strcpy (tree exp, rtx target)
4461 {
4462 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4463 return NULL_RTX;
4464
4465 tree dest = CALL_EXPR_ARG (exp, 0);
4466 tree src = CALL_EXPR_ARG (exp, 1);
4467
4468 if (warn_stringop_overflow)
4469 {
4470 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4471 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4472 src, destsize);
4473 }
4474
4475 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4476 {
4477 /* Check to see if the argument was declared attribute nonstring
4478 and if so, issue a warning since at this point it's not known
4479 to be nul-terminated. */
4480 tree fndecl = get_callee_fndecl (exp);
4481 maybe_warn_nonstring_arg (fndecl, exp);
4482 return ret;
4483 }
4484
4485 return NULL_RTX;
4486 }
4487
4488 /* Helper function to do the actual work for expand_builtin_strcpy. The
4489 arguments to the builtin_strcpy call DEST and SRC are broken out
4490 so that this can also be called without constructing an actual CALL_EXPR.
4491 The other arguments and return value are the same as for
4492 expand_builtin_strcpy. */
4493
4494 static rtx
4495 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4496 {
4497 /* Detect strcpy calls with unterminated arrays. */
4498 if (tree nonstr = unterminated_array (src))
4499 {
4500 /* NONSTR refers to the non-nul terminated constant array. */
4501 if (!TREE_NO_WARNING (exp))
4502 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4503 return NULL_RTX;
4504 }
4505
4506 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4507 }
4508
4509 /* Expand a call EXP to the stpcpy builtin.
4510 Return NULL_RTX if we failed; the caller should emit a normal call.
4511 Otherwise try to get the result in TARGET, if convenient (and in
4512 mode MODE if that's convenient). */
4513
4514 static rtx
4515 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4516 {
4517 tree dst, src;
4518 location_t loc = EXPR_LOCATION (exp);
4519
4520 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4521 return NULL_RTX;
4522
4523 dst = CALL_EXPR_ARG (exp, 0);
4524 src = CALL_EXPR_ARG (exp, 1);
4525
4526 if (warn_stringop_overflow)
4527 {
4528 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4529 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4530 src, destsize);
4531 }
4532
4533 /* If return value is ignored, transform stpcpy into strcpy. */
4534 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4535 {
4536 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4537 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4538 return expand_expr (result, target, mode, EXPAND_NORMAL);
4539 }
4540 else
4541 {
4542 tree len, lenp1;
4543 rtx ret;
4544
4545 /* Ensure we get an actual string whose length can be evaluated at
4546 compile-time, not an expression containing a string. This is
4547 because the latter will potentially produce pessimized code
4548 when used to produce the return value. */
4549 c_strlen_data lendata = { };
4550 if (!c_getstr (src, NULL)
4551 || !(len = c_strlen (src, 0, &lendata, 1)))
4552 return expand_movstr (dst, src, target,
4553 /*retmode=*/ RETURN_END_MINUS_ONE);
4554
4555 if (lendata.decl && !TREE_NO_WARNING (exp))
4556 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4557
4558 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4559 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4560 target, exp,
4561 /*retmode=*/ RETURN_END_MINUS_ONE);
4562
4563 if (ret)
4564 return ret;
4565
4566 if (TREE_CODE (len) == INTEGER_CST)
4567 {
4568 rtx len_rtx = expand_normal (len);
4569
4570 if (CONST_INT_P (len_rtx))
4571 {
4572 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4573
4574 if (ret)
4575 {
4576 if (! target)
4577 {
4578 if (mode != VOIDmode)
4579 target = gen_reg_rtx (mode);
4580 else
4581 target = gen_reg_rtx (GET_MODE (ret));
4582 }
4583 if (GET_MODE (target) != GET_MODE (ret))
4584 ret = gen_lowpart (GET_MODE (target), ret);
4585
4586 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4587 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4588 gcc_assert (ret);
4589
4590 return target;
4591 }
4592 }
4593 }
4594
4595 return expand_movstr (dst, src, target,
4596 /*retmode=*/ RETURN_END_MINUS_ONE);
4597 }
4598 }
4599
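/* To illustrate the constant-length path above with hypothetical
   user code:

     char *q = stpcpy (d, "hi");

   c_strlen yields LEN == 2, the copy is expanded like
   mempcpy (d, "hi", 3) with RETURN_END_MINUS_ONE, and the value
   assigned to Q is D + 2, i.e. a pointer to the copied NUL
   terminator.  */
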
4600 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4601 arguments while being careful to avoid duplicate warnings (which could
4602 be issued if the expander were to expand the call, resulting in it
4603 being emitted in expand_call()). */
4604
4605 static rtx
4606 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4607 {
4608 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4609 {
4610 /* The call has been successfully expanded. Check for nonstring
4611 arguments and issue warnings as appropriate. */
4612 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4613 return ret;
4614 }
4615
4616 return NULL_RTX;
4617 }
4618
4619 /* Check a call EXP to the stpncpy built-in for validity.
4620 Return NULL_RTX on both success and failure. */
4621
4622 static rtx
4623 expand_builtin_stpncpy (tree exp, rtx)
4624 {
4625 if (!validate_arglist (exp,
4626 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4627 || !warn_stringop_overflow)
4628 return NULL_RTX;
4629
4630 /* The source and destination of the call. */
4631 tree dest = CALL_EXPR_ARG (exp, 0);
4632 tree src = CALL_EXPR_ARG (exp, 1);
4633
4634 /* The exact number of bytes to write (not the maximum). */
4635 tree len = CALL_EXPR_ARG (exp, 2);
4636 if (!check_nul_terminated_array (exp, src, len))
4637 return NULL_RTX;
4638
4639 /* The size of the destination object. */
4640 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4641
4642 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4643
4644 return NULL_RTX;
4645 }
4646
4647 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4648 bytes from constant string DATA + OFFSET and return it as target
4649 constant. */
4650
4651 rtx
4652 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4653 scalar_int_mode mode)
4654 {
4655 const char *str = (const char *) data;
4656
4657 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4658 return const0_rtx;
4659
4660 return c_readstr (str + offset, mode);
4661 }
4662
4663 /* Helper to check the sizes of sequences and the destination of calls
4664 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4665 success (no overflow or invalid sizes), false otherwise. */
4666
4667 static bool
4668 check_strncat_sizes (tree exp, tree objsize)
4669 {
4670 tree dest = CALL_EXPR_ARG (exp, 0);
4671 tree src = CALL_EXPR_ARG (exp, 1);
4672 tree maxread = CALL_EXPR_ARG (exp, 2);
4673
4674 /* Try to determine the range of lengths that the source expression
4675 refers to. */
4676 c_strlen_data lendata = { };
4677 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4678
4679 /* Try to verify that the destination is big enough for the shortest
4680 string. */
4681
4682 if (!objsize && warn_stringop_overflow)
4683 {
4684 /* If it hasn't been provided by __strncat_chk, try to determine
4685 the size of the destination object into which the source is
4686 being copied. */
4687 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4688 }
4689
4690 /* Add one for the terminating nul. */
4691 tree srclen = (lendata.minlen
4692 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4693 size_one_node)
4694 : NULL_TREE);
4695
4696 /* The strncat function copies at most MAXREAD bytes and always appends
4697 the terminating nul so the specified upper bound should never be equal
4698 to (or greater than) the size of the destination. */
4699 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4700 && tree_int_cst_equal (objsize, maxread))
4701 {
4702 location_t loc = tree_nonartificial_location (exp);
4703 loc = expansion_point_location_if_in_system_header (loc);
4704
4705 warning_at (loc, OPT_Wstringop_overflow_,
4706 "%K%qD specified bound %E equals destination size",
4707 exp, get_callee_fndecl (exp), maxread);
4708
4709 return false;
4710 }
4711
4712 if (!srclen
4713 || (maxread && tree_fits_uhwi_p (maxread)
4714 && tree_fits_uhwi_p (srclen)
4715 && tree_int_cst_lt (maxread, srclen)))
4716 srclen = maxread;
4717
4718 /* The number of bytes to write is LEN but check_access will also
4719 check SRCLEN if LEN's value isn't known. */
4720 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4721 objsize);
4722 }
4723
4724 /* Similar to expand_builtin_strcat, do some very basic size validation
4725 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4726 the built-in expand to a call to the library function. */
4727
4728 static rtx
4729 expand_builtin_strncat (tree exp, rtx)
4730 {
4731 if (!validate_arglist (exp,
4732 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4733 || !warn_stringop_overflow)
4734 return NULL_RTX;
4735
4736 tree dest = CALL_EXPR_ARG (exp, 0);
4737 tree src = CALL_EXPR_ARG (exp, 1);
4738 /* The upper bound on the number of bytes to write. */
4739 tree maxread = CALL_EXPR_ARG (exp, 2);
4740
4741 /* Detect unterminated source (only). */
4742 if (!check_nul_terminated_array (exp, src, maxread))
4743 return NULL_RTX;
4744
4745 /* The length of the source sequence. */
4746 tree slen = c_strlen (src, 1);
4747
4748 /* Try to determine the range of lengths that the source expression
4749 refers to. Since the lengths are only used for warnings and not
4750 for code generation, disable strict mode below.
4751 tree maxlen = slen;
4752 if (!maxlen)
4753 {
4754 c_strlen_data lendata = { };
4755 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4756 maxlen = lendata.maxbound;
4757 }
4758
4759 /* Try to verify that the destination is big enough for the shortest
4760 string. First try to determine the size of the destination object
4761 into which the source is being copied. */
4762 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4763
4764 /* Add one for the terminating nul. */
4765 tree srclen = (maxlen
4766 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4767 size_one_node)
4768 : NULL_TREE);
4769
4770 /* The strncat function copies at most MAXREAD bytes and always appends
4771 the terminating nul so the specified upper bound should never be equal
4772 to (or greater than) the size of the destination. */
4773 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4774 && tree_int_cst_equal (destsize, maxread))
4775 {
4776 location_t loc = tree_nonartificial_location (exp);
4777 loc = expansion_point_location_if_in_system_header (loc);
4778
4779 warning_at (loc, OPT_Wstringop_overflow_,
4780 "%K%qD specified bound %E equals destination size",
4781 exp, get_callee_fndecl (exp), maxread);
4782
4783 return NULL_RTX;
4784 }
4785
4786 if (!srclen
4787 || (maxread && tree_fits_uhwi_p (maxread)
4788 && tree_fits_uhwi_p (srclen)
4789 && tree_int_cst_lt (maxread, srclen)))
4790 srclen = maxread;
4791
4792 /* The number of bytes to write is SRCLEN. */
4793 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4794
4795 return NULL_RTX;
4796 }
4797
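/* For example, hypothetical user code such as

     char d[8];
     ...
     strncat (d, s, sizeof d);

   specifies a bound equal to the destination size, so the
   "specified bound 8 equals destination size" warning above is
   issued and the call is left for the library to handle.  */
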
4798 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4799 NULL_RTX if we failed; the caller should emit a normal call. */
4800
4801 static rtx
4802 expand_builtin_strncpy (tree exp, rtx target)
4803 {
4804 location_t loc = EXPR_LOCATION (exp);
4805
4806 if (!validate_arglist (exp,
4807 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4808 return NULL_RTX;
4809 tree dest = CALL_EXPR_ARG (exp, 0);
4810 tree src = CALL_EXPR_ARG (exp, 1);
4811 /* The number of bytes to write (not the maximum). */
4812 tree len = CALL_EXPR_ARG (exp, 2);
4813
4814 if (!check_nul_terminated_array (exp, src, len))
4815 return NULL_RTX;
4816
4817 /* The length of the source sequence. */
4818 tree slen = c_strlen (src, 1);
4819
4820 if (warn_stringop_overflow)
4821 {
4822 tree destsize = compute_objsize (dest,
4823 warn_stringop_overflow - 1);
4824
4825 /* The number of bytes to write is LEN but check_access will also
4826 check SLEN if LEN's value isn't known. */
4827 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4828 destsize);
4829 }
4830
4831 /* We must be passed a constant len and src parameter. */
4832 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4833 return NULL_RTX;
4834
4835 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4836
4837 /* We're required to pad with trailing zeros if the requested
4838 len is greater than strlen(s2)+1. In that case try to
4839 use store_by_pieces; if that fails, punt. */
4840 if (tree_int_cst_lt (slen, len))
4841 {
4842 unsigned int dest_align = get_pointer_alignment (dest);
4843 const char *p = c_getstr (src);
4844 rtx dest_mem;
4845
4846 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4847 || !can_store_by_pieces (tree_to_uhwi (len),
4848 builtin_strncpy_read_str,
4849 CONST_CAST (char *, p),
4850 dest_align, false))
4851 return NULL_RTX;
4852
4853 dest_mem = get_memory_rtx (dest, len);
4854 store_by_pieces (dest_mem, tree_to_uhwi (len),
4855 builtin_strncpy_read_str,
4856 CONST_CAST (char *, p), dest_align, false,
4857 RETURN_BEGIN);
4858 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4859 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4860 return dest_mem;
4861 }
4862
4863 return NULL_RTX;
4864 }
4865
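/* As an illustration of the padding path above (hypothetical user
   code):

     char d[8];
     strncpy (d, "ab", 8);

   Here SLEN + 1 == 3 is less than LEN == 8, so the expander calls
   store_by_pieces with builtin_strncpy_read_str, which returns the
   string bytes for offsets within "ab" and zero bytes beyond it,
   producing "ab" followed by six NULs without a library call
   (assuming the destination alignment allows it).  */
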
4866 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4867 bytes from constant string DATA + OFFSET and return it as target
4868 constant. */
4869
4870 rtx
4871 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4872 scalar_int_mode mode)
4873 {
4874 const char *c = (const char *) data;
4875 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4876
4877 memset (p, *c, GET_MODE_SIZE (mode));
4878
4879 return c_readstr (p, mode);
4880 }
4881
4882 /* Callback routine for store_by_pieces. Return the RTL of a register
4883 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4884 char value given in the RTL register data. For example, if mode is
4885 4 bytes wide, return the RTL for 0x01010101*data. */
4886
4887 static rtx
4888 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4889 scalar_int_mode mode)
4890 {
4891 rtx target, coeff;
4892 size_t size;
4893 char *p;
4894
4895 size = GET_MODE_SIZE (mode);
4896 if (size == 1)
4897 return (rtx) data;
4898
4899 p = XALLOCAVEC (char, size);
4900 memset (p, 1, size);
4901 coeff = c_readstr (p, mode);
4902
4903 target = convert_to_mode (mode, (rtx) data, 1);
4904 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4905 return force_reg (mode, target);
4906 }
4907
4908 /* Expand expression EXP, which is a call to the memset builtin. Return
4909 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4910 try to get the result in TARGET, if convenient (and in mode MODE if that's
4911 convenient). */
4912
4913 static rtx
4914 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4915 {
4916 if (!validate_arglist (exp,
4917 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4918 return NULL_RTX;
4919
4920 tree dest = CALL_EXPR_ARG (exp, 0);
4921 tree val = CALL_EXPR_ARG (exp, 1);
4922 tree len = CALL_EXPR_ARG (exp, 2);
4923
4924 check_memop_access (exp, dest, NULL_TREE, len);
4925
4926 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4927 }
4928
4929 /* Helper function to do the actual work for expand_builtin_memset. The
4930 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4931 so that this can also be called without constructing an actual CALL_EXPR.
4932 The other arguments and return value are the same as for
4933 expand_builtin_memset. */
4934
4935 static rtx
4936 expand_builtin_memset_args (tree dest, tree val, tree len,
4937 rtx target, machine_mode mode, tree orig_exp)
4938 {
4939 tree fndecl, fn;
4940 enum built_in_function fcode;
4941 machine_mode val_mode;
4942 char c;
4943 unsigned int dest_align;
4944 rtx dest_mem, dest_addr, len_rtx;
4945 HOST_WIDE_INT expected_size = -1;
4946 unsigned int expected_align = 0;
4947 unsigned HOST_WIDE_INT min_size;
4948 unsigned HOST_WIDE_INT max_size;
4949 unsigned HOST_WIDE_INT probable_max_size;
4950
4951 dest_align = get_pointer_alignment (dest);
4952
4953 /* If DEST is not a pointer type, don't do this operation in-line. */
4954 if (dest_align == 0)
4955 return NULL_RTX;
4956
4957 if (currently_expanding_gimple_stmt)
4958 stringop_block_profile (currently_expanding_gimple_stmt,
4959 &expected_align, &expected_size);
4960
4961 if (expected_align < dest_align)
4962 expected_align = dest_align;
4963
4964 /* If the LEN parameter is zero, return DEST. */
4965 if (integer_zerop (len))
4966 {
4967 /* Evaluate and ignore VAL in case it has side-effects. */
4968 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4969 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4970 }
4971
4972 /* Stabilize the arguments in case we fail. */
4973 dest = builtin_save_expr (dest);
4974 val = builtin_save_expr (val);
4975 len = builtin_save_expr (len);
4976
4977 len_rtx = expand_normal (len);
4978 determine_block_size (len, len_rtx, &min_size, &max_size,
4979 &probable_max_size);
4980 dest_mem = get_memory_rtx (dest, len);
4981 val_mode = TYPE_MODE (unsigned_char_type_node);
4982
4983 if (TREE_CODE (val) != INTEGER_CST)
4984 {
4985 rtx val_rtx;
4986
4987 val_rtx = expand_normal (val);
4988 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4989
4990 /* Assume that we can memset by pieces if we can store
4991 the coefficients by pieces (in the required modes).
4992 We can't pass builtin_memset_gen_str as that emits RTL. */
4993 c = 1;
4994 if (tree_fits_uhwi_p (len)
4995 && can_store_by_pieces (tree_to_uhwi (len),
4996 builtin_memset_read_str, &c, dest_align,
4997 true))
4998 {
4999 val_rtx = force_reg (val_mode, val_rtx);
5000 store_by_pieces (dest_mem, tree_to_uhwi (len),
5001 builtin_memset_gen_str, val_rtx, dest_align,
5002 true, RETURN_BEGIN);
5003 }
5004 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5005 dest_align, expected_align,
5006 expected_size, min_size, max_size,
5007 probable_max_size))
5008 goto do_libcall;
5009
5010 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5011 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5012 return dest_mem;
5013 }
5014
5015 if (target_char_cast (val, &c))
5016 goto do_libcall;
5017
5018 if (c)
5019 {
5020 if (tree_fits_uhwi_p (len)
5021 && can_store_by_pieces (tree_to_uhwi (len),
5022 builtin_memset_read_str, &c, dest_align,
5023 true))
5024 store_by_pieces (dest_mem, tree_to_uhwi (len),
5025 builtin_memset_read_str, &c, dest_align, true,
5026 RETURN_BEGIN);
5027 else if (!set_storage_via_setmem (dest_mem, len_rtx,
5028 gen_int_mode (c, val_mode),
5029 dest_align, expected_align,
5030 expected_size, min_size, max_size,
5031 probable_max_size))
5032 goto do_libcall;
5033
5034 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5035 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5036 return dest_mem;
5037 }
5038
5039 set_mem_align (dest_mem, dest_align);
5040 dest_addr = clear_storage_hints (dest_mem, len_rtx,
5041 CALL_EXPR_TAILCALL (orig_exp)
5042 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
5043 expected_align, expected_size,
5044 min_size, max_size,
5045 probable_max_size);
5046
5047 if (dest_addr == 0)
5048 {
5049 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5050 dest_addr = convert_memory_address (ptr_mode, dest_addr);
5051 }
5052
5053 return dest_addr;
5054
5055 do_libcall:
5056 fndecl = get_callee_fndecl (orig_exp);
5057 fcode = DECL_FUNCTION_CODE (fndecl);
5058 if (fcode == BUILT_IN_MEMSET)
5059 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
5060 dest, val, len);
5061 else if (fcode == BUILT_IN_BZERO)
5062 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
5063 dest, len);
5064 else
5065 gcc_unreachable ();
5066 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5067 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5068 return expand_call (fn, target, target == const0_rtx);
5069 }
5070
5071 /* Expand expression EXP, which is a call to the bzero builtin. Return
5072 NULL_RTX if we failed and the caller should emit a normal call. */
5073
5074 static rtx
5075 expand_builtin_bzero (tree exp)
5076 {
5077 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5078 return NULL_RTX;
5079
5080 tree dest = CALL_EXPR_ARG (exp, 0);
5081 tree size = CALL_EXPR_ARG (exp, 1);
5082
5083 check_memop_access (exp, dest, NULL_TREE, size);
5084
5085 /* New argument list transforming bzero(ptr x, int y) to
5086 memset(ptr x, int 0, size_t y). This is done this way
5087 so that if it isn't expanded inline, we fall back to
5088 calling bzero instead of memset. */
5089
5090 location_t loc = EXPR_LOCATION (exp);
5091
5092 return expand_builtin_memset_args (dest, integer_zero_node,
5093 fold_convert_loc (loc,
5094 size_type_node, size),
5095 const0_rtx, VOIDmode, exp);
5096 }
5097
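/* Editorial illustration (not part of the original source): the
   transformation above means that a source-level call such as

     void
     clear_buffer (char *p, unsigned long n)
     {
       __builtin_bzero (p, n);
     }

   is expanded exactly as memset (p, 0, (size_t) n) would be; but because
   ORIG_EXP still carries the bzero fndecl, a failed inline expansion
   falls back to a library call to bzero rather than to memset.  */
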
5098 /* Try to expand cmpstr operation ICODE with the given operands.
5099 Return the result rtx on success, otherwise return null. */
5100
5101 static rtx
5102 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
5103 HOST_WIDE_INT align)
5104 {
5105 machine_mode insn_mode = insn_data[icode].operand[0].mode;
5106
5107 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
5108 target = NULL_RTX;
5109
5110 class expand_operand ops[4];
5111 create_output_operand (&ops[0], target, insn_mode);
5112 create_fixed_operand (&ops[1], arg1_rtx);
5113 create_fixed_operand (&ops[2], arg2_rtx);
5114 create_integer_operand (&ops[3], align);
5115 if (maybe_expand_insn (icode, 4, ops))
5116 return ops[0].value;
5117 return NULL_RTX;
5118 }
5119
5120 /* Expand expression EXP, which is a call to the memcmp built-in function.
5121 Return NULL_RTX if we failed and the caller should emit a normal call,
5122 otherwise try to get the result in TARGET, if convenient.
5123 RESULT_EQ is true if we can relax the returned value to be either zero
5124 or nonzero, without caring about the sign. */
5125
5126 static rtx
5127 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
5128 {
5129 if (!validate_arglist (exp,
5130 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5131 return NULL_RTX;
5132
5133 tree arg1 = CALL_EXPR_ARG (exp, 0);
5134 tree arg2 = CALL_EXPR_ARG (exp, 1);
5135 tree len = CALL_EXPR_ARG (exp, 2);
5136 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
5137 bool no_overflow = true;
5138
5139 /* Diagnose calls where the specified length exceeds the size of either
5140 object. */
5141 tree size = compute_objsize (arg1, 0);
5142 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5143 len, /*maxread=*/NULL_TREE, size,
5144 /*objsize=*/NULL_TREE);
5145 if (no_overflow)
5146 {
5147 size = compute_objsize (arg2, 0);
5148 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5149 len, /*maxread=*/NULL_TREE, size,
5150 /*objsize=*/NULL_TREE);
5151 }
5152
5153 /* If the specified length exceeds the size of either object,
5154 call the function. */
5155 if (!no_overflow)
5156 return NULL_RTX;
5157
5158 /* Due to the performance benefit, always inline the calls first
5159 when result_eq is false. */
5160 rtx result = NULL_RTX;
5161
5162 if (!result_eq && fcode != BUILT_IN_BCMP)
5163 {
5164 result = inline_expand_builtin_string_cmp (exp, target);
5165 if (result)
5166 return result;
5167 }
5168
5169 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5170 location_t loc = EXPR_LOCATION (exp);
5171
5172 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5173 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5174
5175 /* If we don't have POINTER_TYPE, call the function. */
5176 if (arg1_align == 0 || arg2_align == 0)
5177 return NULL_RTX;
5178
5179 rtx arg1_rtx = get_memory_rtx (arg1, len);
5180 rtx arg2_rtx = get_memory_rtx (arg2, len);
5181 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
5182
5183 /* Set MEM_SIZE as appropriate. */
5184 if (CONST_INT_P (len_rtx))
5185 {
5186 set_mem_size (arg1_rtx, INTVAL (len_rtx));
5187 set_mem_size (arg2_rtx, INTVAL (len_rtx));
5188 }
5189
5190 by_pieces_constfn constfn = NULL;
5191
5192 const char *src_str = c_getstr (arg2);
5193 if (result_eq && src_str == NULL)
5194 {
5195 src_str = c_getstr (arg1);
5196 if (src_str != NULL)
5197 std::swap (arg1_rtx, arg2_rtx);
5198 }
5199
5200 /* If SRC is a string constant and block move would be done
5201 by pieces, we can avoid loading the string from memory
5202 and only store the computed constants. */
5203 if (src_str
5204 && CONST_INT_P (len_rtx)
5205 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
5206 constfn = builtin_memcpy_read_str;
5207
5208 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
5209 TREE_TYPE (len), target,
5210 result_eq, constfn,
5211 CONST_CAST (char *, src_str));
5212
5213 if (result)
5214 {
5215 /* Return the value in the proper mode for this function. */
5216 if (GET_MODE (result) == mode)
5217 return result;
5218
5219 if (target != 0)
5220 {
5221 convert_move (target, result, 0);
5222 return target;
5223 }
5224
5225 return convert_to_mode (mode, result, 0);
5226 }
5227
5228 return NULL_RTX;
5229 }
5230
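/* Editorial illustration (not part of the original source): RESULT_EQ is
   the interesting case for code like

     int
     is_elf_header (const char *buf)
     {
       return __builtin_memcmp (buf, "\177ELF", 4) == 0;
     }

   Only the zero/nonzero distinction matters there, so emit_block_cmp_hints
   is free to compare the bytes as one wide value instead of computing the
   signed ordering that memcmp normally returns.  */
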
5231 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
5232 if we failed and the caller should emit a normal call, otherwise try to get
5233 the result in TARGET, if convenient. */
5234
5235 static rtx
5236 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
5237 {
5238 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5239 return NULL_RTX;
5240
5241 tree arg1 = CALL_EXPR_ARG (exp, 0);
5242 tree arg2 = CALL_EXPR_ARG (exp, 1);
5243
5244 if (!check_nul_terminated_array (exp, arg1)
5245 || !check_nul_terminated_array (exp, arg2))
5246 return NULL_RTX;
5247
5248 /* Due to the performance benefit, always inline the calls first. */
5249 rtx result = NULL_RTX;
5250 result = inline_expand_builtin_string_cmp (exp, target);
5251 if (result)
5252 return result;
5253
5254 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
5255 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5256 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
5257 return NULL_RTX;
5258
5259 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5260 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5261
5262 /* If we don't have POINTER_TYPE, call the function. */
5263 if (arg1_align == 0 || arg2_align == 0)
5264 return NULL_RTX;
5265
5266 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
5267 arg1 = builtin_save_expr (arg1);
5268 arg2 = builtin_save_expr (arg2);
5269
5270 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
5271 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
5272
5273 /* Try to call cmpstrsi. */
5274 if (cmpstr_icode != CODE_FOR_nothing)
5275 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
5276 MIN (arg1_align, arg2_align));
5277
5278 /* Try to determine at least one length and call cmpstrnsi. */
5279 if (!result && cmpstrn_icode != CODE_FOR_nothing)
5280 {
5281 tree len;
5282 rtx arg3_rtx;
5283
5284 tree len1 = c_strlen (arg1, 1);
5285 tree len2 = c_strlen (arg2, 1);
5286
5287 if (len1)
5288 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5289 if (len2)
5290 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5291
5292 /* If we don't have a constant length for the first, use the length
5293 of the second, if we know it. We don't require a constant for
5294 this case; some cost analysis could be done if both are available
5295 but neither is constant. For now, assume they're equally cheap,
5296 unless one has side effects. If both strings have constant lengths,
5297 use the smaller. */
5298
5299 if (!len1)
5300 len = len2;
5301 else if (!len2)
5302 len = len1;
5303 else if (TREE_SIDE_EFFECTS (len1))
5304 len = len2;
5305 else if (TREE_SIDE_EFFECTS (len2))
5306 len = len1;
5307 else if (TREE_CODE (len1) != INTEGER_CST)
5308 len = len2;
5309 else if (TREE_CODE (len2) != INTEGER_CST)
5310 len = len1;
5311 else if (tree_int_cst_lt (len1, len2))
5312 len = len1;
5313 else
5314 len = len2;
5315
5316 /* If both arguments have side effects, we cannot optimize. */
5317 if (len && !TREE_SIDE_EFFECTS (len))
5318 {
5319 arg3_rtx = expand_normal (len);
5320 result = expand_cmpstrn_or_cmpmem
5321 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5322 arg3_rtx, MIN (arg1_align, arg2_align));
5323 }
5324 }
5325
5326 tree fndecl = get_callee_fndecl (exp);
5327 if (result)
5328 {
5329 /* Check to see if the argument was declared attribute nonstring
5330 and if so, issue a warning since at this point it's not known
5331 to be nul-terminated. */
5332 maybe_warn_nonstring_arg (fndecl, exp);
5333
5334 /* Return the value in the proper mode for this function. */
5335 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5336 if (GET_MODE (result) == mode)
5337 return result;
5338 if (target == 0)
5339 return convert_to_mode (mode, result, 0);
5340 convert_move (target, result, 0);
5341 return target;
5342 }
5343
5344 /* Expand the library call ourselves using a stabilized argument
5345 list to avoid re-evaluating the function's arguments twice. */
5346 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5347 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5348 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5349 return expand_call (fn, target, target == const0_rtx);
5350 }
5351
5352 /* Expand expression EXP, which is a call to the strncmp builtin. Return
5353 NULL_RTX if we failed and the caller should emit a normal call, otherwise try to get
5354 the result in TARGET, if convenient. */
5355
5356 static rtx
5357 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5358 ATTRIBUTE_UNUSED machine_mode mode)
5359 {
5360 if (!validate_arglist (exp,
5361 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5362 return NULL_RTX;
5363
5364 tree arg1 = CALL_EXPR_ARG (exp, 0);
5365 tree arg2 = CALL_EXPR_ARG (exp, 1);
5366 tree arg3 = CALL_EXPR_ARG (exp, 2);
5367
5368 if (!check_nul_terminated_array (exp, arg1, arg3)
5369 || !check_nul_terminated_array (exp, arg2, arg3))
5370 return NULL_RTX;
5371
5372 /* Due to the performance benefit, always inline the calls first. */
5373 rtx result = NULL_RTX;
5374 result = inline_expand_builtin_string_cmp (exp, target);
5375 if (result)
5376 return result;
5377
5378 /* If c_strlen can determine an expression for one of the string
5379 lengths, and it doesn't have side effects, then emit cmpstrnsi
5380 using length MIN(strlen(string)+1, arg3). */
5381 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5382 if (cmpstrn_icode == CODE_FOR_nothing)
5383 return NULL_RTX;
5384
5385 tree len;
5386
5387 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5388 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5389
5390 tree len1 = c_strlen (arg1, 1);
5391 tree len2 = c_strlen (arg2, 1);
5392
5393 location_t loc = EXPR_LOCATION (exp);
5394
5395 if (len1)
5396 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5397 if (len2)
5398 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5399
5400 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5401
5402 /* If we don't have a constant length for the first, use the length
5403 of the second, if we know it. If neither string is constant length,
5404 use the given length argument. We don't require a constant for
5405 this case; some cost analysis could be done if both are available
5406 but neither is constant. For now, assume they're equally cheap,
5407 unless one has side effects. If both strings have constant lengths,
5408 use the smaller. */
5409
5410 if (!len1 && !len2)
5411 len = len3;
5412 else if (!len1)
5413 len = len2;
5414 else if (!len2)
5415 len = len1;
5416 else if (TREE_SIDE_EFFECTS (len1))
5417 len = len2;
5418 else if (TREE_SIDE_EFFECTS (len2))
5419 len = len1;
5420 else if (TREE_CODE (len1) != INTEGER_CST)
5421 len = len2;
5422 else if (TREE_CODE (len2) != INTEGER_CST)
5423 len = len1;
5424 else if (tree_int_cst_lt (len1, len2))
5425 len = len1;
5426 else
5427 len = len2;
5428
5429 /* If we are not using the given length, we must incorporate it here.
5430 The actual new length parameter will be MIN(len,arg3) in this case. */
5431 if (len != len3)
5432 {
5433 len = fold_convert_loc (loc, sizetype, len);
5434 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5435 }
5436 rtx arg1_rtx = get_memory_rtx (arg1, len);
5437 rtx arg2_rtx = get_memory_rtx (arg2, len);
5438 rtx arg3_rtx = expand_normal (len);
5439 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5440 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5441 MIN (arg1_align, arg2_align));
5442
5443 tree fndecl = get_callee_fndecl (exp);
5444 if (result)
5445 {
5446 /* Check to see if the argument was declared attribute nonstring
5447 and if so, issue a warning since at this point it's not known
5448 to be nul-terminated. */
5449 maybe_warn_nonstring_arg (fndecl, exp);
5450
5451 /* Return the value in the proper mode for this function. */
5452 mode = TYPE_MODE (TREE_TYPE (exp));
5453 if (GET_MODE (result) == mode)
5454 return result;
5455 if (target == 0)
5456 return convert_to_mode (mode, result, 0);
5457 convert_move (target, result, 0);
5458 return target;
5459 }
5460
5461 /* Expand the library call ourselves using a stabilized argument
5462 list to avoid re-evaluating the function's arguments twice. */
5463 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5464 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5465 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5466 return expand_call (fn, target, target == const0_rtx);
5467 }
5468
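/* Editorial illustration (not part of the original source): for a call
   such as

     int
     has_prefix (const char *s)
     {
       return __builtin_strncmp (s, "gcc-", 8);
     }

   c_strlen gives a constant length for the second argument, so the length
   handed to cmpstrnsi becomes MIN (strlen ("gcc-") + 1, 8) = 5, as the
   comment above describes.  */
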
5469 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5470 if that's convenient. */
5471
5472 rtx
5473 expand_builtin_saveregs (void)
5474 {
5475 rtx val;
5476 rtx_insn *seq;
5477
5478 /* Don't do __builtin_saveregs more than once in a function.
5479 Save the result of the first call and reuse it. */
5480 if (saveregs_value != 0)
5481 return saveregs_value;
5482
5483 /* When this function is called, it means that registers must be
5484 saved on entry to this function. So we migrate the call to the
5485 first insn of this function. */
5486
5487 start_sequence ();
5488
5489 /* Do whatever the machine needs done in this case. */
5490 val = targetm.calls.expand_builtin_saveregs ();
5491
5492 seq = get_insns ();
5493 end_sequence ();
5494
5495 saveregs_value = val;
5496
5497 /* Put the insns after the NOTE that starts the function. If this
5498 is inside a start_sequence, make the outer-level insn chain current, so
5499 the code is placed at the start of the function. */
5500 push_topmost_sequence ();
5501 emit_insn_after (seq, entry_of_function ());
5502 pop_topmost_sequence ();
5503
5504 return val;
5505 }
5506
5507 /* Expand a call to __builtin_next_arg. */
5508
5509 static rtx
5510 expand_builtin_next_arg (void)
5511 {
5512 /* Checking arguments is already done in fold_builtin_next_arg
5513 that must be called before this function. */
5514 return expand_binop (ptr_mode, add_optab,
5515 crtl->args.internal_arg_pointer,
5516 crtl->args.arg_offset_rtx,
5517 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5518 }
5519
5520 /* Make it easier for the backends by protecting the valist argument
5521 from multiple evaluations. */
5522
5523 static tree
5524 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5525 {
5526 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5527
5528 /* The current way of determining the type of valist is completely
5529 bogus. We should have the information on the va builtin instead. */
5530 if (!vatype)
5531 vatype = targetm.fn_abi_va_list (cfun->decl);
5532
5533 if (TREE_CODE (vatype) == ARRAY_TYPE)
5534 {
5535 if (TREE_SIDE_EFFECTS (valist))
5536 valist = save_expr (valist);
5537
5538 /* For this case, the backends will be expecting a pointer to
5539 vatype, but it's possible we've actually been given an array
5540 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5541 So fix it. */
5542 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5543 {
5544 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5545 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5546 }
5547 }
5548 else
5549 {
5550 tree pt = build_pointer_type (vatype);
5551
5552 if (! needs_lvalue)
5553 {
5554 if (! TREE_SIDE_EFFECTS (valist))
5555 return valist;
5556
5557 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5558 TREE_SIDE_EFFECTS (valist) = 1;
5559 }
5560
5561 if (TREE_SIDE_EFFECTS (valist))
5562 valist = save_expr (valist);
5563 valist = fold_build2_loc (loc, MEM_REF,
5564 vatype, valist, build_int_cst (pt, 0));
5565 }
5566
5567 return valist;
5568 }
5569
5570 /* The "standard" definition of va_list is void*. */
5571
5572 tree
5573 std_build_builtin_va_list (void)
5574 {
5575 return ptr_type_node;
5576 }
5577
5578 /* The "standard" abi va_list is va_list_type_node. */
5579
5580 tree
5581 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5582 {
5583 return va_list_type_node;
5584 }
5585
5586 /* The "standard" type of va_list is va_list_type_node. */
5587
5588 tree
5589 std_canonical_va_list_type (tree type)
5590 {
5591 tree wtype, htype;
5592
5593 wtype = va_list_type_node;
5594 htype = type;
5595
5596 if (TREE_CODE (wtype) == ARRAY_TYPE)
5597 {
5598 /* If va_list is an array type, the argument may have decayed
5599 to a pointer type, e.g. by being passed to another function.
5600 In that case, unwrap both types so that we can compare the
5601 underlying records. */
5602 if (TREE_CODE (htype) == ARRAY_TYPE
5603 || POINTER_TYPE_P (htype))
5604 {
5605 wtype = TREE_TYPE (wtype);
5606 htype = TREE_TYPE (htype);
5607 }
5608 }
5609 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5610 return va_list_type_node;
5611
5612 return NULL_TREE;
5613 }
5614
5615 /* The "standard" implementation of va_start: just assign `nextarg' to
5616 the variable. */
5617
5618 void
5619 std_expand_builtin_va_start (tree valist, rtx nextarg)
5620 {
5621 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5622 convert_move (va_r, nextarg, 0);
5623 }
5624
5625 /* Expand EXP, a call to __builtin_va_start. */
5626
5627 static rtx
5628 expand_builtin_va_start (tree exp)
5629 {
5630 rtx nextarg;
5631 tree valist;
5632 location_t loc = EXPR_LOCATION (exp);
5633
5634 if (call_expr_nargs (exp) < 2)
5635 {
5636 error_at (loc, "too few arguments to function %<va_start%>");
5637 return const0_rtx;
5638 }
5639
5640 if (fold_builtin_next_arg (exp, true))
5641 return const0_rtx;
5642
5643 nextarg = expand_builtin_next_arg ();
5644 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5645
5646 if (targetm.expand_builtin_va_start)
5647 targetm.expand_builtin_va_start (valist, nextarg);
5648 else
5649 std_expand_builtin_va_start (valist, nextarg);
5650
5651 return const0_rtx;
5652 }
5653
5654 /* Expand EXP, a call to __builtin_va_end. */
5655
5656 static rtx
5657 expand_builtin_va_end (tree exp)
5658 {
5659 tree valist = CALL_EXPR_ARG (exp, 0);
5660
5661 /* Evaluate for side effects, if needed. I hate macros that don't
5662 do that. */
5663 if (TREE_SIDE_EFFECTS (valist))
5664 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5665
5666 return const0_rtx;
5667 }
5668
5669 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5670 builtin rather than just as an assignment in stdarg.h because of the
5671 nastiness of array-type va_list types. */
5672
5673 static rtx
5674 expand_builtin_va_copy (tree exp)
5675 {
5676 tree dst, src, t;
5677 location_t loc = EXPR_LOCATION (exp);
5678
5679 dst = CALL_EXPR_ARG (exp, 0);
5680 src = CALL_EXPR_ARG (exp, 1);
5681
5682 dst = stabilize_va_list_loc (loc, dst, 1);
5683 src = stabilize_va_list_loc (loc, src, 0);
5684
5685 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5686
5687 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5688 {
5689 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5690 TREE_SIDE_EFFECTS (t) = 1;
5691 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5692 }
5693 else
5694 {
5695 rtx dstb, srcb, size;
5696
5697 /* Evaluate to pointers. */
5698 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5699 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5700 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5701 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5702
5703 dstb = convert_memory_address (Pmode, dstb);
5704 srcb = convert_memory_address (Pmode, srcb);
5705
5706 /* "Dereference" to BLKmode memories. */
5707 dstb = gen_rtx_MEM (BLKmode, dstb);
5708 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5709 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5710 srcb = gen_rtx_MEM (BLKmode, srcb);
5711 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5712 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5713
5714 /* Copy. */
5715 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5716 }
5717
5718 return const0_rtx;
5719 }
5720
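/* Editorial illustration (not part of the original source): the block-copy
   branch above matters on targets whose ABI defines va_list as an array
   type (for instance the x86-64 SysV ABI, where it is a one-element array
   of a structure).  In

     void
     forward (const char *fmt, __builtin_va_list ap)
     {
       __builtin_va_list aq;
       __builtin_va_copy (aq, ap);
       int first = __builtin_va_arg (aq, int);
       (void) fmt; (void) first;
       __builtin_va_end (aq);
     }

   the copy cannot be a simple pointer assignment and is expanded as a
   block move of TYPE_SIZE_UNIT bytes between the two objects.  */
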
5721 /* Expand a call to one of the builtin functions __builtin_frame_address or
5722 __builtin_return_address. */
5723
5724 static rtx
5725 expand_builtin_frame_address (tree fndecl, tree exp)
5726 {
5727 /* The argument must be a nonnegative integer constant.
5728 It counts the number of frames to scan up the stack.
5729 The value is either the frame pointer value or the return
5730 address saved in that frame. */
5731 if (call_expr_nargs (exp) == 0)
5732 /* Warning about missing arg was already issued. */
5733 return const0_rtx;
5734 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5735 {
5736 error ("invalid argument to %qD", fndecl);
5737 return const0_rtx;
5738 }
5739 else
5740 {
5741 /* Number of frames to scan up the stack. */
5742 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5743
5744 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5745
5746 /* Some ports cannot access arbitrary stack frames. */
5747 if (tem == NULL)
5748 {
5749 warning (0, "unsupported argument to %qD", fndecl);
5750 return const0_rtx;
5751 }
5752
5753 if (count)
5754 {
5755 /* Warn since no effort is made to ensure that any frame
5756 beyond the current one exists or can be safely reached. */
5757 warning (OPT_Wframe_address, "calling %qD with "
5758 "a nonzero argument is unsafe", fndecl);
5759 }
5760
5761 /* For __builtin_frame_address, return what we've got. */
5762 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5763 return tem;
5764
5765 if (!REG_P (tem)
5766 && ! CONSTANT_P (tem))
5767 tem = copy_addr_to_reg (tem);
5768 return tem;
5769 }
5770 }
5771
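/* Editorial illustration (not part of the original source): the argument
   must be a nonnegative integer constant, and only a count of 0 is
   reliable everywhere:

     void *
     my_caller (void)
     {
       return __builtin_return_address (0);
     }

   A nonzero count, e.g. __builtin_frame_address (2), triggers the
   -Wframe-address warning above, since nothing guarantees that the outer
   frames exist or can be walked on the target.  */
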
5772 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5773 failed and the caller should emit a normal call. */
5774
5775 static rtx
5776 expand_builtin_alloca (tree exp)
5777 {
5778 rtx op0;
5779 rtx result;
5780 unsigned int align;
5781 tree fndecl = get_callee_fndecl (exp);
5782 HOST_WIDE_INT max_size;
5783 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5784 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5785 bool valid_arglist
5786 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5787 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5788 VOID_TYPE)
5789 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5790 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5791 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5792
5793 if (!valid_arglist)
5794 return NULL_RTX;
5795
5796 if ((alloca_for_var
5797 && warn_vla_limit >= HOST_WIDE_INT_MAX
5798 && warn_alloc_size_limit < warn_vla_limit)
5799 || (!alloca_for_var
5800 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5801 && warn_alloc_size_limit < warn_alloca_limit
5802 ))
5803 {
5804 /* -Walloca-larger-than and -Wvla-larger-than settings of
5805 less than HOST_WIDE_INT_MAX override the more general
5806 -Walloc-size-larger-than so unless either of the former
5807 options is smaller than the last one (which would imply
5808 that the call was already checked), check the alloca
5809 arguments for overflow. */
5810 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5811 int idx[] = { 0, -1 };
5812 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5813 }
5814
5815 /* Compute the argument. */
5816 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5817
5818 /* Compute the alignment. */
5819 align = (fcode == BUILT_IN_ALLOCA
5820 ? BIGGEST_ALIGNMENT
5821 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5822
5823 /* Compute the maximum size. */
5824 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5825 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5826 : -1);
5827
5828 /* Allocate the desired space. If the allocation stems from the declaration
5829 of a variable-sized object, it cannot accumulate. */
5830 result
5831 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5832 result = convert_memory_address (ptr_mode, result);
5833
5834 /* Dynamic allocations for variables are recorded during gimplification. */
5835 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5836 record_dynamic_alloc (exp);
5837
5838 return result;
5839 }
5840
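/* Editorial illustration (not part of the original source): the three
   argument shapes validated above correspond to

     void
     use_alloca (unsigned long n)
     {
       char *a = __builtin_alloca (n);
       char *b = __builtin_alloca_with_align (n, 256);
       __builtin_memset (a, 0, n);
       __builtin_memset (b, 0, n);
     }

   and to __builtin_alloca_with_align_and_max, which additionally carries
   a maximum-size bound.  The alignment argument is expressed in bits
   (256 bits = 32 bytes here); plain alloca defaults to BIGGEST_ALIGNMENT.  */
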
5841 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5842 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5843 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5844 handle_builtin_stack_restore function. */
5845
5846 static rtx
5847 expand_asan_emit_allocas_unpoison (tree exp)
5848 {
5849 tree arg0 = CALL_EXPR_ARG (exp, 0);
5850 tree arg1 = CALL_EXPR_ARG (exp, 1);
5851 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5852 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5853 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5854 stack_pointer_rtx, NULL_RTX, 0,
5855 OPTAB_LIB_WIDEN);
5856 off = convert_modes (ptr_mode, Pmode, off, 0);
5857 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5858 OPTAB_LIB_WIDEN);
5859 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5860 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5861 top, ptr_mode, bot, ptr_mode);
5862 return ret;
5863 }
5864
5865 /* Expand a call to bswap builtin in EXP.
5866 Return NULL_RTX if a normal call should be emitted rather than expanding the
5867 function in-line. If convenient, the result should be placed in TARGET.
5868 SUBTARGET may be used as the target for computing one of EXP's operands. */
5869
5870 static rtx
5871 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5872 rtx subtarget)
5873 {
5874 tree arg;
5875 rtx op0;
5876
5877 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5878 return NULL_RTX;
5879
5880 arg = CALL_EXPR_ARG (exp, 0);
5881 op0 = expand_expr (arg,
5882 subtarget && GET_MODE (subtarget) == target_mode
5883 ? subtarget : NULL_RTX,
5884 target_mode, EXPAND_NORMAL);
5885 if (GET_MODE (op0) != target_mode)
5886 op0 = convert_to_mode (target_mode, op0, 1);
5887
5888 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5889
5890 gcc_assert (target);
5891
5892 return convert_to_mode (target_mode, target, 1);
5893 }
5894
5895 /* Expand a call to a unary builtin in EXP.
5896 Return NULL_RTX if a normal call should be emitted rather than expanding the
5897 function in-line. If convenient, the result should be placed in TARGET.
5898 SUBTARGET may be used as the target for computing one of EXP's operands. */
5899
5900 static rtx
5901 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5902 rtx subtarget, optab op_optab)
5903 {
5904 rtx op0;
5905
5906 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5907 return NULL_RTX;
5908
5909 /* Compute the argument. */
5910 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5911 (subtarget
5912 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5913 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5914 VOIDmode, EXPAND_NORMAL);
5915 /* Compute op, into TARGET if possible.
5916 Set TARGET to wherever the result comes back. */
5917 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5918 op_optab, op0, target, op_optab != clrsb_optab);
5919 gcc_assert (target);
5920
5921 return convert_to_mode (target_mode, target, 0);
5922 }
5923
5924 /* Expand a call to __builtin_expect. We just return our argument
5925 as the builtin_expect semantics should already have been handled by the
5926 tree branch prediction pass. */
5927
5928 static rtx
5929 expand_builtin_expect (tree exp, rtx target)
5930 {
5931 tree arg;
5932
5933 if (call_expr_nargs (exp) < 2)
5934 return const0_rtx;
5935 arg = CALL_EXPR_ARG (exp, 0);
5936
5937 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5938 /* When guessing was done, the hints should be already stripped away. */
5939 gcc_assert (!flag_guess_branch_prob
5940 || optimize == 0 || seen_error ());
5941 return target;
5942 }
5943
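/* Editorial illustration (not part of the original source): the classic
   use of this builtin is

     #define likely(x)   __builtin_expect (!!(x), 1)
     #define unlikely(x) __builtin_expect (!!(x), 0)

     if (unlikely (ptr == 0))
       __builtin_abort ();

   By the time expansion runs, the branch-prediction pass has already
   consumed the hint, so the code above simply evaluates and returns the
   first argument.  */
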
5944 /* Expand a call to __builtin_expect_with_probability. We just return our
5945 argument as the builtin_expect semantics should already have been handled
5946 by the tree branch prediction pass. */
5947
5948 static rtx
5949 expand_builtin_expect_with_probability (tree exp, rtx target)
5950 {
5951 tree arg;
5952
5953 if (call_expr_nargs (exp) < 3)
5954 return const0_rtx;
5955 arg = CALL_EXPR_ARG (exp, 0);
5956
5957 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5958 /* When guessing was done, the hints should be already stripped away. */
5959 gcc_assert (!flag_guess_branch_prob
5960 || optimize == 0 || seen_error ());
5961 return target;
5962 }
5963
5964
5965 /* Expand a call to __builtin_assume_aligned. We just return our first
5966 argument as the builtin_assume_aligned semantic should've been already
5967 executed by CCP. */
5968
5969 static rtx
5970 expand_builtin_assume_aligned (tree exp, rtx target)
5971 {
5972 if (call_expr_nargs (exp) < 2)
5973 return const0_rtx;
5974 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5975 EXPAND_NORMAL);
5976 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5977 && (call_expr_nargs (exp) < 3
5978 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5979 return target;
5980 }
5981
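/* Editorial illustration (not part of the original source): typical calls
   look like

     float *p = (float *) __builtin_assume_aligned (buf, 32);
     float *q = (float *) __builtin_assume_aligned (buf, 32, 8);

   where the optional third argument states a known misalignment offset.
   The alignment facts were already exploited by CCP, so the expander above
   only evaluates and returns the pointer, after asserting that the extra
   arguments have no side effects.  */
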
5982 void
5983 expand_builtin_trap (void)
5984 {
5985 if (targetm.have_trap ())
5986 {
5987 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5988 /* For trap insns when not accumulating outgoing args force
5989 REG_ARGS_SIZE note to prevent crossjumping of calls with
5990 different args sizes. */
5991 if (!ACCUMULATE_OUTGOING_ARGS)
5992 add_args_size_note (insn, stack_pointer_delta);
5993 }
5994 else
5995 {
5996 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5997 tree call_expr = build_call_expr (fn, 0);
5998 expand_call (call_expr, NULL_RTX, false);
5999 }
6000
6001 emit_barrier ();
6002 }
6003
6004 /* Expand a call to __builtin_unreachable. We do nothing except emit
6005 a barrier saying that control flow will not pass here.
6006
6007 It is the responsibility of the program being compiled to ensure
6008 that control flow never reaches __builtin_unreachable. */
6009 static void
6010 expand_builtin_unreachable (void)
6011 {
6012 emit_barrier ();
6013 }
6014
6015 /* Expand EXP, a call to fabs, fabsf or fabsl.
6016 Return NULL_RTX if a normal call should be emitted rather than expanding
6017 the function inline. If convenient, the result should be placed
6018 in TARGET. SUBTARGET may be used as the target for computing
6019 the operand. */
6020
6021 static rtx
6022 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
6023 {
6024 machine_mode mode;
6025 tree arg;
6026 rtx op0;
6027
6028 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6029 return NULL_RTX;
6030
6031 arg = CALL_EXPR_ARG (exp, 0);
6032 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6033 mode = TYPE_MODE (TREE_TYPE (arg));
6034 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6035 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6036 }
6037
6038 /* Expand EXP, a call to copysign, copysignf, or copysignl.
6039 Return NULL_RTX if a normal call should be emitted rather than expanding the
6040 function inline. If convenient, the result should be placed in TARGET.
6041 SUBTARGET may be used as the target for computing the operand. */
6042
6043 static rtx
6044 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6045 {
6046 rtx op0, op1;
6047 tree arg;
6048
6049 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6050 return NULL_RTX;
6051
6052 arg = CALL_EXPR_ARG (exp, 0);
6053 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6054
6055 arg = CALL_EXPR_ARG (exp, 1);
6056 op1 = expand_normal (arg);
6057
6058 return expand_copysign (op0, op1, target);
6059 }
6060
6061 /* Expand a call to __builtin___clear_cache. */
6062
6063 static rtx
6064 expand_builtin___clear_cache (tree exp)
6065 {
6066 if (!targetm.code_for_clear_cache)
6067 {
6068 #ifdef CLEAR_INSN_CACHE
6069 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6070 does something. Just do the default expansion to a call to
6071 __clear_cache(). */
6072 return NULL_RTX;
6073 #else
6074 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6075 does nothing. There is no need to call it. Do nothing. */
6076 return const0_rtx;
6077 #endif /* CLEAR_INSN_CACHE */
6078 }
6079
6080 /* We have a "clear_cache" insn, and it will handle everything. */
6081 tree begin, end;
6082 rtx begin_rtx, end_rtx;
6083
6084 /* We must not expand to a library call. If we did, any
6085 fallback library function in libgcc that might contain a call to
6086 __builtin___clear_cache() would recurse infinitely. */
6087 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6088 {
6089 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6090 return const0_rtx;
6091 }
6092
6093 if (targetm.have_clear_cache ())
6094 {
6095 class expand_operand ops[2];
6096
6097 begin = CALL_EXPR_ARG (exp, 0);
6098 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6099
6100 end = CALL_EXPR_ARG (exp, 1);
6101 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6102
6103 create_address_operand (&ops[0], begin_rtx);
6104 create_address_operand (&ops[1], end_rtx);
6105 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6106 return const0_rtx;
6107 }
6108 return const0_rtx;
6109 }
6110
6111 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6112
6113 static rtx
6114 round_trampoline_addr (rtx tramp)
6115 {
6116 rtx temp, addend, mask;
6117
6118 /* If we don't need too much alignment, we'll have been guaranteed
6119 proper alignment by get_trampoline_type. */
6120 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6121 return tramp;
6122
6123 /* Round address up to desired boundary. */
6124 temp = gen_reg_rtx (Pmode);
6125 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6126 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6127
6128 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6129 temp, 0, OPTAB_LIB_WIDEN);
6130 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6131 temp, 0, OPTAB_LIB_WIDEN);
6132
6133 return tramp;
6134 }
6135
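/* Editorial illustration (not part of the original source): the two binops
   above implement the usual round-up-to-a-power-of-two idiom

     rounded = (tramp + (align_bytes - 1)) & -align_bytes;

   e.g. with a 64-bit (8-byte) TRAMPOLINE_ALIGNMENT, an address of 0x1003
   becomes (0x1003 + 7) & -8 = 0x1008.  */
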
6136 static rtx
6137 expand_builtin_init_trampoline (tree exp, bool onstack)
6138 {
6139 tree t_tramp, t_func, t_chain;
6140 rtx m_tramp, r_tramp, r_chain, tmp;
6141
6142 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6143 POINTER_TYPE, VOID_TYPE))
6144 return NULL_RTX;
6145
6146 t_tramp = CALL_EXPR_ARG (exp, 0);
6147 t_func = CALL_EXPR_ARG (exp, 1);
6148 t_chain = CALL_EXPR_ARG (exp, 2);
6149
6150 r_tramp = expand_normal (t_tramp);
6151 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6152 MEM_NOTRAP_P (m_tramp) = 1;
6153
6154 /* If ONSTACK, the TRAMP argument should be the address of a field
6155 within the local function's FRAME decl. Either way, let's see if
6156 we can fill in the MEM_ATTRs for this memory. */
6157 if (TREE_CODE (t_tramp) == ADDR_EXPR)
6158 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6159
6160 /* Creator of a heap trampoline is responsible for making sure the
6161 address is aligned to at least STACK_BOUNDARY. Normally malloc
6162 will ensure this anyhow. */
6163 tmp = round_trampoline_addr (r_tramp);
6164 if (tmp != r_tramp)
6165 {
6166 m_tramp = change_address (m_tramp, BLKmode, tmp);
6167 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6168 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6169 }
6170
6171 /* The FUNC argument should be the address of the nested function.
6172 Extract the actual function decl to pass to the hook. */
6173 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6174 t_func = TREE_OPERAND (t_func, 0);
6175 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6176
6177 r_chain = expand_normal (t_chain);
6178
6179 /* Generate insns to initialize the trampoline. */
6180 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6181
6182 if (onstack)
6183 {
6184 trampolines_created = 1;
6185
6186 if (targetm.calls.custom_function_descriptors != 0)
6187 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6188 "trampoline generated for nested function %qD", t_func);
6189 }
6190
6191 return const0_rtx;
6192 }
6193
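/* Editorial illustration (not part of the original source; WALK is a
   hypothetical callback-taking function declared only for the sketch):
   trampolines are what make taking the address of a GNU C nested function
   work, e.g.

     extern void walk (int *a, unsigned long n, int (*cmp) (int, int));

     void
     sort_by_key (int *a, unsigned long n, const int *key)
     {
       int by_key (int i, int j) { return key[i] - key[j]; }
       walk (a, n, by_key);
     }

   Passing BY_KEY to WALK needs a stack trampoline that loads the static
   chain (the frame holding KEY) before jumping to the real code, and the
   -Wtrampolines warning above points at such functions.  */
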
6194 static rtx
6195 expand_builtin_adjust_trampoline (tree exp)
6196 {
6197 rtx tramp;
6198
6199 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6200 return NULL_RTX;
6201
6202 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6203 tramp = round_trampoline_addr (tramp);
6204 if (targetm.calls.trampoline_adjust_address)
6205 tramp = targetm.calls.trampoline_adjust_address (tramp);
6206
6207 return tramp;
6208 }
6209
6210 /* Expand a call to the builtin descriptor initialization routine.
6211 A descriptor is made up of a couple of pointers to the static
6212 chain and the code entry in this order. */
6213
6214 static rtx
6215 expand_builtin_init_descriptor (tree exp)
6216 {
6217 tree t_descr, t_func, t_chain;
6218 rtx m_descr, r_descr, r_func, r_chain;
6219
6220 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6221 VOID_TYPE))
6222 return NULL_RTX;
6223
6224 t_descr = CALL_EXPR_ARG (exp, 0);
6225 t_func = CALL_EXPR_ARG (exp, 1);
6226 t_chain = CALL_EXPR_ARG (exp, 2);
6227
6228 r_descr = expand_normal (t_descr);
6229 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6230 MEM_NOTRAP_P (m_descr) = 1;
6231 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6232
6233 r_func = expand_normal (t_func);
6234 r_chain = expand_normal (t_chain);
6235
6236 /* Generate insns to initialize the descriptor. */
6237 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6238 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6239 POINTER_SIZE / BITS_PER_UNIT), r_func);
6240
6241 return const0_rtx;
6242 }
6243
6244 /* Expand a call to the builtin descriptor adjustment routine. */
6245
6246 static rtx
6247 expand_builtin_adjust_descriptor (tree exp)
6248 {
6249 rtx tramp;
6250
6251 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6252 return NULL_RTX;
6253
6254 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6255
6256 /* Unalign the descriptor to allow runtime identification. */
6257 tramp = plus_constant (ptr_mode, tramp,
6258 targetm.calls.custom_function_descriptors);
6259
6260 return force_operand (tramp, NULL_RTX);
6261 }
6262
6263 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6264 function. The function first checks whether the back end provides
6265 an insn to implement signbit for the respective mode. If not, it
6266 checks whether the floating point format of the value is such that
6267 the sign bit can be extracted. If that is not the case, error out.
6268 EXP is the expression that is a call to the builtin function; if
6269 convenient, the result should be placed in TARGET. */
6270 static rtx
6271 expand_builtin_signbit (tree exp, rtx target)
6272 {
6273 const struct real_format *fmt;
6274 scalar_float_mode fmode;
6275 scalar_int_mode rmode, imode;
6276 tree arg;
6277 int word, bitpos;
6278 enum insn_code icode;
6279 rtx temp;
6280 location_t loc = EXPR_LOCATION (exp);
6281
6282 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6283 return NULL_RTX;
6284
6285 arg = CALL_EXPR_ARG (exp, 0);
6286 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6287 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6288 fmt = REAL_MODE_FORMAT (fmode);
6289
6290 arg = builtin_save_expr (arg);
6291
6292 /* Expand the argument yielding a RTX expression. */
6293 temp = expand_normal (arg);
6294
6295 /* Check if the back end provides an insn that handles signbit for the
6296 argument's mode. */
6297 icode = optab_handler (signbit_optab, fmode);
6298 if (icode != CODE_FOR_nothing)
6299 {
6300 rtx_insn *last = get_last_insn ();
6301 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6302 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
6303 return target;
6304 delete_insns_since (last);
6305 }
6306
6307 /* For floating point formats without a sign bit, implement signbit
6308 as "ARG < 0.0". */
6309 bitpos = fmt->signbit_ro;
6310 if (bitpos < 0)
6311 {
6312 /* But we can't do this if the format supports signed zero. */
6313 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6314
6315 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6316 build_real (TREE_TYPE (arg), dconst0));
6317 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6318 }
6319
6320 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6321 {
6322 imode = int_mode_for_mode (fmode).require ();
6323 temp = gen_lowpart (imode, temp);
6324 }
6325 else
6326 {
6327 imode = word_mode;
6328 /* Handle targets with different FP word orders. */
6329 if (FLOAT_WORDS_BIG_ENDIAN)
6330 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6331 else
6332 word = bitpos / BITS_PER_WORD;
6333 temp = operand_subword_force (temp, word, fmode);
6334 bitpos = bitpos % BITS_PER_WORD;
6335 }
6336
6337 /* Force the intermediate word_mode (or narrower) result into a
6338 register. This avoids attempting to create paradoxical SUBREGs
6339 of floating point modes below. */
6340 temp = force_reg (imode, temp);
6341
6342 /* If the bitpos is within the "result mode" lowpart, the operation
6343 can be implemented with a single bitwise AND. Otherwise, we need
6344 a right shift and an AND. */
6345
6346 if (bitpos < GET_MODE_BITSIZE (rmode))
6347 {
6348 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6349
6350 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6351 temp = gen_lowpart (rmode, temp);
6352 temp = expand_binop (rmode, and_optab, temp,
6353 immed_wide_int_const (mask, rmode),
6354 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6355 }
6356 else
6357 {
6358 /* Perform a logical right shift to place the signbit in the least
6359 significant bit, then truncate the result to the desired mode
6360 and mask just this bit. */
6361 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6362 temp = gen_lowpart (rmode, temp);
6363 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6364 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6365 }
6366
6367 return temp;
6368 }
6369
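/* Editorial illustration (not part of the original source): on a typical
   64-bit little-endian target, signbit (double x) has signbit_ro == 63,
   which does not fit in the SImode result, so the shift-then-mask branch
   is used, roughly

     result = (int) ((bits_of_x >> 63) & 1);

   whereas signbit (float x) has signbit_ro == 31, which fits, so a single
   AND with 0x80000000 suffices and the caller receives a zero/nonzero
   value rather than exactly 0 or 1.  */
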
6370 /* Expand fork or exec calls. TARGET is the desired target of the
6371 call. EXP is the call. FN is the identifier of the
6372 actual function. IGNORE is nonzero if the
6373 value is to be ignored. */
6374
6375 static rtx
6376 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6377 {
6378 tree id, decl;
6379 tree call;
6380
6381 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
6382 {
6383 /* Detect unterminated path. */
6384 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6385 return NULL_RTX;
6386
6387 /* Also detect unterminated first argument. */
6388 switch (DECL_FUNCTION_CODE (fn))
6389 {
6390 case BUILT_IN_EXECL:
6391 case BUILT_IN_EXECLE:
6392 case BUILT_IN_EXECLP:
6393 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6394 return NULL_RTX;
6395 default:
6396 break;
6397 }
6398 }
6399
6400
6401 /* If we are not profiling, just call the function. */
6402 if (!profile_arc_flag)
6403 return NULL_RTX;
6404
6405 /* Otherwise call the wrapper. This should be equivalent for the rest of
6406 compiler, so the code does not diverge, and the wrapper may run the
6407 code necessary for keeping the profiling sane. */
6408
6409 switch (DECL_FUNCTION_CODE (fn))
6410 {
6411 case BUILT_IN_FORK:
6412 id = get_identifier ("__gcov_fork");
6413 break;
6414
6415 case BUILT_IN_EXECL:
6416 id = get_identifier ("__gcov_execl");
6417 break;
6418
6419 case BUILT_IN_EXECV:
6420 id = get_identifier ("__gcov_execv");
6421 break;
6422
6423 case BUILT_IN_EXECLP:
6424 id = get_identifier ("__gcov_execlp");
6425 break;
6426
6427 case BUILT_IN_EXECLE:
6428 id = get_identifier ("__gcov_execle");
6429 break;
6430
6431 case BUILT_IN_EXECVP:
6432 id = get_identifier ("__gcov_execvp");
6433 break;
6434
6435 case BUILT_IN_EXECVE:
6436 id = get_identifier ("__gcov_execve");
6437 break;
6438
6439 default:
6440 gcc_unreachable ();
6441 }
6442
6443 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6444 FUNCTION_DECL, id, TREE_TYPE (fn));
6445 DECL_EXTERNAL (decl) = 1;
6446 TREE_PUBLIC (decl) = 1;
6447 DECL_ARTIFICIAL (decl) = 1;
6448 TREE_NOTHROW (decl) = 1;
6449 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6450 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6451 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6452 return expand_call (call, target, ignore);
6453 }
6454
6455
6456 \f
6457 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6458 the pointer in these functions is void*, the tree optimizers may remove
6459 casts. The mode computed in expand_builtin isn't reliable either, due
6460 to __sync_bool_compare_and_swap.
6461
6462 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6463 group of builtins. This gives us log2 of the mode size. */
6464
6465 static inline machine_mode
6466 get_builtin_sync_mode (int fcode_diff)
6467 {
6468 /* The size is not negotiable, so ask not to get BLKmode in return
6469 if the target indicates that a smaller size would be better. */
6470 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6471 }
6472
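/* Editorial illustration (not part of the original source): FCODE_DIFF is
   log2 of the access size in bytes, so for example

     __sync_fetch_and_add_4  ->  fcode_diff 2  ->  32-bit integer mode
     __sync_fetch_and_add_8  ->  fcode_diff 3  ->  64-bit integer mode

   i.e. BITS_PER_UNIT << 2 = 32 and BITS_PER_UNIT << 3 = 64 bits, with
   BLKmode explicitly refused even if the target would prefer a smaller
   size.  */
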
6473 /* Expand the memory expression LOC and return the appropriate memory operand
6474 for the builtin_sync operations. */
6475
6476 static rtx
6477 get_builtin_sync_mem (tree loc, machine_mode mode)
6478 {
6479 rtx addr, mem;
6480 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6481 ? TREE_TYPE (TREE_TYPE (loc))
6482 : TREE_TYPE (loc));
6483 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6484
6485 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6486 addr = convert_memory_address (addr_mode, addr);
6487
6488 /* Note that we explicitly do not want any alias information for this
6489 memory, so that we kill all other live memories. Otherwise we don't
6490 satisfy the full barrier semantics of the intrinsic. */
6491 mem = gen_rtx_MEM (mode, addr);
6492
6493 set_mem_addr_space (mem, addr_space);
6494
6495 mem = validize_mem (mem);
6496
6497 /* The alignment needs to be at least that of the mode. */
6498 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6499 get_pointer_alignment (loc)));
6500 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6501 MEM_VOLATILE_P (mem) = 1;
6502
6503 return mem;
6504 }
6505
6506 /* Make sure an argument is in the right mode.
6507 EXP is the tree argument.
6508 MODE is the mode it should be in. */
6509
6510 static rtx
6511 expand_expr_force_mode (tree exp, machine_mode mode)
6512 {
6513 rtx val;
6514 machine_mode old_mode;
6515
6516 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6517 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6518 of CONST_INTs, where we know the old_mode only from the call argument. */
6519
6520 old_mode = GET_MODE (val);
6521 if (old_mode == VOIDmode)
6522 old_mode = TYPE_MODE (TREE_TYPE (exp));
6523 val = convert_modes (mode, old_mode, val, 1);
6524 return val;
6525 }
6526
6527
6528 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6529 EXP is the CALL_EXPR. CODE is the rtx code
6530 that corresponds to the arithmetic or logical operation from the name;
6531 an exception here is that NOT actually means NAND. TARGET is an optional
6532 place for us to store the results; AFTER is true if this is the
6533 xxx_and_fetch form, i.e. the value after the operation is returned. */
6534
6535 static rtx
6536 expand_builtin_sync_operation (machine_mode mode, tree exp,
6537 enum rtx_code code, bool after,
6538 rtx target)
6539 {
6540 rtx val, mem;
6541 location_t loc = EXPR_LOCATION (exp);
6542
6543 if (code == NOT && warn_sync_nand)
6544 {
6545 tree fndecl = get_callee_fndecl (exp);
6546 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6547
6548 static bool warned_f_a_n, warned_n_a_f;
6549
6550 switch (fcode)
6551 {
6552 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6553 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6554 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6555 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6556 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6557 if (warned_f_a_n)
6558 break;
6559
6560 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6561 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6562 warned_f_a_n = true;
6563 break;
6564
6565 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6566 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6567 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6568 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6569 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6570 if (warned_n_a_f)
6571 break;
6572
6573 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6574 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6575 warned_n_a_f = true;
6576 break;
6577
6578 default:
6579 gcc_unreachable ();
6580 }
6581 }
6582
6583 /* Expand the operands. */
6584 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6585 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6586
6587 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6588 after);
6589 }
6590
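/* Editorial illustration (not part of the original source): a call like

     int
     bump (int *counter)
     {
       return __sync_fetch_and_add (counter, 1);
     }

   reaches this expander with CODE == PLUS and returns the value the
   counter held before the addition, while __sync_add_and_fetch returns
   the updated value; both use the full-barrier MEMMODEL_SYNC_SEQ_CST
   model.  */
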
6591 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6592 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6593 true if this is the boolean form. TARGET is a place for us to store the
6594 results; this is NOT optional if IS_BOOL is true. */
6595
6596 static rtx
6597 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6598 bool is_bool, rtx target)
6599 {
6600 rtx old_val, new_val, mem;
6601 rtx *pbool, *poval;
6602
6603 /* Expand the operands. */
6604 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6605 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6606 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6607
6608 pbool = poval = NULL;
6609 if (target != const0_rtx)
6610 {
6611 if (is_bool)
6612 pbool = &target;
6613 else
6614 poval = &target;
6615 }
6616 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6617 false, MEMMODEL_SYNC_SEQ_CST,
6618 MEMMODEL_SYNC_SEQ_CST))
6619 return NULL_RTX;
6620
6621 return target;
6622 }
6623
6624 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6625 general form is actually an atomic exchange, and some targets only
6626 support a reduced form with the second argument being a constant 1.
6627 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6628 the results. */
6629
6630 static rtx
6631 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6632 rtx target)
6633 {
6634 rtx val, mem;
6635
6636 /* Expand the operands. */
6637 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6638 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6639
6640 return expand_sync_lock_test_and_set (target, mem, val);
6641 }
6642
6643 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6644
6645 static void
6646 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6647 {
6648 rtx mem;
6649
6650 /* Expand the operands. */
6651 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6652
6653 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6654 }
6655
6656 /* Given an integer representing an ``enum memmodel'', verify its
6657 correctness and return the memory model enum. */
6658
6659 static enum memmodel
6660 get_memmodel (tree exp)
6661 {
6662 rtx op;
6663 unsigned HOST_WIDE_INT val;
6664 location_t loc
6665 = expansion_point_location_if_in_system_header (input_location);
6666
6667 /* If the parameter is not a constant, it's a run time value so we'll just
6668 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6669 if (TREE_CODE (exp) != INTEGER_CST)
6670 return MEMMODEL_SEQ_CST;
6671
6672 op = expand_normal (exp);
6673
6674 val = INTVAL (op);
6675 if (targetm.memmodel_check)
6676 val = targetm.memmodel_check (val);
6677 else if (val & ~MEMMODEL_MASK)
6678 {
6679 warning_at (loc, OPT_Winvalid_memory_model,
6680 "unknown architecture specifier in memory model to builtin");
6681 return MEMMODEL_SEQ_CST;
6682 }
6683
6684 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
6685 if (memmodel_base (val) >= MEMMODEL_LAST)
6686 {
6687 warning_at (loc, OPT_Winvalid_memory_model,
6688 "invalid memory model argument to builtin");
6689 return MEMMODEL_SEQ_CST;
6690 }
6691
6692 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6693 be conservative and promote consume to acquire. */
6694 if (val == MEMMODEL_CONSUME)
6695 val = MEMMODEL_ACQUIRE;
6696
6697 return (enum memmodel) val;
6698 }
6699
6700 /* Expand the __atomic_exchange intrinsic:
6701 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6702 EXP is the CALL_EXPR.
6703 TARGET is an optional place for us to store the results. */
6704
6705 static rtx
6706 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6707 {
6708 rtx val, mem;
6709 enum memmodel model;
6710
6711 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6712
6713 if (!flag_inline_atomics)
6714 return NULL_RTX;
6715
6716 /* Expand the operands. */
6717 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6718 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6719
6720 return expand_atomic_exchange (target, mem, val, model);
6721 }
6722
6723 /* Expand the __atomic_compare_exchange intrinsic:
6724 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6725 TYPE desired, BOOL weak,
6726 enum memmodel success,
6727 enum memmodel failure)
6728 EXP is the CALL_EXPR.
6729 TARGET is an optional place for us to store the results. */
6730
6731 static rtx
6732 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6733 rtx target)
6734 {
6735 rtx expect, desired, mem, oldval;
6736 rtx_code_label *label;
6737 enum memmodel success, failure;
6738 tree weak;
6739 bool is_weak;
6740 location_t loc
6741 = expansion_point_location_if_in_system_header (input_location);
6742
6743 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6744 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6745
6746 if (failure > success)
6747 {
6748 warning_at (loc, OPT_Winvalid_memory_model,
6749 "failure memory model cannot be stronger than success "
6750 "memory model for %<__atomic_compare_exchange%>");
6751 success = MEMMODEL_SEQ_CST;
6752 }
6753
6754 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6755 {
6756 warning_at (loc, OPT_Winvalid_memory_model,
6757 "invalid failure memory model for "
6758 "%<__atomic_compare_exchange%>");
6759 failure = MEMMODEL_SEQ_CST;
6760 success = MEMMODEL_SEQ_CST;
6761 }
6762
6763
6764 if (!flag_inline_atomics)
6765 return NULL_RTX;
6766
6767 /* Expand the operands. */
6768 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6769
6770 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6771 expect = convert_memory_address (Pmode, expect);
6772 expect = gen_rtx_MEM (mode, expect);
6773 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6774
6775 weak = CALL_EXPR_ARG (exp, 3);
6776 is_weak = false;
6777 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6778 is_weak = true;
6779
6780 if (target == const0_rtx)
6781 target = NULL;
6782
6783 /* Lest the rtl backend create a race condition with an improper store
6784 to memory, always create a new pseudo for OLDVAL. */
6785 oldval = NULL;
6786
6787 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6788 is_weak, success, failure))
6789 return NULL_RTX;
6790
6791 /* Conditionally store back to EXPECT, lest we create a race condition
6792 with an improper store to memory. */
6793 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6794 the normal case where EXPECT is totally private, i.e. a register. At
6795 which point the store can be unconditional. */
6796 label = gen_label_rtx ();
6797 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6798 GET_MODE (target), 1, label);
6799 emit_move_insn (expect, oldval);
6800 emit_label (label);
6801
6802 return target;
6803 }
6804
6805 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6806 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6807 call. The weak parameter must be dropped to match the expected parameter
6808 list and the expected argument changed from value to pointer to memory
6809 slot. */
6810
6811 static void
6812 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6813 {
6814 unsigned int z;
6815 vec<tree, va_gc> *vec;
6816
6817 vec_alloc (vec, 5);
6818 vec->quick_push (gimple_call_arg (call, 0));
6819 tree expected = gimple_call_arg (call, 1);
6820 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6821 TREE_TYPE (expected));
6822 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6823 if (expd != x)
6824 emit_move_insn (x, expd);
6825 tree v = make_tree (TREE_TYPE (expected), x);
6826 vec->quick_push (build1 (ADDR_EXPR,
6827 build_pointer_type (TREE_TYPE (expected)), v));
6828 vec->quick_push (gimple_call_arg (call, 2));
6829 /* Skip the boolean weak parameter. */
6830 for (z = 4; z < 6; z++)
6831 vec->quick_push (gimple_call_arg (call, z));
6832 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6833 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6834 gcc_assert (bytes_log2 < 5);
6835 built_in_function fncode
6836 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6837 + bytes_log2);
6838 tree fndecl = builtin_decl_explicit (fncode);
6839 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6840 fndecl);
6841 tree exp = build_call_vec (boolean_type_node, fn, vec);
6842 tree lhs = gimple_call_lhs (call);
6843 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6844 if (lhs)
6845 {
6846 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6847 if (GET_MODE (boolret) != mode)
6848 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6849 x = force_reg (mode, x);
6850 write_complex_part (target, boolret, true);
6851 write_complex_part (target, x, false);
6852 }
6853 }
6854
6855 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6856
6857 void
6858 expand_ifn_atomic_compare_exchange (gcall *call)
6859 {
6860 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6861 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6862 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6863 rtx expect, desired, mem, oldval, boolret;
6864 enum memmodel success, failure;
6865 tree lhs;
6866 bool is_weak;
6867 location_t loc
6868 = expansion_point_location_if_in_system_header (gimple_location (call));
6869
6870 success = get_memmodel (gimple_call_arg (call, 4));
6871 failure = get_memmodel (gimple_call_arg (call, 5));
6872
6873 if (failure > success)
6874 {
6875 warning_at (loc, OPT_Winvalid_memory_model,
6876 "failure memory model cannot be stronger than success "
6877 "memory model for %<__atomic_compare_exchange%>");
6878 success = MEMMODEL_SEQ_CST;
6879 }
6880
6881 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6882 {
6883 warning_at (loc, OPT_Winvalid_memory_model,
6884 "invalid failure memory model for "
6885 "%<__atomic_compare_exchange%>");
6886 failure = MEMMODEL_SEQ_CST;
6887 success = MEMMODEL_SEQ_CST;
6888 }
6889
6890 if (!flag_inline_atomics)
6891 {
6892 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6893 return;
6894 }
6895
6896 /* Expand the operands. */
6897 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6898
6899 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6900 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6901
6902 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6903
6904 boolret = NULL;
6905 oldval = NULL;
6906
6907 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6908 is_weak, success, failure))
6909 {
6910 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6911 return;
6912 }
6913
6914 lhs = gimple_call_lhs (call);
6915 if (lhs)
6916 {
6917 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6918 if (GET_MODE (boolret) != mode)
6919 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6920 write_complex_part (target, boolret, true);
6921 write_complex_part (target, oldval, false);
6922 }
6923 }
6924
6925 /* Expand the __atomic_load intrinsic:
6926 TYPE __atomic_load (TYPE *object, enum memmodel)
6927 EXP is the CALL_EXPR.
6928 TARGET is an optional place for us to store the results. */
6929
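/* Illustrative sketch only: RELEASE and ACQ_REL are not valid models for a
   load, which is what the check below enforces.

     int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);   // OK
     int w = __atomic_load_n (&x, __ATOMIC_RELEASE);   // warned; SEQ_CST used  */
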
6930 static rtx
6931 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6932 {
6933 rtx mem;
6934 enum memmodel model;
6935
6936 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6937 if (is_mm_release (model) || is_mm_acq_rel (model))
6938 {
6939 location_t loc
6940 = expansion_point_location_if_in_system_header (input_location);
6941 warning_at (loc, OPT_Winvalid_memory_model,
6942 "invalid memory model for %<__atomic_load%>");
6943 model = MEMMODEL_SEQ_CST;
6944 }
6945
6946 if (!flag_inline_atomics)
6947 return NULL_RTX;
6948
6949 /* Expand the operand. */
6950 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6951
6952 return expand_atomic_load (target, mem, model);
6953 }
6954
6955
6956 /* Expand the __atomic_store intrinsic:
6957 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6958 EXP is the CALL_EXPR. */
6960
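/* Illustrative sketch only: only RELAXED, RELEASE and SEQ_CST are valid
   models for a store, which is what the check below enforces.

     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);   // OK
     __atomic_store_n (&x, 1, __ATOMIC_ACQUIRE);   // warned; SEQ_CST used  */
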
6961 static rtx
6962 expand_builtin_atomic_store (machine_mode mode, tree exp)
6963 {
6964 rtx mem, val;
6965 enum memmodel model;
6966
6967 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6968 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6969 || is_mm_release (model)))
6970 {
6971 location_t loc
6972 = expansion_point_location_if_in_system_header (input_location);
6973 warning_at (loc, OPT_Winvalid_memory_model,
6974 "invalid memory model for %<__atomic_store%>");
6975 model = MEMMODEL_SEQ_CST;
6976 }
6977
6978 if (!flag_inline_atomics)
6979 return NULL_RTX;
6980
6981 /* Expand the operands. */
6982 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6983 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6984
6985 return expand_atomic_store (mem, val, model, false);
6986 }
6987
6988 /* Expand the __atomic_fetch_XXX intrinsic:
6989 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6990 EXP is the CALL_EXPR.
6991 TARGET is an optional place for us to store the results.
6992 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
6993 FETCH_AFTER is true if returning the result of the operation.
6994 FETCH_AFTER is false if returning the value before the operation.
6995 IGNORE is true if the result is not used.
6996 EXT_CALL is the correct builtin for an external call if this cannot be
6997 resolved to an instruction sequence. */
6998
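/* Illustrative sketch only: when no inline sequence exists, e.g. for an
   __atomic_add_fetch the target cannot expand, EXT_CALL names the
   corresponding "fetch-and-op" library routine, which returns the OLD value.
   The arithmetic correction emitted at the end of this function then
   recovers the NEW value:

     ret = __atomic_fetch_add_4 (p, val, model);  // library: returns old value
     ret = ret + val;                             // re-apply op => new value

   and for NAND (CODE == NOT) the correction is ret = ~(ret & val).  */
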
6999 static rtx
7000 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
7001 enum rtx_code code, bool fetch_after,
7002 bool ignore, enum built_in_function ext_call)
7003 {
7004 rtx val, mem, ret;
7005 enum memmodel model;
7006 tree fndecl;
7007 tree addr;
7008
7009 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7010
7011 /* Expand the operands. */
7012 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7013 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7014
7015 /* Only try generating instructions if inlining is turned on. */
7016 if (flag_inline_atomics)
7017 {
7018 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
7019 if (ret)
7020 return ret;
7021 }
7022
7023 /* Return if a different routine isn't needed for the library call. */
7024 if (ext_call == BUILT_IN_NONE)
7025 return NULL_RTX;
7026
7027 /* Change the call to the specified function. */
7028 fndecl = get_callee_fndecl (exp);
7029 addr = CALL_EXPR_FN (exp);
7030 STRIP_NOPS (addr);
7031
7032 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
7033 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
7034
7035 /* If we will emit code after the call, the call cannot be a tail call.
7036 If it is emitted as a tail call, a barrier is emitted after it, and
7037 then all trailing code is removed. */
7038 if (!ignore)
7039 CALL_EXPR_TAILCALL (exp) = 0;
7040
7041 /* Expand the call here so we can emit trailing code. */
7042 ret = expand_call (exp, target, ignore);
7043
7044 /* Replace the original function just in case it matters. */
7045 TREE_OPERAND (addr, 0) = fndecl;
7046
7047 /* Then issue the arithmetic correction to return the right result. */
7048 if (!ignore)
7049 {
7050 if (code == NOT)
7051 {
7052 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
7053 OPTAB_LIB_WIDEN);
7054 ret = expand_simple_unop (mode, NOT, ret, target, true);
7055 }
7056 else
7057 ret = expand_simple_binop (mode, code, ret, val, target, true,
7058 OPTAB_LIB_WIDEN);
7059 }
7060 return ret;
7061 }
7062
7063 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7064
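/* Illustrative sketch only: these internal functions are created by earlier
   tree passes from idioms such as

     bool was_set = (__atomic_fetch_or (p, 1u << bit, __ATOMIC_SEQ_CST)
                     >> bit) & 1;

   so that targets with a direct atomic bit-test-and-set (or -complement,
   -reset) instruction can use it; otherwise the code below falls back to the
   fetch_op plus shift/mask sequence it replaced.  */
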
7065 void
7066 expand_ifn_atomic_bit_test_and (gcall *call)
7067 {
7068 tree ptr = gimple_call_arg (call, 0);
7069 tree bit = gimple_call_arg (call, 1);
7070 tree flag = gimple_call_arg (call, 2);
7071 tree lhs = gimple_call_lhs (call);
7072 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7073 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7074 enum rtx_code code;
7075 optab optab;
7076 class expand_operand ops[5];
7077
7078 gcc_assert (flag_inline_atomics);
7079
7080 if (gimple_call_num_args (call) == 4)
7081 model = get_memmodel (gimple_call_arg (call, 3));
7082
7083 rtx mem = get_builtin_sync_mem (ptr, mode);
7084 rtx val = expand_expr_force_mode (bit, mode);
7085
7086 switch (gimple_call_internal_fn (call))
7087 {
7088 case IFN_ATOMIC_BIT_TEST_AND_SET:
7089 code = IOR;
7090 optab = atomic_bit_test_and_set_optab;
7091 break;
7092 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7093 code = XOR;
7094 optab = atomic_bit_test_and_complement_optab;
7095 break;
7096 case IFN_ATOMIC_BIT_TEST_AND_RESET:
7097 code = AND;
7098 optab = atomic_bit_test_and_reset_optab;
7099 break;
7100 default:
7101 gcc_unreachable ();
7102 }
7103
7104 if (lhs == NULL_TREE)
7105 {
7106 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7107 val, NULL_RTX, true, OPTAB_DIRECT);
7108 if (code == AND)
7109 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7110 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
7111 return;
7112 }
7113
7114 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7115 enum insn_code icode = direct_optab_handler (optab, mode);
7116 gcc_assert (icode != CODE_FOR_nothing);
7117 create_output_operand (&ops[0], target, mode);
7118 create_fixed_operand (&ops[1], mem);
7119 create_convert_operand_to (&ops[2], val, mode, true);
7120 create_integer_operand (&ops[3], model);
7121 create_integer_operand (&ops[4], integer_onep (flag));
7122 if (maybe_expand_insn (icode, 5, ops))
7123 return;
7124
7125 rtx bitval = val;
7126 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7127 val, NULL_RTX, true, OPTAB_DIRECT);
7128 rtx maskval = val;
7129 if (code == AND)
7130 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7131 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7132 code, model, false);
7133 if (integer_onep (flag))
7134 {
7135 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7136 NULL_RTX, true, OPTAB_DIRECT);
7137 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7138 true, OPTAB_DIRECT);
7139 }
7140 else
7141 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7142 OPTAB_DIRECT);
7143 if (result != target)
7144 emit_move_insn (target, result);
7145 }
7146
7147 /* Expand an atomic clear operation.
7148 void __atomic_clear (BOOL *obj, enum memmodel)
7149 EXP is the call expression. */
7150
7151 static rtx
7152 expand_builtin_atomic_clear (tree exp)
7153 {
7154 machine_mode mode;
7155 rtx mem, ret;
7156 enum memmodel model;
7157
7158 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7159 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7160 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7161
7162 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7163 {
7164 location_t loc
7165 = expansion_point_location_if_in_system_header (input_location);
7166 warning_at (loc, OPT_Winvalid_memory_model,
7167 "invalid memory model for %<__atomic_store%>");
7168 model = MEMMODEL_SEQ_CST;
7169 }
7170
7171 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7172 Failing that, a store is issued by __atomic_store. The only way this can
7173 fail is if the bool type is larger than a word size. Unlikely, but
7174 handle it anyway for completeness. Assume a single threaded model since
7175 there is no atomic support in this case, and no barriers are required. */
7176 ret = expand_atomic_store (mem, const0_rtx, model, true);
7177 if (!ret)
7178 emit_move_insn (mem, const0_rtx);
7179 return const0_rtx;
7180 }
7181
7182 /* Expand an atomic test_and_set operation.
7183 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7184 EXP is the call expression. */
7185
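/* Illustrative sketch only: together with __atomic_clear above, this is
   enough for a minimal spinlock (names are hypothetical):

     static unsigned char lock;     // one bool-sized flag

     void acquire (void)
     {
       while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
         ;                          // spin while the flag was already set
     }

     void release (void)
     {
       __atomic_clear (&lock, __ATOMIC_RELEASE);
     }  */
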
7186 static rtx
7187 expand_builtin_atomic_test_and_set (tree exp, rtx target)
7188 {
7189 rtx mem;
7190 enum memmodel model;
7191 machine_mode mode;
7192
7193 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7194 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7195 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7196
7197 return expand_atomic_test_and_set (target, mem, model);
7198 }
7199
7200
7201 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7202 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7203
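/* Illustrative sketch only: both user-level forms are folded through here.

     __atomic_always_lock_free (sizeof (int), 0);         // typical alignment
     __atomic_always_lock_free (sizeof (long long), &v);  // alignment of V's type

   Callers may also pass a "fake" constant pointer whose least significant
   set bit encodes the object's alignment (e.g. (void *) 4 for a 4-byte
   aligned object); the INTEGER_CST branch below decodes that.  */
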
7204 static tree
7205 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7206 {
7207 int size;
7208 machine_mode mode;
7209 unsigned int mode_align, type_align;
7210
7211 if (TREE_CODE (arg0) != INTEGER_CST)
7212 return NULL_TREE;
7213
7214 /* We need a corresponding integer mode for the access to be lock-free. */
7215 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7216 if (!int_mode_for_size (size, 0).exists (&mode))
7217 return boolean_false_node;
7218
7219 mode_align = GET_MODE_ALIGNMENT (mode);
7220
7221 if (TREE_CODE (arg1) == INTEGER_CST)
7222 {
7223 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7224
7225 /* Either this argument is null, or it's a fake pointer encoding
7226 the alignment of the object. */
7227 val = least_bit_hwi (val);
7228 val *= BITS_PER_UNIT;
7229
7230 if (val == 0 || mode_align < val)
7231 type_align = mode_align;
7232 else
7233 type_align = val;
7234 }
7235 else
7236 {
7237 tree ttype = TREE_TYPE (arg1);
7238
7239 /* This function is usually invoked and folded immediately by the front
7240 end before anything else has a chance to look at it. The pointer
7241 parameter at this point is usually cast to a void *, so check for that
7242 and look past the cast. */
7243 if (CONVERT_EXPR_P (arg1)
7244 && POINTER_TYPE_P (ttype)
7245 && VOID_TYPE_P (TREE_TYPE (ttype))
7246 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7247 arg1 = TREE_OPERAND (arg1, 0);
7248
7249 ttype = TREE_TYPE (arg1);
7250 gcc_assert (POINTER_TYPE_P (ttype));
7251
7252 /* Get the underlying type of the object. */
7253 ttype = TREE_TYPE (ttype);
7254 type_align = TYPE_ALIGN (ttype);
7255 }
7256
7257 /* If the object has smaller alignment, the lock free routines cannot
7258 be used. */
7259 if (type_align < mode_align)
7260 return boolean_false_node;
7261
7262 /* Check if a compare_and_swap pattern exists for the mode which represents
7263 the required size. The pattern is not allowed to fail, so the existence
7264 of the pattern indicates support is present. Also require that an
7265 atomic load exists for the required size. */
7266 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7267 return boolean_true_node;
7268 else
7269 return boolean_false_node;
7270 }
7271
7272 /* Return true if the parameters to call EXP represent an object which will
7273 always generate lock free instructions. The first argument represents the
7274 size of the object, and the second parameter is a pointer to the object
7275 itself. If NULL is passed for the object, then the result is based on
7276 typical alignment for an object of the specified size. Otherwise return
7277 false. */
7278
7279 static rtx
7280 expand_builtin_atomic_always_lock_free (tree exp)
7281 {
7282 tree size;
7283 tree arg0 = CALL_EXPR_ARG (exp, 0);
7284 tree arg1 = CALL_EXPR_ARG (exp, 1);
7285
7286 if (TREE_CODE (arg0) != INTEGER_CST)
7287 {
7288 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7289 return const0_rtx;
7290 }
7291
7292 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7293 if (size == boolean_true_node)
7294 return const1_rtx;
7295 return const0_rtx;
7296 }
7297
7298 /* Return one or zero if it can be determined that object ARG1 of size ARG0
7299 is lock free on this architecture. */
7300
7301 static tree
7302 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7303 {
7304 if (!flag_inline_atomics)
7305 return NULL_TREE;
7306
7307 /* If it isn't always lock free, don't generate a result. */
7308 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7309 return boolean_true_node;
7310
7311 return NULL_TREE;
7312 }
7313
7314 /* Return true if the parameters to call EXP represent an object which will
7315 always generate lock free instructions. The first argument represents the
7316 size of the object, and the second parameter is a pointer to the object
7317 itself. If NULL is passed for the object, then the result is based on
7318 typical alignment for an object of the specified size. Otherwise return
7319 NULL. */
7320
7321 static rtx
7322 expand_builtin_atomic_is_lock_free (tree exp)
7323 {
7324 tree size;
7325 tree arg0 = CALL_EXPR_ARG (exp, 0);
7326 tree arg1 = CALL_EXPR_ARG (exp, 1);
7327
7328 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7329 {
7330 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7331 return NULL_RTX;
7332 }
7333
7334 if (!flag_inline_atomics)
7335 return NULL_RTX;
7336
7337 /* If the value is known at compile time, return the RTX for it. */
7338 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7339 if (size == boolean_true_node)
7340 return const1_rtx;
7341
7342 return NULL_RTX;
7343 }
7344
7345 /* Expand the __atomic_thread_fence intrinsic:
7346 void __atomic_thread_fence (enum memmodel)
7347 EXP is the CALL_EXPR. */
7348
7349 static void
7350 expand_builtin_atomic_thread_fence (tree exp)
7351 {
7352 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7353 expand_mem_thread_fence (model);
7354 }
7355
7356 /* Expand the __atomic_signal_fence intrinsic:
7357 void __atomic_signal_fence (enum memmodel)
7358 EXP is the CALL_EXPR. */
7359
7360 static void
7361 expand_builtin_atomic_signal_fence (tree exp)
7362 {
7363 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7364 expand_mem_signal_fence (model);
7365 }
7366
7367 /* Expand the __sync_synchronize intrinsic. */
7368
7369 static void
7370 expand_builtin_sync_synchronize (void)
7371 {
7372 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7373 }
7374
7375 static rtx
7376 expand_builtin_thread_pointer (tree exp, rtx target)
7377 {
7378 enum insn_code icode;
7379 if (!validate_arglist (exp, VOID_TYPE))
7380 return const0_rtx;
7381 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7382 if (icode != CODE_FOR_nothing)
7383 {
7384 class expand_operand op;
7385 /* If the target is not suitable then create a new target. */
7386 if (target == NULL_RTX
7387 || !REG_P (target)
7388 || GET_MODE (target) != Pmode)
7389 target = gen_reg_rtx (Pmode);
7390 create_output_operand (&op, target, Pmode);
7391 expand_insn (icode, 1, &op);
7392 return target;
7393 }
7394 error ("%<__builtin_thread_pointer%> is not supported on this target");
7395 return const0_rtx;
7396 }
7397
7398 static void
7399 expand_builtin_set_thread_pointer (tree exp)
7400 {
7401 enum insn_code icode;
7402 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7403 return;
7404 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7405 if (icode != CODE_FOR_nothing)
7406 {
7407 class expand_operand op;
7408 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7409 Pmode, EXPAND_NORMAL);
7410 create_input_operand (&op, val, Pmode);
7411 expand_insn (icode, 1, &op);
7412 return;
7413 }
7414 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7415 }
7416
7417 \f
7418 /* Emit code to restore the current value of stack. */
7419
7420 static void
7421 expand_stack_restore (tree var)
7422 {
7423 rtx_insn *prev;
7424 rtx sa = expand_normal (var);
7425
7426 sa = convert_memory_address (Pmode, sa);
7427
7428 prev = get_last_insn ();
7429 emit_stack_restore (SAVE_BLOCK, sa);
7430
7431 record_new_stack_level ();
7432
7433 fixup_args_size_notes (prev, get_last_insn (), 0);
7434 }
7435
7436 /* Emit code to save the current value of stack. */
7437
7438 static rtx
7439 expand_stack_save (void)
7440 {
7441 rtx ret = NULL_RTX;
7442
7443 emit_stack_save (SAVE_BLOCK, &ret);
7444 return ret;
7445 }
7446
7447 /* Emit code to get the OpenACC gang, worker or vector id or size. */
7448
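/* Illustrative sketch only: inside offloaded OpenACC code one can ask, e.g.

     int gang  = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);
     int width = __builtin_goacc_parlevel_size (GOMP_DIM_VECTOR);

   where the argument is one of the integer dimension selectors from
   gomp-constants.h.  Outside OpenACC code, or with a non-constant or
   out-of-range dimension, the expander below emits an error instead.  */
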
7449 static rtx
7450 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7451 {
7452 const char *name;
7453 rtx fallback_retval;
7454 rtx_insn *(*gen_fn) (rtx, rtx);
7455 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7456 {
7457 case BUILT_IN_GOACC_PARLEVEL_ID:
7458 name = "__builtin_goacc_parlevel_id";
7459 fallback_retval = const0_rtx;
7460 gen_fn = targetm.gen_oacc_dim_pos;
7461 break;
7462 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7463 name = "__builtin_goacc_parlevel_size";
7464 fallback_retval = const1_rtx;
7465 gen_fn = targetm.gen_oacc_dim_size;
7466 break;
7467 default:
7468 gcc_unreachable ();
7469 }
7470
7471 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7472 {
7473 error ("%qs only supported in OpenACC code", name);
7474 return const0_rtx;
7475 }
7476
7477 tree arg = CALL_EXPR_ARG (exp, 0);
7478 if (TREE_CODE (arg) != INTEGER_CST)
7479 {
7480 error ("non-constant argument 0 to %qs", name);
7481 return const0_rtx;
7482 }
7483
7484 int dim = TREE_INT_CST_LOW (arg);
7485 switch (dim)
7486 {
7487 case GOMP_DIM_GANG:
7488 case GOMP_DIM_WORKER:
7489 case GOMP_DIM_VECTOR:
7490 break;
7491 default:
7492 error ("illegal argument 0 to %qs", name);
7493 return const0_rtx;
7494 }
7495
7496 if (ignore)
7497 return target;
7498
7499 if (target == NULL_RTX)
7500 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7501
7502 if (!targetm.have_oacc_dim_size ())
7503 {
7504 emit_move_insn (target, fallback_retval);
7505 return target;
7506 }
7507
7508 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7509 emit_insn (gen_fn (reg, GEN_INT (dim)));
7510 if (reg != target)
7511 emit_move_insn (target, reg);
7512
7513 return target;
7514 }
7515
7516 /* Expand a string compare operation using a sequence of char comparisons
7517 to get rid of the calling overhead, with result going to TARGET if
7518 that's convenient.
7519
7520 VAR_STR is the variable string source;
7521 CONST_STR is the constant string source;
7522 LENGTH is the number of chars to compare;
7523 CONST_STR_N indicates which source string is the constant string;
7524 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7525
7526 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7527
7528 target = (int) (unsigned char) var_str[0]
7529 - (int) (unsigned char) const_str[0];
7530 if (target != 0)
7531 goto ne_label;
7532 ...
7533 target = (int) (unsigned char) var_str[length - 2]
7534 - (int) (unsigned char) const_str[length - 2];
7535 if (target != 0)
7536 goto ne_label;
7537 target = (int) (unsigned char) var_str[length - 1]
7538 - (int) (unsigned char) const_str[length - 1];
7539 ne_label:
7540 */
7541
7542 static rtx
7543 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7544 unsigned HOST_WIDE_INT length,
7545 int const_str_n, machine_mode mode)
7546 {
7547 HOST_WIDE_INT offset = 0;
7548 rtx var_rtx_array
7549 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7550 rtx var_rtx = NULL_RTX;
7551 rtx const_rtx = NULL_RTX;
7552 rtx result = target ? target : gen_reg_rtx (mode);
7553 rtx_code_label *ne_label = gen_label_rtx ();
7554 tree unit_type_node = unsigned_char_type_node;
7555 scalar_int_mode unit_mode
7556 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7557
7558 start_sequence ();
7559
7560 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7561 {
7562 var_rtx
7563 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7564 const_rtx = c_readstr (const_str + offset, unit_mode);
7565 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7566 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7567
7568 op0 = convert_modes (mode, unit_mode, op0, 1);
7569 op1 = convert_modes (mode, unit_mode, op1, 1);
7570 result = expand_simple_binop (mode, MINUS, op0, op1,
7571 result, 1, OPTAB_WIDEN);
7572 if (i < length - 1)
7573 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7574 mode, true, ne_label);
7575 offset += GET_MODE_SIZE (unit_mode);
7576 }
7577
7578 emit_label (ne_label);
7579 rtx_insn *insns = get_insns ();
7580 end_sequence ();
7581 emit_insn (insns);
7582
7583 return result;
7584 }
7585
7586 /* Inline expansion of a call to str(n)cmp or memcmp, with the result going
7587 to TARGET if that's convenient.
7588 If the call is not inlined, return NULL_RTX. */
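/* Illustrative sketch only: a call such as

     int has_prefix (const char *s) { return strncmp (s, "ab", 2); }

   qualifies (one argument is a constant string and the length is constant)
   and at -O2 and above is expanded by inline_string_cmp above, provided the
   number of bytes compared does not exceed
   --param builtin-string-cmp-inline-length.  */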
7589 static rtx
7590 inline_expand_builtin_string_cmp (tree exp, rtx target)
7591 {
7592 tree fndecl = get_callee_fndecl (exp);
7593 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7594 unsigned HOST_WIDE_INT length = 0;
7595 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7596
7597 /* Do NOT apply this inlining expansion when optimizing for size or
7598 optimization level below 2. */
7599 if (optimize < 2 || optimize_insn_for_size_p ())
7600 return NULL_RTX;
7601
7602 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7603 || fcode == BUILT_IN_STRNCMP
7604 || fcode == BUILT_IN_MEMCMP);
7605
7606 /* On a target where the type of the call (int) has the same or narrower
7607 precision than unsigned char, give up on the inline expansion. */
7608 if (TYPE_PRECISION (unsigned_char_type_node)
7609 >= TYPE_PRECISION (TREE_TYPE (exp)))
7610 return NULL_RTX;
7611
7612 tree arg1 = CALL_EXPR_ARG (exp, 0);
7613 tree arg2 = CALL_EXPR_ARG (exp, 1);
7614 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7615
7616 unsigned HOST_WIDE_INT len1 = 0;
7617 unsigned HOST_WIDE_INT len2 = 0;
7618 unsigned HOST_WIDE_INT len3 = 0;
7619
7620 const char *src_str1 = c_getstr (arg1, &len1);
7621 const char *src_str2 = c_getstr (arg2, &len2);
7622
7623 /* If neither string is a constant string, the call does not qualify. */
7624 if (!src_str1 && !src_str2)
7625 return NULL_RTX;
7626
7627 /* For strncmp, if the length is not a constant, the call does not qualify. */
7628 if (is_ncmp)
7629 {
7630 if (!tree_fits_uhwi_p (len3_tree))
7631 return NULL_RTX;
7632 else
7633 len3 = tree_to_uhwi (len3_tree);
7634 }
7635
7636 if (src_str1 != NULL)
7637 len1 = strnlen (src_str1, len1) + 1;
7638
7639 if (src_str2 != NULL)
7640 len2 = strnlen (src_str2, len2) + 1;
7641
7642 int const_str_n = 0;
7643 if (!len1)
7644 const_str_n = 2;
7645 else if (!len2)
7646 const_str_n = 1;
7647 else if (len2 > len1)
7648 const_str_n = 1;
7649 else
7650 const_str_n = 2;
7651
7652 gcc_checking_assert (const_str_n > 0);
7653 length = (const_str_n == 1) ? len1 : len2;
7654
7655 if (is_ncmp && len3 < length)
7656 length = len3;
7657
7658 /* If the length of the comparison is larger than the threshold,
7659 do nothing. */
7660 if (length > (unsigned HOST_WIDE_INT)
7661 param_builtin_string_cmp_inline_length)
7662 return NULL_RTX;
7663
7664 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7665
7666 /* Now, start inline expansion of the call. */
7667 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7668 (const_str_n == 1) ? src_str1 : src_str2, length,
7669 const_str_n, mode);
7670 }
7671
7672 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7673 represents the size of the first argument to that call, or VOIDmode
7674 if the argument is a pointer. IGNORE will be true if the result
7675 isn't used. */
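/* Illustrative sketch only: the typical Spectre-v1 style use is to sanitize
   a value obtained under a bounds check that may have been mis-speculated:

     if (untrusted_idx < bound)
       {
         size_t idx = __builtin_speculation_safe_value (untrusted_idx);
         val = table[idx];   // on targets with support, IDX is forced to the
                             // failsafe value (0 by default) when executed
                             // under mis-speculation
       }  */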
7676 static rtx
7677 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7678 bool ignore)
7679 {
7680 rtx val, failsafe;
7681 unsigned nargs = call_expr_nargs (exp);
7682
7683 tree arg0 = CALL_EXPR_ARG (exp, 0);
7684
7685 if (mode == VOIDmode)
7686 {
7687 mode = TYPE_MODE (TREE_TYPE (arg0));
7688 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7689 }
7690
7691 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7692
7693 /* An optional second argument can be used as a failsafe value on
7694 some machines. If it isn't present, then the failsafe value is
7695 assumed to be 0. */
7696 if (nargs > 1)
7697 {
7698 tree arg1 = CALL_EXPR_ARG (exp, 1);
7699 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7700 }
7701 else
7702 failsafe = const0_rtx;
7703
7704 /* If the result isn't used, the behavior is undefined. It would be
7705 nice to emit a warning here, but path splitting means this might
7706 happen with legitimate code. So simply drop the builtin
7707 expansion in that case; we've handled any side-effects above. */
7708 if (ignore)
7709 return const0_rtx;
7710
7711 /* If we don't have a suitable target, create one to hold the result. */
7712 if (target == NULL || GET_MODE (target) != mode)
7713 target = gen_reg_rtx (mode);
7714
7715 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7716 val = convert_modes (mode, VOIDmode, val, false);
7717
7718 return targetm.speculation_safe_value (mode, target, val, failsafe);
7719 }
7720
7721 /* Expand an expression EXP that calls a built-in function,
7722 with result going to TARGET if that's convenient
7723 (and in mode MODE if that's convenient).
7724 SUBTARGET may be used as the target for computing one of EXP's operands.
7725 IGNORE is nonzero if the value is to be ignored. */
7726
7727 rtx
7728 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7729 int ignore)
7730 {
7731 tree fndecl = get_callee_fndecl (exp);
7732 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7733 int flags;
7734
7735 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7736 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7737
7738 /* When ASan is enabled, we don't want to expand some memory/string
7739 builtins and rely on libsanitizer's hooks. This allows us to avoid
7740 redundant checks and be sure, that possible overflow will be detected
7741 by ASan. */
7742
7743 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7744 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7745 return expand_call (exp, target, ignore);
7746
7747 /* When not optimizing, generate calls to library functions for a certain
7748 set of builtins. */
7749 if (!optimize
7750 && !called_as_built_in (fndecl)
7751 && fcode != BUILT_IN_FORK
7752 && fcode != BUILT_IN_EXECL
7753 && fcode != BUILT_IN_EXECV
7754 && fcode != BUILT_IN_EXECLP
7755 && fcode != BUILT_IN_EXECLE
7756 && fcode != BUILT_IN_EXECVP
7757 && fcode != BUILT_IN_EXECVE
7758 && !ALLOCA_FUNCTION_CODE_P (fcode)
7759 && fcode != BUILT_IN_FREE)
7760 return expand_call (exp, target, ignore);
7761
7762 /* The built-in function expanders test for target == const0_rtx
7763 to determine whether the function's result will be ignored. */
7764 if (ignore)
7765 target = const0_rtx;
7766
7767 /* If the result of a pure or const built-in function is ignored, and
7768 none of its arguments are volatile, we can avoid expanding the
7769 built-in call and just evaluate the arguments for side-effects. */
7770 if (target == const0_rtx
7771 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7772 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7773 {
7774 bool volatilep = false;
7775 tree arg;
7776 call_expr_arg_iterator iter;
7777
7778 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7779 if (TREE_THIS_VOLATILE (arg))
7780 {
7781 volatilep = true;
7782 break;
7783 }
7784
7785 if (! volatilep)
7786 {
7787 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7788 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7789 return const0_rtx;
7790 }
7791 }
7792
7793 switch (fcode)
7794 {
7795 CASE_FLT_FN (BUILT_IN_FABS):
7796 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7797 case BUILT_IN_FABSD32:
7798 case BUILT_IN_FABSD64:
7799 case BUILT_IN_FABSD128:
7800 target = expand_builtin_fabs (exp, target, subtarget);
7801 if (target)
7802 return target;
7803 break;
7804
7805 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7806 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7807 target = expand_builtin_copysign (exp, target, subtarget);
7808 if (target)
7809 return target;
7810 break;
7811
7812 /* Just do a normal library call if we were unable to fold
7813 the values. */
7814 CASE_FLT_FN (BUILT_IN_CABS):
7815 break;
7816
7817 CASE_FLT_FN (BUILT_IN_FMA):
7818 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7819 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7820 if (target)
7821 return target;
7822 break;
7823
7824 CASE_FLT_FN (BUILT_IN_ILOGB):
7825 if (! flag_unsafe_math_optimizations)
7826 break;
7827 gcc_fallthrough ();
7828 CASE_FLT_FN (BUILT_IN_ISINF):
7829 CASE_FLT_FN (BUILT_IN_FINITE):
7830 case BUILT_IN_ISFINITE:
7831 case BUILT_IN_ISNORMAL:
7832 target = expand_builtin_interclass_mathfn (exp, target);
7833 if (target)
7834 return target;
7835 break;
7836
7837 CASE_FLT_FN (BUILT_IN_ICEIL):
7838 CASE_FLT_FN (BUILT_IN_LCEIL):
7839 CASE_FLT_FN (BUILT_IN_LLCEIL):
7840 CASE_FLT_FN (BUILT_IN_LFLOOR):
7841 CASE_FLT_FN (BUILT_IN_IFLOOR):
7842 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7843 target = expand_builtin_int_roundingfn (exp, target);
7844 if (target)
7845 return target;
7846 break;
7847
7848 CASE_FLT_FN (BUILT_IN_IRINT):
7849 CASE_FLT_FN (BUILT_IN_LRINT):
7850 CASE_FLT_FN (BUILT_IN_LLRINT):
7851 CASE_FLT_FN (BUILT_IN_IROUND):
7852 CASE_FLT_FN (BUILT_IN_LROUND):
7853 CASE_FLT_FN (BUILT_IN_LLROUND):
7854 target = expand_builtin_int_roundingfn_2 (exp, target);
7855 if (target)
7856 return target;
7857 break;
7858
7859 CASE_FLT_FN (BUILT_IN_POWI):
7860 target = expand_builtin_powi (exp, target);
7861 if (target)
7862 return target;
7863 break;
7864
7865 CASE_FLT_FN (BUILT_IN_CEXPI):
7866 target = expand_builtin_cexpi (exp, target);
7867 gcc_assert (target);
7868 return target;
7869
7870 CASE_FLT_FN (BUILT_IN_SIN):
7871 CASE_FLT_FN (BUILT_IN_COS):
7872 if (! flag_unsafe_math_optimizations)
7873 break;
7874 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7875 if (target)
7876 return target;
7877 break;
7878
7879 CASE_FLT_FN (BUILT_IN_SINCOS):
7880 if (! flag_unsafe_math_optimizations)
7881 break;
7882 target = expand_builtin_sincos (exp);
7883 if (target)
7884 return target;
7885 break;
7886
7887 case BUILT_IN_APPLY_ARGS:
7888 return expand_builtin_apply_args ();
7889
7890 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7891 FUNCTION with a copy of the parameters described by
7892 ARGUMENTS, and ARGSIZE. It returns a block of memory
7893 allocated on the stack into which is stored all the registers
7894 that might possibly be used for returning the result of a
7895 function. ARGUMENTS is the value returned by
7896 __builtin_apply_args. ARGSIZE is the number of bytes of
7897 arguments that must be copied. ??? How should this value be
7898 computed? We'll also need a safe worst case value for varargs
7899 functions. */
7900 case BUILT_IN_APPLY:
7901 if (!validate_arglist (exp, POINTER_TYPE,
7902 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7903 && !validate_arglist (exp, REFERENCE_TYPE,
7904 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7905 return const0_rtx;
7906 else
7907 {
7908 rtx ops[3];
7909
7910 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7911 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7912 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7913
7914 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7915 }
7916
7917 /* __builtin_return (RESULT) causes the function to return the
7918 value described by RESULT. RESULT is address of the block of
7919 memory returned by __builtin_apply. */
7920 case BUILT_IN_RETURN:
7921 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7922 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7923 return const0_rtx;
7924
7925 case BUILT_IN_SAVEREGS:
7926 return expand_builtin_saveregs ();
7927
7928 case BUILT_IN_VA_ARG_PACK:
7929 /* All valid uses of __builtin_va_arg_pack () are removed during
7930 inlining. */
7931 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7932 return const0_rtx;
7933
7934 case BUILT_IN_VA_ARG_PACK_LEN:
7935 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7936 inlining. */
7937 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7938 return const0_rtx;
7939
7940 /* Return the address of the first anonymous stack arg. */
7941 case BUILT_IN_NEXT_ARG:
7942 if (fold_builtin_next_arg (exp, false))
7943 return const0_rtx;
7944 return expand_builtin_next_arg ();
7945
7946 case BUILT_IN_CLEAR_CACHE:
7947 target = expand_builtin___clear_cache (exp);
7948 if (target)
7949 return target;
7950 break;
7951
7952 case BUILT_IN_CLASSIFY_TYPE:
7953 return expand_builtin_classify_type (exp);
7954
7955 case BUILT_IN_CONSTANT_P:
7956 return const0_rtx;
7957
7958 case BUILT_IN_FRAME_ADDRESS:
7959 case BUILT_IN_RETURN_ADDRESS:
7960 return expand_builtin_frame_address (fndecl, exp);
7961
7962 /* Returns the address of the area where the structure is returned.
7963 0 otherwise. */
7964 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7965 if (call_expr_nargs (exp) != 0
7966 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7967 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7968 return const0_rtx;
7969 else
7970 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7971
7972 CASE_BUILT_IN_ALLOCA:
7973 target = expand_builtin_alloca (exp);
7974 if (target)
7975 return target;
7976 break;
7977
7978 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7979 return expand_asan_emit_allocas_unpoison (exp);
7980
7981 case BUILT_IN_STACK_SAVE:
7982 return expand_stack_save ();
7983
7984 case BUILT_IN_STACK_RESTORE:
7985 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7986 return const0_rtx;
7987
7988 case BUILT_IN_BSWAP16:
7989 case BUILT_IN_BSWAP32:
7990 case BUILT_IN_BSWAP64:
7991 case BUILT_IN_BSWAP128:
7992 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7993 if (target)
7994 return target;
7995 break;
7996
7997 CASE_INT_FN (BUILT_IN_FFS):
7998 target = expand_builtin_unop (target_mode, exp, target,
7999 subtarget, ffs_optab);
8000 if (target)
8001 return target;
8002 break;
8003
8004 CASE_INT_FN (BUILT_IN_CLZ):
8005 target = expand_builtin_unop (target_mode, exp, target,
8006 subtarget, clz_optab);
8007 if (target)
8008 return target;
8009 break;
8010
8011 CASE_INT_FN (BUILT_IN_CTZ):
8012 target = expand_builtin_unop (target_mode, exp, target,
8013 subtarget, ctz_optab);
8014 if (target)
8015 return target;
8016 break;
8017
8018 CASE_INT_FN (BUILT_IN_CLRSB):
8019 target = expand_builtin_unop (target_mode, exp, target,
8020 subtarget, clrsb_optab);
8021 if (target)
8022 return target;
8023 break;
8024
8025 CASE_INT_FN (BUILT_IN_POPCOUNT):
8026 target = expand_builtin_unop (target_mode, exp, target,
8027 subtarget, popcount_optab);
8028 if (target)
8029 return target;
8030 break;
8031
8032 CASE_INT_FN (BUILT_IN_PARITY):
8033 target = expand_builtin_unop (target_mode, exp, target,
8034 subtarget, parity_optab);
8035 if (target)
8036 return target;
8037 break;
8038
8039 case BUILT_IN_STRLEN:
8040 target = expand_builtin_strlen (exp, target, target_mode);
8041 if (target)
8042 return target;
8043 break;
8044
8045 case BUILT_IN_STRNLEN:
8046 target = expand_builtin_strnlen (exp, target, target_mode);
8047 if (target)
8048 return target;
8049 break;
8050
8051 case BUILT_IN_STRCAT:
8052 target = expand_builtin_strcat (exp);
8053 if (target)
8054 return target;
8055 break;
8056
8057 case BUILT_IN_GETTEXT:
8058 case BUILT_IN_PUTS:
8059 case BUILT_IN_PUTS_UNLOCKED:
8060 case BUILT_IN_STRDUP:
8061 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8062 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8063 break;
8064
8065 case BUILT_IN_INDEX:
8066 case BUILT_IN_RINDEX:
8067 case BUILT_IN_STRCHR:
8068 case BUILT_IN_STRRCHR:
8069 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8070 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8071 break;
8072
8073 case BUILT_IN_FPUTS:
8074 case BUILT_IN_FPUTS_UNLOCKED:
8075 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8076 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8077 break;
8078
8079 case BUILT_IN_STRNDUP:
8080 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8081 check_nul_terminated_array (exp,
8082 CALL_EXPR_ARG (exp, 0),
8083 CALL_EXPR_ARG (exp, 1));
8084 break;
8085
8086 case BUILT_IN_STRCASECMP:
8087 case BUILT_IN_STRSTR:
8088 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8089 {
8090 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8091 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1));
8092 }
8093 break;
8094
8095 case BUILT_IN_STRCPY:
8096 target = expand_builtin_strcpy (exp, target);
8097 if (target)
8098 return target;
8099 break;
8100
8101 case BUILT_IN_STRNCAT:
8102 target = expand_builtin_strncat (exp, target);
8103 if (target)
8104 return target;
8105 break;
8106
8107 case BUILT_IN_STRNCPY:
8108 target = expand_builtin_strncpy (exp, target);
8109 if (target)
8110 return target;
8111 break;
8112
8113 case BUILT_IN_STPCPY:
8114 target = expand_builtin_stpcpy (exp, target, mode);
8115 if (target)
8116 return target;
8117 break;
8118
8119 case BUILT_IN_STPNCPY:
8120 target = expand_builtin_stpncpy (exp, target);
8121 if (target)
8122 return target;
8123 break;
8124
8125 case BUILT_IN_MEMCHR:
8126 target = expand_builtin_memchr (exp, target);
8127 if (target)
8128 return target;
8129 break;
8130
8131 case BUILT_IN_MEMCPY:
8132 target = expand_builtin_memcpy (exp, target);
8133 if (target)
8134 return target;
8135 break;
8136
8137 case BUILT_IN_MEMMOVE:
8138 target = expand_builtin_memmove (exp, target);
8139 if (target)
8140 return target;
8141 break;
8142
8143 case BUILT_IN_MEMPCPY:
8144 target = expand_builtin_mempcpy (exp, target);
8145 if (target)
8146 return target;
8147 break;
8148
8149 case BUILT_IN_MEMSET:
8150 target = expand_builtin_memset (exp, target, mode);
8151 if (target)
8152 return target;
8153 break;
8154
8155 case BUILT_IN_BZERO:
8156 target = expand_builtin_bzero (exp);
8157 if (target)
8158 return target;
8159 break;
8160
8161 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8162 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8163 when changing it to a strcmp call. */
8164 case BUILT_IN_STRCMP_EQ:
8165 target = expand_builtin_memcmp (exp, target, true);
8166 if (target)
8167 return target;
8168
8169 /* Change this call back to a BUILT_IN_STRCMP. */
8170 TREE_OPERAND (exp, 1)
8171 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8172
8173 /* Delete the last parameter. */
8174 unsigned int i;
8175 vec<tree, va_gc> *arg_vec;
8176 vec_alloc (arg_vec, 2);
8177 for (i = 0; i < 2; i++)
8178 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8179 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8180 /* FALLTHROUGH */
8181
8182 case BUILT_IN_STRCMP:
8183 target = expand_builtin_strcmp (exp, target);
8184 if (target)
8185 return target;
8186 break;
8187
8188 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8189 back to a BUILT_IN_STRNCMP. */
8190 case BUILT_IN_STRNCMP_EQ:
8191 target = expand_builtin_memcmp (exp, target, true);
8192 if (target)
8193 return target;
8194
8195 /* Change it back to a BUILT_IN_STRNCMP. */
8196 TREE_OPERAND (exp, 1)
8197 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8198 /* FALLTHROUGH */
8199
8200 case BUILT_IN_STRNCMP:
8201 target = expand_builtin_strncmp (exp, target, mode);
8202 if (target)
8203 return target;
8204 break;
8205
8206 case BUILT_IN_BCMP:
8207 case BUILT_IN_MEMCMP:
8208 case BUILT_IN_MEMCMP_EQ:
8209 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8210 if (target)
8211 return target;
8212 if (fcode == BUILT_IN_MEMCMP_EQ)
8213 {
8214 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8215 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8216 }
8217 break;
8218
8219 case BUILT_IN_SETJMP:
8220 /* This should have been lowered to the builtins below. */
8221 gcc_unreachable ();
8222
8223 case BUILT_IN_SETJMP_SETUP:
8224 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8225 and the receiver label. */
8226 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8227 {
8228 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8229 VOIDmode, EXPAND_NORMAL);
8230 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8231 rtx_insn *label_r = label_rtx (label);
8232
8233 /* This is copied from the handling of non-local gotos. */
8234 expand_builtin_setjmp_setup (buf_addr, label_r);
8235 nonlocal_goto_handler_labels
8236 = gen_rtx_INSN_LIST (VOIDmode, label_r,
8237 nonlocal_goto_handler_labels);
8238 /* ??? Do not let expand_label treat us as such since we would
8239 not want to be both on the list of non-local labels and on
8240 the list of forced labels. */
8241 FORCED_LABEL (label) = 0;
8242 return const0_rtx;
8243 }
8244 break;
8245
8246 case BUILT_IN_SETJMP_RECEIVER:
8247 /* __builtin_setjmp_receiver is passed the receiver label. */
8248 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8249 {
8250 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8251 rtx_insn *label_r = label_rtx (label);
8252
8253 expand_builtin_setjmp_receiver (label_r);
8254 return const0_rtx;
8255 }
8256 break;
8257
8258 /* __builtin_longjmp is passed a pointer to an array of five words.
8259 It's similar to the C library longjmp function but works with
8260 __builtin_setjmp above. */
8261 case BUILT_IN_LONGJMP:
8262 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8263 {
8264 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8265 VOIDmode, EXPAND_NORMAL);
8266 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8267
8268 if (value != const1_rtx)
8269 {
8270 error ("%<__builtin_longjmp%> second argument must be 1");
8271 return const0_rtx;
8272 }
8273
8274 expand_builtin_longjmp (buf_addr, value);
8275 return const0_rtx;
8276 }
8277 break;
8278
8279 case BUILT_IN_NONLOCAL_GOTO:
8280 target = expand_builtin_nonlocal_goto (exp);
8281 if (target)
8282 return target;
8283 break;
8284
8285 /* This updates the setjmp buffer that is its argument with the value
8286 of the current stack pointer. */
8287 case BUILT_IN_UPDATE_SETJMP_BUF:
8288 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8289 {
8290 rtx buf_addr
8291 = expand_normal (CALL_EXPR_ARG (exp, 0));
8292
8293 expand_builtin_update_setjmp_buf (buf_addr);
8294 return const0_rtx;
8295 }
8296 break;
8297
8298 case BUILT_IN_TRAP:
8299 expand_builtin_trap ();
8300 return const0_rtx;
8301
8302 case BUILT_IN_UNREACHABLE:
8303 expand_builtin_unreachable ();
8304 return const0_rtx;
8305
8306 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8307 case BUILT_IN_SIGNBITD32:
8308 case BUILT_IN_SIGNBITD64:
8309 case BUILT_IN_SIGNBITD128:
8310 target = expand_builtin_signbit (exp, target);
8311 if (target)
8312 return target;
8313 break;
8314
8315 /* Various hooks for the DWARF 2 __throw routine. */
8316 case BUILT_IN_UNWIND_INIT:
8317 expand_builtin_unwind_init ();
8318 return const0_rtx;
8319 case BUILT_IN_DWARF_CFA:
8320 return virtual_cfa_rtx;
8321 #ifdef DWARF2_UNWIND_INFO
8322 case BUILT_IN_DWARF_SP_COLUMN:
8323 return expand_builtin_dwarf_sp_column ();
8324 case BUILT_IN_INIT_DWARF_REG_SIZES:
8325 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8326 return const0_rtx;
8327 #endif
8328 case BUILT_IN_FROB_RETURN_ADDR:
8329 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8330 case BUILT_IN_EXTRACT_RETURN_ADDR:
8331 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8332 case BUILT_IN_EH_RETURN:
8333 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8334 CALL_EXPR_ARG (exp, 1));
8335 return const0_rtx;
8336 case BUILT_IN_EH_RETURN_DATA_REGNO:
8337 return expand_builtin_eh_return_data_regno (exp);
8338 case BUILT_IN_EXTEND_POINTER:
8339 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8340 case BUILT_IN_EH_POINTER:
8341 return expand_builtin_eh_pointer (exp);
8342 case BUILT_IN_EH_FILTER:
8343 return expand_builtin_eh_filter (exp);
8344 case BUILT_IN_EH_COPY_VALUES:
8345 return expand_builtin_eh_copy_values (exp);
8346
8347 case BUILT_IN_VA_START:
8348 return expand_builtin_va_start (exp);
8349 case BUILT_IN_VA_END:
8350 return expand_builtin_va_end (exp);
8351 case BUILT_IN_VA_COPY:
8352 return expand_builtin_va_copy (exp);
8353 case BUILT_IN_EXPECT:
8354 return expand_builtin_expect (exp, target);
8355 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8356 return expand_builtin_expect_with_probability (exp, target);
8357 case BUILT_IN_ASSUME_ALIGNED:
8358 return expand_builtin_assume_aligned (exp, target);
8359 case BUILT_IN_PREFETCH:
8360 expand_builtin_prefetch (exp);
8361 return const0_rtx;
8362
8363 case BUILT_IN_INIT_TRAMPOLINE:
8364 return expand_builtin_init_trampoline (exp, true);
8365 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8366 return expand_builtin_init_trampoline (exp, false);
8367 case BUILT_IN_ADJUST_TRAMPOLINE:
8368 return expand_builtin_adjust_trampoline (exp);
8369
8370 case BUILT_IN_INIT_DESCRIPTOR:
8371 return expand_builtin_init_descriptor (exp);
8372 case BUILT_IN_ADJUST_DESCRIPTOR:
8373 return expand_builtin_adjust_descriptor (exp);
8374
8375 case BUILT_IN_FORK:
8376 case BUILT_IN_EXECL:
8377 case BUILT_IN_EXECV:
8378 case BUILT_IN_EXECLP:
8379 case BUILT_IN_EXECLE:
8380 case BUILT_IN_EXECVP:
8381 case BUILT_IN_EXECVE:
8382 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8383 if (target)
8384 return target;
8385 break;
8386
8387 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8388 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8389 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8390 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8391 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8392 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8393 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8394 if (target)
8395 return target;
8396 break;
8397
8398 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8399 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8400 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8401 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8402 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8403 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8404 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8405 if (target)
8406 return target;
8407 break;
8408
8409 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8410 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8411 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8412 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8413 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8414 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8415 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8416 if (target)
8417 return target;
8418 break;
8419
8420 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8421 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8422 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8423 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8424 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8425 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8426 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8427 if (target)
8428 return target;
8429 break;
8430
8431 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8432 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8433 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8434 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8435 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8436 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8437 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8438 if (target)
8439 return target;
8440 break;
8441
8442 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8443 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8444 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8445 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8446 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8447 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8448 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8449 if (target)
8450 return target;
8451 break;
8452
8453 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8454 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8455 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8456 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8457 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8458 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8459 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8460 if (target)
8461 return target;
8462 break;
8463
8464 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8465 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8466 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8467 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8468 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8469 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8470 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8471 if (target)
8472 return target;
8473 break;
8474
8475 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8476 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8477 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8478 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8479 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8480 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8481 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8482 if (target)
8483 return target;
8484 break;
8485
8486 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8487 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8488 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8489 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8490 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8491 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8492 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8493 if (target)
8494 return target;
8495 break;
8496
8497 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8498 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8499 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8500 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8501 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8502 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8503 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8504 if (target)
8505 return target;
8506 break;
8507
8508 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8509 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8510 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8511 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8512 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8513 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8514 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8515 if (target)
8516 return target;
8517 break;
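/* Editorial note (illustration, not part of the original sources): the
   boolean argument to expand_builtin_sync_operation selects whether the
   value *after* the operation is returned.  For an int X, roughly:

       int old_val = __sync_fetch_and_add (&X, 5);  // value before the add
       int new_val = __sync_add_and_fetch (&X, 5);  // value after the add

   so the FETCH_AND_* cases pass false and the *_AND_FETCH cases above
   pass true.  */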
8518
8519 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8520 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8521 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8522 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8523 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8524 if (mode == VOIDmode)
8525 mode = TYPE_MODE (boolean_type_node);
8526 if (!target || !register_operand (target, mode))
8527 target = gen_reg_rtx (mode);
8528
8529 mode = get_builtin_sync_mode
8530 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8531 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8532 if (target)
8533 return target;
8534 break;
8535
8536 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8537 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8538 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8539 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8540 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8541 mode = get_builtin_sync_mode
8542 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8543 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8544 if (target)
8545 return target;
8546 break;
8547
8548 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8549 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8550 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8551 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8552 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8554 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8555 if (target)
8556 return target;
8557 break;
8558
8559 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8560 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8561 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8562 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8563 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8564 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8565 expand_builtin_sync_lock_release (mode, exp);
8566 return const0_rtx;
8567
8568 case BUILT_IN_SYNC_SYNCHRONIZE:
8569 expand_builtin_sync_synchronize ();
8570 return const0_rtx;
8571
8572 case BUILT_IN_ATOMIC_EXCHANGE_1:
8573 case BUILT_IN_ATOMIC_EXCHANGE_2:
8574 case BUILT_IN_ATOMIC_EXCHANGE_4:
8575 case BUILT_IN_ATOMIC_EXCHANGE_8:
8576 case BUILT_IN_ATOMIC_EXCHANGE_16:
8577 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8578 target = expand_builtin_atomic_exchange (mode, exp, target);
8579 if (target)
8580 return target;
8581 break;
8582
8583 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8584 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8585 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8586 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8587 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8588 {
8589 unsigned int nargs, z;
8590 vec<tree, va_gc> *vec;
8591
8592 mode =
8593 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8594 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8595 if (target)
8596 return target;
8597
8598 /* If this is turned into an external library call, the weak parameter
8599 must be dropped to match the expected parameter list. */
8600 nargs = call_expr_nargs (exp);
8601 vec_alloc (vec, nargs - 1);
8602 for (z = 0; z < 3; z++)
8603 vec->quick_push (CALL_EXPR_ARG (exp, z));
8604 /* Skip the boolean weak parameter. */
8605 for (z = 4; z < 6; z++)
8606 vec->quick_push (CALL_EXPR_ARG (exp, z));
8607 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8608 break;
8609 }
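/* Editorial illustration (informal, not from the original sources): the
   builtin carries a "weak" flag that the libatomic entry point does not,
   so a call roughly of the form

       __atomic_compare_exchange_4 (p, &expected, desired,
                                    weak, success_order, failure_order)

   falls back to the five-argument library routine

       __atomic_compare_exchange_4 (p, &expected, desired,
                                    success_order, failure_order)

   which is why argument 3 is skipped when the call is rebuilt above.  */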
8610
8611 case BUILT_IN_ATOMIC_LOAD_1:
8612 case BUILT_IN_ATOMIC_LOAD_2:
8613 case BUILT_IN_ATOMIC_LOAD_4:
8614 case BUILT_IN_ATOMIC_LOAD_8:
8615 case BUILT_IN_ATOMIC_LOAD_16:
8616 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8617 target = expand_builtin_atomic_load (mode, exp, target);
8618 if (target)
8619 return target;
8620 break;
8621
8622 case BUILT_IN_ATOMIC_STORE_1:
8623 case BUILT_IN_ATOMIC_STORE_2:
8624 case BUILT_IN_ATOMIC_STORE_4:
8625 case BUILT_IN_ATOMIC_STORE_8:
8626 case BUILT_IN_ATOMIC_STORE_16:
8627 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8628 target = expand_builtin_atomic_store (mode, exp);
8629 if (target)
8630 return const0_rtx;
8631 break;
8632
8633 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8634 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8635 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8636 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8637 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8638 {
8639 enum built_in_function lib;
8640 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8641 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8642 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8643 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8644 ignore, lib);
8645 if (target)
8646 return target;
8647 break;
8648 }
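/* Editorial note (informal): the libatomic ABI only provides the
   fetch-and-op entry points, so when e.g. __atomic_add_fetch_4 cannot be
   expanded inline it is routed to the __atomic_fetch_add_4 fallback
   recorded in LIB, and the expander reapplies the addition to the
   returned (old) value to recover the add-and-fetch result.  */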
8649 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8650 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8651 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8652 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8653 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8654 {
8655 enum built_in_function lib;
8656 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8657 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8658 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8659 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8660 ignore, lib);
8661 if (target)
8662 return target;
8663 break;
8664 }
8665 case BUILT_IN_ATOMIC_AND_FETCH_1:
8666 case BUILT_IN_ATOMIC_AND_FETCH_2:
8667 case BUILT_IN_ATOMIC_AND_FETCH_4:
8668 case BUILT_IN_ATOMIC_AND_FETCH_8:
8669 case BUILT_IN_ATOMIC_AND_FETCH_16:
8670 {
8671 enum built_in_function lib;
8672 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8673 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8674 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8675 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8676 ignore, lib);
8677 if (target)
8678 return target;
8679 break;
8680 }
8681 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8682 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8683 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8684 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8685 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8686 {
8687 enum built_in_function lib;
8688 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8689 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8690 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8691 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8692 ignore, lib);
8693 if (target)
8694 return target;
8695 break;
8696 }
8697 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8698 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8699 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8700 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8701 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8702 {
8703 enum built_in_function lib;
8704 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8705 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8706 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8707 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8708 ignore, lib);
8709 if (target)
8710 return target;
8711 break;
8712 }
8713 case BUILT_IN_ATOMIC_OR_FETCH_1:
8714 case BUILT_IN_ATOMIC_OR_FETCH_2:
8715 case BUILT_IN_ATOMIC_OR_FETCH_4:
8716 case BUILT_IN_ATOMIC_OR_FETCH_8:
8717 case BUILT_IN_ATOMIC_OR_FETCH_16:
8718 {
8719 enum built_in_function lib;
8720 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8721 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8722 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8723 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8724 ignore, lib);
8725 if (target)
8726 return target;
8727 break;
8728 }
8729 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8730 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8731 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8732 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8733 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8734 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8735 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8736 ignore, BUILT_IN_NONE);
8737 if (target)
8738 return target;
8739 break;
8740
8741 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8742 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8743 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8744 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8745 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8746 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8747 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8748 ignore, BUILT_IN_NONE);
8749 if (target)
8750 return target;
8751 break;
8752
8753 case BUILT_IN_ATOMIC_FETCH_AND_1:
8754 case BUILT_IN_ATOMIC_FETCH_AND_2:
8755 case BUILT_IN_ATOMIC_FETCH_AND_4:
8756 case BUILT_IN_ATOMIC_FETCH_AND_8:
8757 case BUILT_IN_ATOMIC_FETCH_AND_16:
8758 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8759 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8760 ignore, BUILT_IN_NONE);
8761 if (target)
8762 return target;
8763 break;
8764
8765 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8766 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8767 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8768 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8769 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8770 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8771 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8772 ignore, BUILT_IN_NONE);
8773 if (target)
8774 return target;
8775 break;
8776
8777 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8778 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8779 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8780 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8781 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8782 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8783 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8784 ignore, BUILT_IN_NONE);
8785 if (target)
8786 return target;
8787 break;
8788
8789 case BUILT_IN_ATOMIC_FETCH_OR_1:
8790 case BUILT_IN_ATOMIC_FETCH_OR_2:
8791 case BUILT_IN_ATOMIC_FETCH_OR_4:
8792 case BUILT_IN_ATOMIC_FETCH_OR_8:
8793 case BUILT_IN_ATOMIC_FETCH_OR_16:
8794 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8795 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8796 ignore, BUILT_IN_NONE);
8797 if (target)
8798 return target;
8799 break;
8800
8801 case BUILT_IN_ATOMIC_TEST_AND_SET:
8802 return expand_builtin_atomic_test_and_set (exp, target);
8803
8804 case BUILT_IN_ATOMIC_CLEAR:
8805 return expand_builtin_atomic_clear (exp);
8806
8807 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8808 return expand_builtin_atomic_always_lock_free (exp);
8809
8810 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8811 target = expand_builtin_atomic_is_lock_free (exp);
8812 if (target)
8813 return target;
8814 break;
8815
8816 case BUILT_IN_ATOMIC_THREAD_FENCE:
8817 expand_builtin_atomic_thread_fence (exp);
8818 return const0_rtx;
8819
8820 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8821 expand_builtin_atomic_signal_fence (exp);
8822 return const0_rtx;
8823
8824 case BUILT_IN_OBJECT_SIZE:
8825 return expand_builtin_object_size (exp);
8826
8827 case BUILT_IN_MEMCPY_CHK:
8828 case BUILT_IN_MEMPCPY_CHK:
8829 case BUILT_IN_MEMMOVE_CHK:
8830 case BUILT_IN_MEMSET_CHK:
8831 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8832 if (target)
8833 return target;
8834 break;
8835
8836 case BUILT_IN_STRCPY_CHK:
8837 case BUILT_IN_STPCPY_CHK:
8838 case BUILT_IN_STRNCPY_CHK:
8839 case BUILT_IN_STPNCPY_CHK:
8840 case BUILT_IN_STRCAT_CHK:
8841 case BUILT_IN_STRNCAT_CHK:
8842 case BUILT_IN_SNPRINTF_CHK:
8843 case BUILT_IN_VSNPRINTF_CHK:
8844 maybe_emit_chk_warning (exp, fcode);
8845 break;
8846
8847 case BUILT_IN_SPRINTF_CHK:
8848 case BUILT_IN_VSPRINTF_CHK:
8849 maybe_emit_sprintf_chk_warning (exp, fcode);
8850 break;
8851
8852 case BUILT_IN_FREE:
8853 if (warn_free_nonheap_object)
8854 maybe_emit_free_warning (exp);
8855 break;
8856
8857 case BUILT_IN_THREAD_POINTER:
8858 return expand_builtin_thread_pointer (exp, target);
8859
8860 case BUILT_IN_SET_THREAD_POINTER:
8861 expand_builtin_set_thread_pointer (exp);
8862 return const0_rtx;
8863
8864 case BUILT_IN_ACC_ON_DEVICE:
8865 /* Do the library call if we failed to expand the builtin when
8866 folding. */
8867 break;
8868
8869 case BUILT_IN_GOACC_PARLEVEL_ID:
8870 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8871 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8872
8873 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8874 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8875
8876 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8877 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8878 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8879 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8880 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8881 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8882 return expand_speculation_safe_value (mode, exp, target, ignore);
8883
8884 default: /* just do library call, if unknown builtin */
8885 break;
8886 }
8887
8888 /* The switch statement above can drop through to cause the function
8889 to be called normally. */
8890 return expand_call (exp, target, ignore);
8891 }
8892
8893 /* Determine whether a tree node represents a call to a built-in
8894 function. If the tree T is a call to a built-in function with
8895 the right number of arguments of the appropriate types, return
8896 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8897 Otherwise the return value is END_BUILTINS. */
8898
8899 enum built_in_function
8900 builtin_mathfn_code (const_tree t)
8901 {
8902 const_tree fndecl, arg, parmlist;
8903 const_tree argtype, parmtype;
8904 const_call_expr_arg_iterator iter;
8905
8906 if (TREE_CODE (t) != CALL_EXPR)
8907 return END_BUILTINS;
8908
8909 fndecl = get_callee_fndecl (t);
8910 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8911 return END_BUILTINS;
8912
8913 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8914 init_const_call_expr_arg_iterator (t, &iter);
8915 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8916 {
8917 /* If a function doesn't take a variable number of arguments,
8918 the last element in the list will have type `void'. */
8919 parmtype = TREE_VALUE (parmlist);
8920 if (VOID_TYPE_P (parmtype))
8921 {
8922 if (more_const_call_expr_args_p (&iter))
8923 return END_BUILTINS;
8924 return DECL_FUNCTION_CODE (fndecl);
8925 }
8926
8927 if (! more_const_call_expr_args_p (&iter))
8928 return END_BUILTINS;
8929
8930 arg = next_const_call_expr_arg (&iter);
8931 argtype = TREE_TYPE (arg);
8932
8933 if (SCALAR_FLOAT_TYPE_P (parmtype))
8934 {
8935 if (! SCALAR_FLOAT_TYPE_P (argtype))
8936 return END_BUILTINS;
8937 }
8938 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8939 {
8940 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8941 return END_BUILTINS;
8942 }
8943 else if (POINTER_TYPE_P (parmtype))
8944 {
8945 if (! POINTER_TYPE_P (argtype))
8946 return END_BUILTINS;
8947 }
8948 else if (INTEGRAL_TYPE_P (parmtype))
8949 {
8950 if (! INTEGRAL_TYPE_P (argtype))
8951 return END_BUILTINS;
8952 }
8953 else
8954 return END_BUILTINS;
8955 }
8956
8957 /* Variable-length argument list. */
8958 return DECL_FUNCTION_CODE (fndecl);
8959 }
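/* Editorial illustration (informal): for a call such as

       double d = __builtin_sqrt (2.0);

   builtin_mathfn_code returns BUILT_IN_SQRT, while a call whose argument
   types do not match the prototype, or a call to a non-builtin, yields
   END_BUILTINS.  */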
8960
8961 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8962 evaluate to a constant. */
8963
8964 static tree
8965 fold_builtin_constant_p (tree arg)
8966 {
8967 /* We return 1 for a numeric type that's known to be a constant
8968 value at compile-time or for an aggregate type that's a
8969 literal constant. */
8970 STRIP_NOPS (arg);
8971
8972 /* If we know this is a constant, return the constant one. */
8973 if (CONSTANT_CLASS_P (arg)
8974 || (TREE_CODE (arg) == CONSTRUCTOR
8975 && TREE_CONSTANT (arg)))
8976 return integer_one_node;
8977 if (TREE_CODE (arg) == ADDR_EXPR)
8978 {
8979 tree op = TREE_OPERAND (arg, 0);
8980 if (TREE_CODE (op) == STRING_CST
8981 || (TREE_CODE (op) == ARRAY_REF
8982 && integer_zerop (TREE_OPERAND (op, 1))
8983 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8984 return integer_one_node;
8985 }
8986
8987 /* If this expression has side effects, show we don't know it to be a
8988 constant. Likewise if it's a pointer or aggregate type since in
8989 those cases we only want literals, since those are only optimized
8990 when generating RTL, not later.
8991 And finally, if we are compiling an initializer, not code, we
8992 need to return a definite result now; there's not going to be any
8993 more optimization done. */
8994 if (TREE_SIDE_EFFECTS (arg)
8995 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8996 || POINTER_TYPE_P (TREE_TYPE (arg))
8997 || cfun == 0
8998 || folding_initializer
8999 || force_folding_builtin_constant_p)
9000 return integer_zero_node;
9001
9002 return NULL_TREE;
9003 }
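/* Editorial illustration (informal):

       int a = __builtin_constant_p (42);        // folds to 1
       int b = __builtin_constant_p ("abc");     // folds to 1 (string literal)
       int c = __builtin_constant_p (some_int);  // hypothetical variable

   The last case folds to 0 only once no further optimization could still
   prove the argument constant (not optimizing, folding an initializer,
   or force_folding_builtin_constant_p); otherwise NULL_TREE is returned
   and the decision is deferred.  */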
9004
9005 /* Create builtin_expect or builtin_expect_with_probability
9006 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9007 The Fortran FE can also produce builtin_expect with PREDICTOR as its third
9008 argument. builtin_expect_with_probability instead uses its third argument
9009 as the PROBABILITY value. */
9010
9011 static tree
9012 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9013 tree predictor, tree probability)
9014 {
9015 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9016
9017 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9018 : BUILT_IN_EXPECT_WITH_PROBABILITY);
9019 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9020 ret_type = TREE_TYPE (TREE_TYPE (fn));
9021 pred_type = TREE_VALUE (arg_types);
9022 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9023
9024 pred = fold_convert_loc (loc, pred_type, pred);
9025 expected = fold_convert_loc (loc, expected_type, expected);
9026
9027 if (probability)
9028 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9029 else
9030 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9031 predictor);
9032
9033 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9034 build_int_cst (ret_type, 0));
9035 }
9036
9037 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9038 NULL_TREE if no simplification is possible. */
9039
9040 tree
9041 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9042 tree arg3)
9043 {
9044 tree inner, fndecl, inner_arg0;
9045 enum tree_code code;
9046
9047 /* Distribute the expected value over short-circuiting operators.
9048 See through the cast from truthvalue_type_node to long. */
9049 inner_arg0 = arg0;
9050 while (CONVERT_EXPR_P (inner_arg0)
9051 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9052 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9053 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9054
9055 /* If this is a builtin_expect within a builtin_expect keep the
9056 inner one. See through a comparison against a constant. It
9057 might have been added to create a truthvalue. */
9058 inner = inner_arg0;
9059
9060 if (COMPARISON_CLASS_P (inner)
9061 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9062 inner = TREE_OPERAND (inner, 0);
9063
9064 if (TREE_CODE (inner) == CALL_EXPR
9065 && (fndecl = get_callee_fndecl (inner))
9066 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
9067 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
9068 return arg0;
9069
9070 inner = inner_arg0;
9071 code = TREE_CODE (inner);
9072 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9073 {
9074 tree op0 = TREE_OPERAND (inner, 0);
9075 tree op1 = TREE_OPERAND (inner, 1);
9076 arg1 = save_expr (arg1);
9077
9078 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9079 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9080 inner = build2 (code, TREE_TYPE (inner), op0, op1);
9081
9082 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9083 }
9084
9085 /* If the argument isn't invariant then there's nothing else we can do. */
9086 if (!TREE_CONSTANT (inner_arg0))
9087 return NULL_TREE;
9088
9089 /* If we expect that a comparison against the argument will fold to
9090 a constant return the constant. In practice, this means a true
9091 constant or the address of a non-weak symbol. */
9092 inner = inner_arg0;
9093 STRIP_NOPS (inner);
9094 if (TREE_CODE (inner) == ADDR_EXPR)
9095 {
9096 do
9097 {
9098 inner = TREE_OPERAND (inner, 0);
9099 }
9100 while (TREE_CODE (inner) == COMPONENT_REF
9101 || TREE_CODE (inner) == ARRAY_REF);
9102 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9103 return NULL_TREE;
9104 }
9105
9106 /* Otherwise, ARG0 already has the proper type for the return value. */
9107 return arg0;
9108 }
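/* Editorial illustration (informal sketch): the distribution step above
   rewrites

       if (__builtin_expect (a && b, 1)) ...

   into the equivalent of

       if (__builtin_expect (a, 1) && __builtin_expect (b, 1)) ...

   so the hint attaches to each short-circuited condition, while a nested
   __builtin_expect (__builtin_expect (x, 1), 1) keeps only the inner
   call.  */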
9109
9110 /* Fold a call to __builtin_classify_type with argument ARG. */
9111
9112 static tree
9113 fold_builtin_classify_type (tree arg)
9114 {
9115 if (arg == 0)
9116 return build_int_cst (integer_type_node, no_type_class);
9117
9118 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9119 }
9120
9121 /* Fold a call to __builtin_strlen with argument ARG. */
9122
9123 static tree
9124 fold_builtin_strlen (location_t loc, tree type, tree arg)
9125 {
9126 if (!validate_arg (arg, POINTER_TYPE))
9127 return NULL_TREE;
9128 else
9129 {
9130 c_strlen_data lendata = { };
9131 tree len = c_strlen (arg, 0, &lendata);
9132
9133 if (len)
9134 return fold_convert_loc (loc, type, len);
9135
9136 if (!lendata.decl)
9137 c_strlen (arg, 1, &lendata);
9138
9139 if (lendata.decl)
9140 {
9141 if (EXPR_HAS_LOCATION (arg))
9142 loc = EXPR_LOCATION (arg);
9143 else if (loc == UNKNOWN_LOCATION)
9144 loc = input_location;
9145 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
9146 }
9147
9148 return NULL_TREE;
9149 }
9150 }
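/* Editorial illustration (informal): with a string literal argument

       size_t n = __builtin_strlen ("hello");

   c_strlen computes the constant 5 and the call folds away.  For an
   array that cannot be proven NUL-terminated no folding happens, but a
   warning may be emitted through warn_string_no_nul.  */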
9151
9152 /* Fold a call to __builtin_inf or __builtin_huge_val. */
9153
9154 static tree
9155 fold_builtin_inf (location_t loc, tree type, int warn)
9156 {
9157 REAL_VALUE_TYPE real;
9158
9159 /* __builtin_inff is intended to be usable to define INFINITY on all
9160 targets. If an infinity is not available, INFINITY expands "to a
9161 positive constant of type float that overflows at translation
9162 time", footnote "In this case, using INFINITY will violate the
9163 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9164 Thus we pedwarn to ensure this constraint violation is
9165 diagnosed. */
9166 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9167 pedwarn (loc, 0, "target format does not support infinity");
9168
9169 real_inf (&real);
9170 return build_real (type, real);
9171 }
9172
9173 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9174 NULL_TREE if no simplification can be made. */
9175
9176 static tree
9177 fold_builtin_sincos (location_t loc,
9178 tree arg0, tree arg1, tree arg2)
9179 {
9180 tree type;
9181 tree fndecl, call = NULL_TREE;
9182
9183 if (!validate_arg (arg0, REAL_TYPE)
9184 || !validate_arg (arg1, POINTER_TYPE)
9185 || !validate_arg (arg2, POINTER_TYPE))
9186 return NULL_TREE;
9187
9188 type = TREE_TYPE (arg0);
9189
9190 /* Calculate the result when the argument is a constant. */
9191 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9192 if (fn == END_BUILTINS)
9193 return NULL_TREE;
9194
9195 /* Canonicalize sincos to cexpi. */
9196 if (TREE_CODE (arg0) == REAL_CST)
9197 {
9198 tree complex_type = build_complex_type (type);
9199 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9200 }
9201 if (!call)
9202 {
9203 if (!targetm.libc_has_function (function_c99_math_complex)
9204 || !builtin_decl_implicit_p (fn))
9205 return NULL_TREE;
9206 fndecl = builtin_decl_explicit (fn);
9207 call = build_call_expr_loc (loc, fndecl, 1, arg0);
9208 call = builtin_save_expr (call);
9209 }
9210
9211 tree ptype = build_pointer_type (type);
9212 arg1 = fold_convert (ptype, arg1);
9213 arg2 = fold_convert (ptype, arg2);
9214 return build2 (COMPOUND_EXPR, void_type_node,
9215 build2 (MODIFY_EXPR, void_type_node,
9216 build_fold_indirect_ref_loc (loc, arg1),
9217 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9218 build2 (MODIFY_EXPR, void_type_node,
9219 build_fold_indirect_ref_loc (loc, arg2),
9220 fold_build1_loc (loc, REALPART_EXPR, type, call)));
9221 }
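/* Editorial illustration (informal sketch): when the C library provides
   the C99 complex functions (or the argument is constant),

       double s, c;
       sincos (x, &s, &c);

   is rewritten into roughly

       _Complex double t = __builtin_cexpi (x);  // hypothetical temporary
       s = __imag__ t;
       c = __real__ t;

   which later passes can share with other uses of sin (x) and cos (x).  */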
9222
9223 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9224 Return NULL_TREE if no simplification can be made. */
9225
9226 static tree
9227 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9228 {
9229 if (!validate_arg (arg1, POINTER_TYPE)
9230 || !validate_arg (arg2, POINTER_TYPE)
9231 || !validate_arg (len, INTEGER_TYPE))
9232 return NULL_TREE;
9233
9234 /* If the LEN parameter is zero, return zero. */
9235 if (integer_zerop (len))
9236 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9237 arg1, arg2);
9238
9239 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9240 if (operand_equal_p (arg1, arg2, 0))
9241 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9242
9243 /* If the len parameter is one, return an expression corresponding to
9244 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9245 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9246 {
9247 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9248 tree cst_uchar_ptr_node
9249 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9250
9251 tree ind1
9252 = fold_convert_loc (loc, integer_type_node,
9253 build1 (INDIRECT_REF, cst_uchar_node,
9254 fold_convert_loc (loc,
9255 cst_uchar_ptr_node,
9256 arg1)));
9257 tree ind2
9258 = fold_convert_loc (loc, integer_type_node,
9259 build1 (INDIRECT_REF, cst_uchar_node,
9260 fold_convert_loc (loc,
9261 cst_uchar_ptr_node,
9262 arg2)));
9263 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9264 }
9265
9266 return NULL_TREE;
9267 }
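/* Editorial illustration (informal): the three foldings above correspond
   to

       memcmp (p, q, 0)  ->  0
       memcmp (p, p, n)  ->  0
       memcmp (p, q, 1)  ->  *(const unsigned char *) p
                             - *(const unsigned char *) q

   where the omit_*_operand_loc calls preserve any side effects of the
   dropped arguments.  */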
9268
9269 /* Fold a call to builtin isascii with argument ARG. */
9270
9271 static tree
9272 fold_builtin_isascii (location_t loc, tree arg)
9273 {
9274 if (!validate_arg (arg, INTEGER_TYPE))
9275 return NULL_TREE;
9276 else
9277 {
9278 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9279 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9280 build_int_cst (integer_type_node,
9281 ~ (unsigned HOST_WIDE_INT) 0x7f));
9282 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9283 arg, integer_zero_node);
9284 }
9285 }
9286
9287 /* Fold a call to builtin toascii with argument ARG. */
9288
9289 static tree
9290 fold_builtin_toascii (location_t loc, tree arg)
9291 {
9292 if (!validate_arg (arg, INTEGER_TYPE))
9293 return NULL_TREE;
9294
9295 /* Transform toascii(c) -> (c & 0x7f). */
9296 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9297 build_int_cst (integer_type_node, 0x7f));
9298 }
9299
9300 /* Fold a call to builtin isdigit with argument ARG. */
9301
9302 static tree
9303 fold_builtin_isdigit (location_t loc, tree arg)
9304 {
9305 if (!validate_arg (arg, INTEGER_TYPE))
9306 return NULL_TREE;
9307 else
9308 {
9309 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9310 /* According to the C standard, isdigit is unaffected by locale.
9311 However, it definitely is affected by the target character set. */
9312 unsigned HOST_WIDE_INT target_digit0
9313 = lang_hooks.to_target_charset ('0');
9314
9315 if (target_digit0 == 0)
9316 return NULL_TREE;
9317
9318 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9319 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9320 build_int_cst (unsigned_type_node, target_digit0));
9321 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9322 build_int_cst (unsigned_type_node, 9));
9323 }
9324 }
9325
9326 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9327
9328 static tree
9329 fold_builtin_fabs (location_t loc, tree arg, tree type)
9330 {
9331 if (!validate_arg (arg, REAL_TYPE))
9332 return NULL_TREE;
9333
9334 arg = fold_convert_loc (loc, type, arg);
9335 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9336 }
9337
9338 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9339
9340 static tree
9341 fold_builtin_abs (location_t loc, tree arg, tree type)
9342 {
9343 if (!validate_arg (arg, INTEGER_TYPE))
9344 return NULL_TREE;
9345
9346 arg = fold_convert_loc (loc, type, arg);
9347 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9348 }
9349
9350 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9351
9352 static tree
9353 fold_builtin_carg (location_t loc, tree arg, tree type)
9354 {
9355 if (validate_arg (arg, COMPLEX_TYPE)
9356 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9357 {
9358 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9359
9360 if (atan2_fn)
9361 {
9362 tree new_arg = builtin_save_expr (arg);
9363 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9364 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9365 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9366 }
9367 }
9368
9369 return NULL_TREE;
9370 }
9371
9372 /* Fold a call to builtin frexp; we can assume the base is 2. */
9373
9374 static tree
9375 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9376 {
9377 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9378 return NULL_TREE;
9379
9380 STRIP_NOPS (arg0);
9381
9382 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9383 return NULL_TREE;
9384
9385 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9386
9387 /* Proceed if a valid pointer type was passed in. */
9388 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9389 {
9390 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9391 tree frac, exp;
9392
9393 switch (value->cl)
9394 {
9395 case rvc_zero:
9396 /* For +-0, return (*exp = 0, +-0). */
9397 exp = integer_zero_node;
9398 frac = arg0;
9399 break;
9400 case rvc_nan:
9401 case rvc_inf:
9402 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9403 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9404 case rvc_normal:
9405 {
9406 /* Since the frexp function always expects base 2, and in
9407 GCC normalized significands are already in the range
9408 [0.5, 1.0), we have exactly what frexp wants. */
9409 REAL_VALUE_TYPE frac_rvt = *value;
9410 SET_REAL_EXP (&frac_rvt, 0);
9411 frac = build_real (rettype, frac_rvt);
9412 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9413 }
9414 break;
9415 default:
9416 gcc_unreachable ();
9417 }
9418
9419 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9420 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9421 TREE_SIDE_EFFECTS (arg1) = 1;
9422 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9423 }
9424
9425 return NULL_TREE;
9426 }
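/* Editorial illustration (informal): for a constant argument such as

       int e;
       double f = __builtin_frexp (8.0, &e);

   the folding yields f == 0.5 and e == 4, since 8.0 is 0.5 * 2^4 and
   GCC already keeps significands normalized to [0.5, 1.0).  */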
9427
9428 /* Fold a call to builtin modf. */
9429
9430 static tree
9431 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9432 {
9433 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9434 return NULL_TREE;
9435
9436 STRIP_NOPS (arg0);
9437
9438 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9439 return NULL_TREE;
9440
9441 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9442
9443 /* Proceed if a valid pointer type was passed in. */
9444 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9445 {
9446 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9447 REAL_VALUE_TYPE trunc, frac;
9448
9449 switch (value->cl)
9450 {
9451 case rvc_nan:
9452 case rvc_zero:
9453 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9454 trunc = frac = *value;
9455 break;
9456 case rvc_inf:
9457 /* For +-Inf, return (*arg1 = arg0, +-0). */
9458 frac = dconst0;
9459 frac.sign = value->sign;
9460 trunc = *value;
9461 break;
9462 case rvc_normal:
9463 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9464 real_trunc (&trunc, VOIDmode, value);
9465 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9466 /* If the original number was negative and already
9467 integral, then the fractional part is -0.0. */
9468 if (value->sign && frac.cl == rvc_zero)
9469 frac.sign = value->sign;
9470 break;
9471 }
9472
9473 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9474 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9475 build_real (rettype, trunc));
9476 TREE_SIDE_EFFECTS (arg1) = 1;
9477 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9478 build_real (rettype, frac));
9479 }
9480
9481 return NULL_TREE;
9482 }
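/* Editorial illustration (informal): with a constant argument

       double ip;
       double f = __builtin_modf (-3.25, &ip);

   the folding yields ip == -3.0 and f == -0.25; for an already integral
   negative value such as -2.0 the fractional part keeps the sign and
   becomes -0.0.  */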
9483
9484 /* Given a location LOC, an interclass builtin function decl FNDECL
9485 and its single argument ARG, return a folded expression computing
9486 the same, or NULL_TREE if we either couldn't or didn't want to fold
9487 (the latter happens if there's an RTL instruction available). */
9488
9489 static tree
9490 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9491 {
9492 machine_mode mode;
9493
9494 if (!validate_arg (arg, REAL_TYPE))
9495 return NULL_TREE;
9496
9497 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9498 return NULL_TREE;
9499
9500 mode = TYPE_MODE (TREE_TYPE (arg));
9501
9502 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9503
9504 /* If there is no optab, try generic code. */
9505 switch (DECL_FUNCTION_CODE (fndecl))
9506 {
9507 tree result;
9508
9509 CASE_FLT_FN (BUILT_IN_ISINF):
9510 {
9511 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9512 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9513 tree type = TREE_TYPE (arg);
9514 REAL_VALUE_TYPE r;
9515 char buf[128];
9516
9517 if (is_ibm_extended)
9518 {
9519 /* NaN and Inf are encoded in the high-order double value
9520 only. The low-order value is not significant. */
9521 type = double_type_node;
9522 mode = DFmode;
9523 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9524 }
9525 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9526 real_from_string (&r, buf);
9527 result = build_call_expr (isgr_fn, 2,
9528 fold_build1_loc (loc, ABS_EXPR, type, arg),
9529 build_real (type, r));
9530 return result;
9531 }
9532 CASE_FLT_FN (BUILT_IN_FINITE):
9533 case BUILT_IN_ISFINITE:
9534 {
9535 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9536 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9537 tree type = TREE_TYPE (arg);
9538 REAL_VALUE_TYPE r;
9539 char buf[128];
9540
9541 if (is_ibm_extended)
9542 {
9543 /* NaN and Inf are encoded in the high-order double value
9544 only. The low-order value is not significant. */
9545 type = double_type_node;
9546 mode = DFmode;
9547 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9548 }
9549 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9550 real_from_string (&r, buf);
9551 result = build_call_expr (isle_fn, 2,
9552 fold_build1_loc (loc, ABS_EXPR, type, arg),
9553 build_real (type, r));
9554 /*result = fold_build2_loc (loc, UNGT_EXPR,
9555 TREE_TYPE (TREE_TYPE (fndecl)),
9556 fold_build1_loc (loc, ABS_EXPR, type, arg),
9557 build_real (type, r));
9558 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9559 TREE_TYPE (TREE_TYPE (fndecl)),
9560 result);*/
9561 return result;
9562 }
9563 case BUILT_IN_ISNORMAL:
9564 {
9565 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9566 islessequal(fabs(x),DBL_MAX). */
9567 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9568 tree type = TREE_TYPE (arg);
9569 tree orig_arg, max_exp, min_exp;
9570 machine_mode orig_mode = mode;
9571 REAL_VALUE_TYPE rmax, rmin;
9572 char buf[128];
9573
9574 orig_arg = arg = builtin_save_expr (arg);
9575 if (is_ibm_extended)
9576 {
9577 /* Use double to test the normal range of IBM extended
9578 precision. Emin for IBM extended precision is
9579 different to emin for IEEE double, being 53 higher
9580 since the low double exponent is at least 53 lower
9581 than the high double exponent. */
9582 type = double_type_node;
9583 mode = DFmode;
9584 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9585 }
9586 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9587
9588 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9589 real_from_string (&rmax, buf);
9590 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9591 real_from_string (&rmin, buf);
9592 max_exp = build_real (type, rmax);
9593 min_exp = build_real (type, rmin);
9594
9595 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9596 if (is_ibm_extended)
9597 {
9598 /* Testing the high end of the range is done just using
9599 the high double, using the same test as isfinite().
9600 For the subnormal end of the range we first test the
9601 high double, then if its magnitude is equal to the
9602 limit of 0x1p-969, we test whether the low double is
9603 non-zero and opposite sign to the high double. */
9604 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9605 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9606 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9607 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9608 arg, min_exp);
9609 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9610 complex_double_type_node, orig_arg);
9611 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9612 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9613 tree zero = build_real (type, dconst0);
9614 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9615 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9616 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9617 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9618 fold_build3 (COND_EXPR,
9619 integer_type_node,
9620 hilt, logt, lolt));
9621 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9622 eq_min, ok_lo);
9623 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9624 gt_min, eq_min);
9625 }
9626 else
9627 {
9628 tree const isge_fn
9629 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9630 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9631 }
9632 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9633 max_exp, min_exp);
9634 return result;
9635 }
9636 default:
9637 break;
9638 }
9639
9640 return NULL_TREE;
9641 }
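/* Editorial illustration (informal): for plain double the ISNORMAL case
   builds the lower bound from "0x1p-1022" (DBL_MIN, as emin is -1021),
   so in the absence of a target instruction

       isnormal (x)

   lowers to roughly

       isgreaterequal (fabs (x), 0x1p-1022) & islessequal (fabs (x), DBL_MAX)

   with the IBM double-double path using the adjusted bounds described in
   the comments above.  */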
9642
9643 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9644 ARG is the argument for the call. */
9645
9646 static tree
9647 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9648 {
9649 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9650
9651 if (!validate_arg (arg, REAL_TYPE))
9652 return NULL_TREE;
9653
9654 switch (builtin_index)
9655 {
9656 case BUILT_IN_ISINF:
9657 if (!HONOR_INFINITIES (arg))
9658 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9659
9660 return NULL_TREE;
9661
9662 case BUILT_IN_ISINF_SIGN:
9663 {
9664 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9665 /* In a boolean context, GCC will fold the inner COND_EXPR to
9666 1. So e.g. "if (isinf_sign(x))" would be folded to just
9667 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9668 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9669 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9670 tree tmp = NULL_TREE;
9671
9672 arg = builtin_save_expr (arg);
9673
9674 if (signbit_fn && isinf_fn)
9675 {
9676 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9677 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9678
9679 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9680 signbit_call, integer_zero_node);
9681 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9682 isinf_call, integer_zero_node);
9683
9684 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9685 integer_minus_one_node, integer_one_node);
9686 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9687 isinf_call, tmp,
9688 integer_zero_node);
9689 }
9690
9691 return tmp;
9692 }
9693
9694 case BUILT_IN_ISFINITE:
9695 if (!HONOR_NANS (arg)
9696 && !HONOR_INFINITIES (arg))
9697 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9698
9699 return NULL_TREE;
9700
9701 case BUILT_IN_ISNAN:
9702 if (!HONOR_NANS (arg))
9703 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9704
9705 {
9706 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9707 if (is_ibm_extended)
9708 {
9709 /* NaN and Inf are encoded in the high-order double value
9710 only. The low-order value is not significant. */
9711 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9712 }
9713 }
9714 arg = builtin_save_expr (arg);
9715 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9716
9717 default:
9718 gcc_unreachable ();
9719 }
9720 }
9721
9722 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9723 This builtin will generate code to return the appropriate floating
9724 point classification depending on the value of the floating point
9725 number passed in. The possible return values must be supplied as
9726 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9727 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9728 one floating point argument which is "type generic". */
9729
9730 static tree
9731 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9732 {
9733 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9734 arg, type, res, tmp;
9735 machine_mode mode;
9736 REAL_VALUE_TYPE r;
9737 char buf[128];
9738
9739 /* Verify the required arguments in the original call. */
9740 if (nargs != 6
9741 || !validate_arg (args[0], INTEGER_TYPE)
9742 || !validate_arg (args[1], INTEGER_TYPE)
9743 || !validate_arg (args[2], INTEGER_TYPE)
9744 || !validate_arg (args[3], INTEGER_TYPE)
9745 || !validate_arg (args[4], INTEGER_TYPE)
9746 || !validate_arg (args[5], REAL_TYPE))
9747 return NULL_TREE;
9748
9749 fp_nan = args[0];
9750 fp_infinite = args[1];
9751 fp_normal = args[2];
9752 fp_subnormal = args[3];
9753 fp_zero = args[4];
9754 arg = args[5];
9755 type = TREE_TYPE (arg);
9756 mode = TYPE_MODE (type);
9757 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9758
9759 /* fpclassify(x) ->
9760 isnan(x) ? FP_NAN :
9761 (fabs(x) == Inf ? FP_INFINITE :
9762 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9763 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9764
9765 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9766 build_real (type, dconst0));
9767 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9768 tmp, fp_zero, fp_subnormal);
9769
9770 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9771 real_from_string (&r, buf);
9772 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9773 arg, build_real (type, r));
9774 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9775
9776 if (HONOR_INFINITIES (mode))
9777 {
9778 real_inf (&r);
9779 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9780 build_real (type, r));
9781 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9782 fp_infinite, res);
9783 }
9784
9785 if (HONOR_NANS (mode))
9786 {
9787 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9788 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9789 }
9790
9791 return res;
9792 }
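/* Editorial illustration (informal): a <math.h> fpclassify macro
   commonly expands to something like

       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                             FP_SUBNORMAL, FP_ZERO, x)

   and the folding above produces the nested conditional shown in the
   comment; with a constant x such as 0.0 the whole expression further
   folds to the FP_ZERO argument.  */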
9793
9794 /* Fold a call to an unordered comparison function such as
9795 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9796 being called and ARG0 and ARG1 are the arguments for the call.
9797 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9798 the opposite of the desired result. UNORDERED_CODE is used
9799 for modes that can hold NaNs and ORDERED_CODE is used for
9800 the rest. */
9801
9802 static tree
9803 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9804 enum tree_code unordered_code,
9805 enum tree_code ordered_code)
9806 {
9807 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9808 enum tree_code code;
9809 tree type0, type1;
9810 enum tree_code code0, code1;
9811 tree cmp_type = NULL_TREE;
9812
9813 type0 = TREE_TYPE (arg0);
9814 type1 = TREE_TYPE (arg1);
9815
9816 code0 = TREE_CODE (type0);
9817 code1 = TREE_CODE (type1);
9818
9819 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9820 /* Choose the wider of two real types. */
9821 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9822 ? type0 : type1;
9823 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9824 cmp_type = type0;
9825 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9826 cmp_type = type1;
9827
9828 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9829 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9830
9831 if (unordered_code == UNORDERED_EXPR)
9832 {
9833 if (!HONOR_NANS (arg0))
9834 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9835 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9836 }
9837
9838 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9839 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9840 fold_build2_loc (loc, code, type, arg0, arg1));
9841 }
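/* Editorial illustration (informal): because the codes passed in are the
   opposite of the desired result,

       __builtin_isgreater (x, y)

   folds to the negation of an UNLE_EXPR, conceptually

       !(__builtin_isunordered (x, y) || x <= y)   // ordered && x > y

   when NaNs are honored, and to !(x <= y) otherwise, so no
   floating-point exception is raised for quiet-NaN operands.  */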
9842
9843 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9844 arithmetic if it can never overflow, or into internal functions that
9845 return both the result of the arithmetic and an overflow flag in
9846 a complex integer result, or some other check for overflow.
9847 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9848 checking part of that. */
9849
9850 static tree
9851 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9852 tree arg0, tree arg1, tree arg2)
9853 {
9854 enum internal_fn ifn = IFN_LAST;
9855 /* The code of the expression corresponding to the built-in. */
9856 enum tree_code opcode = ERROR_MARK;
9857 bool ovf_only = false;
9858
9859 switch (fcode)
9860 {
9861 case BUILT_IN_ADD_OVERFLOW_P:
9862 ovf_only = true;
9863 /* FALLTHRU */
9864 case BUILT_IN_ADD_OVERFLOW:
9865 case BUILT_IN_SADD_OVERFLOW:
9866 case BUILT_IN_SADDL_OVERFLOW:
9867 case BUILT_IN_SADDLL_OVERFLOW:
9868 case BUILT_IN_UADD_OVERFLOW:
9869 case BUILT_IN_UADDL_OVERFLOW:
9870 case BUILT_IN_UADDLL_OVERFLOW:
9871 opcode = PLUS_EXPR;
9872 ifn = IFN_ADD_OVERFLOW;
9873 break;
9874 case BUILT_IN_SUB_OVERFLOW_P:
9875 ovf_only = true;
9876 /* FALLTHRU */
9877 case BUILT_IN_SUB_OVERFLOW:
9878 case BUILT_IN_SSUB_OVERFLOW:
9879 case BUILT_IN_SSUBL_OVERFLOW:
9880 case BUILT_IN_SSUBLL_OVERFLOW:
9881 case BUILT_IN_USUB_OVERFLOW:
9882 case BUILT_IN_USUBL_OVERFLOW:
9883 case BUILT_IN_USUBLL_OVERFLOW:
9884 opcode = MINUS_EXPR;
9885 ifn = IFN_SUB_OVERFLOW;
9886 break;
9887 case BUILT_IN_MUL_OVERFLOW_P:
9888 ovf_only = true;
9889 /* FALLTHRU */
9890 case BUILT_IN_MUL_OVERFLOW:
9891 case BUILT_IN_SMUL_OVERFLOW:
9892 case BUILT_IN_SMULL_OVERFLOW:
9893 case BUILT_IN_SMULLL_OVERFLOW:
9894 case BUILT_IN_UMUL_OVERFLOW:
9895 case BUILT_IN_UMULL_OVERFLOW:
9896 case BUILT_IN_UMULLL_OVERFLOW:
9897 opcode = MULT_EXPR;
9898 ifn = IFN_MUL_OVERFLOW;
9899 break;
9900 default:
9901 gcc_unreachable ();
9902 }
9903
9904 /* For the "generic" overloads, the first two arguments can have different
9905 types and the last argument determines the target type to use to check
9906 for overflow. The arguments of the other overloads all have the same
9907 type. */
9908 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9909
9910 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9911 arguments are constant, attempt to fold the built-in call into a constant
9912 expression indicating whether or not it detected an overflow. */
9913 if (ovf_only
9914 && TREE_CODE (arg0) == INTEGER_CST
9915 && TREE_CODE (arg1) == INTEGER_CST)
9916 /* Perform the computation in the target type and check for overflow. */
9917 return omit_one_operand_loc (loc, boolean_type_node,
9918 arith_overflowed_p (opcode, type, arg0, arg1)
9919 ? boolean_true_node : boolean_false_node,
9920 arg2);
9921
9922 tree intres, ovfres;
9923 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9924 {
9925 intres = fold_binary_loc (loc, opcode, type,
9926 fold_convert_loc (loc, type, arg0),
9927 fold_convert_loc (loc, type, arg1));
9928 if (TREE_OVERFLOW (intres))
9929 intres = drop_tree_overflow (intres);
9930 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9931 ? boolean_true_node : boolean_false_node);
9932 }
9933 else
9934 {
9935 tree ctype = build_complex_type (type);
9936 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9937 arg0, arg1);
9938 tree tgt = save_expr (call);
9939 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9940 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9941 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9942 }
9943
9944 if (ovf_only)
9945 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9946
9947 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9948 tree store
9949 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9950 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9951 }
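/* Editorial illustration (informal sketch): with constant operands the
   _p variants fold to a boolean, e.g.

       __builtin_add_overflow_p (1, 2, (int) 0)        // false
       __builtin_mul_overflow_p (1 << 30, 4, (int) 0)  // true for 32-bit int

   while the value-returning forms such as __builtin_add_overflow (a, b, &r)
   become roughly

       t = .ADD_OVERFLOW (a, b);   // internal fn, complex integer result
       r = __real__ t;             // arithmetic result
       ovf = __imag__ t != 0;      // overflow flag

   where t and ovf are hypothetical temporaries used only for this
   sketch.  */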
9952
9953 /* Fold a call to __builtin_FILE to a constant string. */
9954
9955 static inline tree
9956 fold_builtin_FILE (location_t loc)
9957 {
9958 if (const char *fname = LOCATION_FILE (loc))
9959 {
9960 /* The documentation says this builtin is equivalent to the preprocessor
9961 __FILE__ macro so it appears appropriate to use the same file prefix
9962 mappings. */
9963 fname = remap_macro_filename (fname);
9964 return build_string_literal (strlen (fname) + 1, fname);
9965 }
9966
9967 return build_string_literal (1, "");
9968 }
9969
9970 /* Fold a call to __builtin_FUNCTION to a constant string. */
9971
9972 static inline tree
9973 fold_builtin_FUNCTION ()
9974 {
9975 const char *name = "";
9976
9977 if (current_function_decl)
9978 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9979
9980 return build_string_literal (strlen (name) + 1, name);
9981 }
9982
9983 /* Fold a call to __builtin_LINE to an integer constant. */
9984
9985 static inline tree
9986 fold_builtin_LINE (location_t loc, tree type)
9987 {
9988 return build_int_cst (type, LOCATION_LINE (loc));
9989 }
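/* Editorial illustration (informal): inside

       void f (void)
       {
         int line = __builtin_LINE ();            // folds to this line's number
         const char *fn = __builtin_FUNCTION ();  // folds to "f"
         const char *file = __builtin_FILE ();    // folds to the file name
       }

   the three folders substitute the call's location, the printable name
   of current_function_decl, and the file name after any
   -ffile-prefix-map / -fmacro-prefix-map remapping.  */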
9990
9991 /* Fold a call to built-in function FNDECL with 0 arguments.
9992 This function returns NULL_TREE if no simplification was possible. */
9993
9994 static tree
9995 fold_builtin_0 (location_t loc, tree fndecl)
9996 {
9997 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9998 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9999 switch (fcode)
10000 {
10001 case BUILT_IN_FILE:
10002 return fold_builtin_FILE (loc);
10003
10004 case BUILT_IN_FUNCTION:
10005 return fold_builtin_FUNCTION ();
10006
10007 case BUILT_IN_LINE:
10008 return fold_builtin_LINE (loc, type);
10009
10010 CASE_FLT_FN (BUILT_IN_INF):
10011 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10012 case BUILT_IN_INFD32:
10013 case BUILT_IN_INFD64:
10014 case BUILT_IN_INFD128:
10015 return fold_builtin_inf (loc, type, true);
10016
10017 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10018 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10019 return fold_builtin_inf (loc, type, false);
10020
10021 case BUILT_IN_CLASSIFY_TYPE:
10022 return fold_builtin_classify_type (NULL_TREE);
10023
10024 default:
10025 break;
10026 }
10027 return NULL_TREE;
10028 }
10029
10030 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10031 This function returns NULL_TREE if no simplification was possible. */
10032
10033 static tree
10034 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
10035 {
10036 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10037 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10038
10039 if (TREE_CODE (arg0) == ERROR_MARK)
10040 return NULL_TREE;
10041
10042 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10043 return ret;
10044
10045 switch (fcode)
10046 {
10047 case BUILT_IN_CONSTANT_P:
10048 {
10049 tree val = fold_builtin_constant_p (arg0);
10050
10051 /* Gimplification will pull the CALL_EXPR for the builtin out of
10052 an if condition. When not optimizing, we'll not CSE it back.
10053 To avoid regressions such as link errors, return false now. */
10054 if (!val && !optimize)
10055 val = integer_zero_node;
10056
10057 return val;
10058 }
10059
10060 case BUILT_IN_CLASSIFY_TYPE:
10061 return fold_builtin_classify_type (arg0);
10062
10063 case BUILT_IN_STRLEN:
10064 return fold_builtin_strlen (loc, type, arg0);
10065
10066 CASE_FLT_FN (BUILT_IN_FABS):
10067 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10068 case BUILT_IN_FABSD32:
10069 case BUILT_IN_FABSD64:
10070 case BUILT_IN_FABSD128:
10071 return fold_builtin_fabs (loc, arg0, type);
10072
10073 case BUILT_IN_ABS:
10074 case BUILT_IN_LABS:
10075 case BUILT_IN_LLABS:
10076 case BUILT_IN_IMAXABS:
10077 return fold_builtin_abs (loc, arg0, type);
10078
10079 CASE_FLT_FN (BUILT_IN_CONJ):
10080 if (validate_arg (arg0, COMPLEX_TYPE)
10081 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10082 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10083 break;
10084
10085 CASE_FLT_FN (BUILT_IN_CREAL):
10086 if (validate_arg (arg0, COMPLEX_TYPE)
10087 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10088 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10089 break;
10090
10091 CASE_FLT_FN (BUILT_IN_CIMAG):
10092 if (validate_arg (arg0, COMPLEX_TYPE)
10093 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10094 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10095 break;
10096
10097 CASE_FLT_FN (BUILT_IN_CARG):
10098 return fold_builtin_carg (loc, arg0, type);
10099
10100 case BUILT_IN_ISASCII:
10101 return fold_builtin_isascii (loc, arg0);
10102
10103 case BUILT_IN_TOASCII:
10104 return fold_builtin_toascii (loc, arg0);
10105
10106 case BUILT_IN_ISDIGIT:
10107 return fold_builtin_isdigit (loc, arg0);
10108
10109 CASE_FLT_FN (BUILT_IN_FINITE):
10110 case BUILT_IN_FINITED32:
10111 case BUILT_IN_FINITED64:
10112 case BUILT_IN_FINITED128:
10113 case BUILT_IN_ISFINITE:
10114 {
10115 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10116 if (ret)
10117 return ret;
10118 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10119 }
10120
10121 CASE_FLT_FN (BUILT_IN_ISINF):
10122 case BUILT_IN_ISINFD32:
10123 case BUILT_IN_ISINFD64:
10124 case BUILT_IN_ISINFD128:
10125 {
10126 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10127 if (ret)
10128 return ret;
10129 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10130 }
10131
10132 case BUILT_IN_ISNORMAL:
10133 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10134
10135 case BUILT_IN_ISINF_SIGN:
10136 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10137
10138 CASE_FLT_FN (BUILT_IN_ISNAN):
10139 case BUILT_IN_ISNAND32:
10140 case BUILT_IN_ISNAND64:
10141 case BUILT_IN_ISNAND128:
10142 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10143
10144 case BUILT_IN_FREE:
10145 if (integer_zerop (arg0))
10146 return build_empty_stmt (loc);
10147 break;
10148
10149 default:
10150 break;
10151 }
10152
10153 return NULL_TREE;
10154
10155 }
10156
10157 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10158 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10159 if no simplification was possible. */
10160
10161 static tree
10162 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10163 {
10164 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10165 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10166
10167 if (TREE_CODE (arg0) == ERROR_MARK
10168 || TREE_CODE (arg1) == ERROR_MARK)
10169 return NULL_TREE;
10170
10171 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10172 return ret;
10173
10174 switch (fcode)
10175 {
10176 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10177 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10178 if (validate_arg (arg0, REAL_TYPE)
10179 && validate_arg (arg1, POINTER_TYPE))
10180 return do_mpfr_lgamma_r (arg0, arg1, type);
10181 break;
10182
10183 CASE_FLT_FN (BUILT_IN_FREXP):
10184 return fold_builtin_frexp (loc, arg0, arg1, type);
10185
10186 CASE_FLT_FN (BUILT_IN_MODF):
10187 return fold_builtin_modf (loc, arg0, arg1, type);
10188
10189 case BUILT_IN_STRSPN:
10190 return fold_builtin_strspn (loc, expr, arg0, arg1);
10191
10192 case BUILT_IN_STRCSPN:
10193 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10194
10195 case BUILT_IN_STRPBRK:
10196 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10197
10198 case BUILT_IN_EXPECT:
10199 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10200
10201 case BUILT_IN_ISGREATER:
10202 return fold_builtin_unordered_cmp (loc, fndecl,
10203 arg0, arg1, UNLE_EXPR, LE_EXPR);
10204 case BUILT_IN_ISGREATEREQUAL:
10205 return fold_builtin_unordered_cmp (loc, fndecl,
10206 arg0, arg1, UNLT_EXPR, LT_EXPR);
10207 case BUILT_IN_ISLESS:
10208 return fold_builtin_unordered_cmp (loc, fndecl,
10209 arg0, arg1, UNGE_EXPR, GE_EXPR);
10210 case BUILT_IN_ISLESSEQUAL:
10211 return fold_builtin_unordered_cmp (loc, fndecl,
10212 arg0, arg1, UNGT_EXPR, GT_EXPR);
10213 case BUILT_IN_ISLESSGREATER:
10214 return fold_builtin_unordered_cmp (loc, fndecl,
10215 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10216 case BUILT_IN_ISUNORDERED:
10217 return fold_builtin_unordered_cmp (loc, fndecl,
10218 arg0, arg1, UNORDERED_EXPR,
10219 NOP_EXPR);
10220
10221 /* We do the folding for va_start in the expander. */
10222 case BUILT_IN_VA_START:
10223 break;
10224
10225 case BUILT_IN_OBJECT_SIZE:
10226 return fold_builtin_object_size (arg0, arg1);
10227
10228 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10229 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10230
10231 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10232 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10233
10234 default:
10235 break;
10236 }
10237 return NULL_TREE;
10238 }
10239
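/* Illustrative sketch added for exposition (not part of the upstream file):
   the BUILT_IN_ISGREATER ... BUILT_IN_ISUNORDERED cases above fold the C99
   comparison macros into single unordered comparisons.  On a target that
   honors NaNs:

     int
     isgreater_example (double x, double y)
     {
       // Folds to !(x UNLE y): true only when neither operand is a NaN
       // and x > y, without raising "invalid" for quiet NaNs.
       return __builtin_isgreater (x, y);
     }
*/
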
10240 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10241 and ARG2.
10242 This function returns NULL_TREE if no simplification was possible. */
10243
10244 static tree
10245 fold_builtin_3 (location_t loc, tree fndecl,
10246 tree arg0, tree arg1, tree arg2)
10247 {
10248 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10249 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10250
10251 if (TREE_CODE (arg0) == ERROR_MARK
10252 || TREE_CODE (arg1) == ERROR_MARK
10253 || TREE_CODE (arg2) == ERROR_MARK)
10254 return NULL_TREE;
10255
10256 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10257 arg0, arg1, arg2))
10258 return ret;
10259
10260 switch (fcode)
10261 {
10262
10263 CASE_FLT_FN (BUILT_IN_SINCOS):
10264 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10265
10266 CASE_FLT_FN (BUILT_IN_REMQUO):
10267 if (validate_arg (arg0, REAL_TYPE)
10268 && validate_arg (arg1, REAL_TYPE)
10269 && validate_arg (arg2, POINTER_TYPE))
10270 return do_mpfr_remquo (arg0, arg1, arg2);
10271 break;
10272
10273 case BUILT_IN_MEMCMP:
10274 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10275
10276 case BUILT_IN_EXPECT:
10277 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10278
10279 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10280 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10281
10282 case BUILT_IN_ADD_OVERFLOW:
10283 case BUILT_IN_SUB_OVERFLOW:
10284 case BUILT_IN_MUL_OVERFLOW:
10285 case BUILT_IN_ADD_OVERFLOW_P:
10286 case BUILT_IN_SUB_OVERFLOW_P:
10287 case BUILT_IN_MUL_OVERFLOW_P:
10288 case BUILT_IN_SADD_OVERFLOW:
10289 case BUILT_IN_SADDL_OVERFLOW:
10290 case BUILT_IN_SADDLL_OVERFLOW:
10291 case BUILT_IN_SSUB_OVERFLOW:
10292 case BUILT_IN_SSUBL_OVERFLOW:
10293 case BUILT_IN_SSUBLL_OVERFLOW:
10294 case BUILT_IN_SMUL_OVERFLOW:
10295 case BUILT_IN_SMULL_OVERFLOW:
10296 case BUILT_IN_SMULLL_OVERFLOW:
10297 case BUILT_IN_UADD_OVERFLOW:
10298 case BUILT_IN_UADDL_OVERFLOW:
10299 case BUILT_IN_UADDLL_OVERFLOW:
10300 case BUILT_IN_USUB_OVERFLOW:
10301 case BUILT_IN_USUBL_OVERFLOW:
10302 case BUILT_IN_USUBLL_OVERFLOW:
10303 case BUILT_IN_UMUL_OVERFLOW:
10304 case BUILT_IN_UMULL_OVERFLOW:
10305 case BUILT_IN_UMULLL_OVERFLOW:
10306 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10307
10308 default:
10309 break;
10310 }
10311 return NULL_TREE;
10312 }
10313
10314 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10315 ARGS is an array of NARGS arguments. IGNORE is true if the result
10316 of the function call is ignored. This function returns NULL_TREE
10317 if no simplification was possible. */
10318
10319 static tree
10320 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10321 int nargs, bool)
10322 {
10323 tree ret = NULL_TREE;
10324
10325 switch (nargs)
10326 {
10327 case 0:
10328 ret = fold_builtin_0 (loc, fndecl);
10329 break;
10330 case 1:
10331 ret = fold_builtin_1 (loc, fndecl, args[0]);
10332 break;
10333 case 2:
10334 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10335 break;
10336 case 3:
10337 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10338 break;
10339 default:
10340 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10341 break;
10342 }
10343 if (ret)
10344 {
10345 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10346 SET_EXPR_LOCATION (ret, loc);
10347 return ret;
10348 }
10349 return NULL_TREE;
10350 }
10351
10352 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10353 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10354 of arguments in ARGS to be omitted. OLDNARGS is the number of
10355 elements in ARGS. */
10356
10357 static tree
10358 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10359 int skip, tree fndecl, int n, va_list newargs)
10360 {
10361 int nargs = oldnargs - skip + n;
10362 tree *buffer;
10363
10364 if (n > 0)
10365 {
10366 int i, j;
10367
10368 buffer = XALLOCAVEC (tree, nargs);
10369 for (i = 0; i < n; i++)
10370 buffer[i] = va_arg (newargs, tree);
10371 for (j = skip; j < oldnargs; j++, i++)
10372 buffer[i] = args[j];
10373 }
10374 else
10375 buffer = args + skip;
10376
10377 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10378 }
10379
10380 /* Return true if FNDECL shouldn't be folded right now.
10381 If a built-in function has an inline attribute always_inline
10382 wrapper, defer folding it until after always_inline functions have
10383 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10384 might not be performed. */
10385
10386 bool
10387 avoid_folding_inline_builtin (tree fndecl)
10388 {
10389 return (DECL_DECLARED_INLINE_P (fndecl)
10390 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10391 && cfun
10392 && !cfun->always_inline_functions_inlined
10393 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10394 }
10395
10396 /* A wrapper function for builtin folding that prevents warnings for
10397 "statement without effect" and the like, caused by removing the
10398 call node before the warning is generated. */
10399
10400 tree
10401 fold_call_expr (location_t loc, tree exp, bool ignore)
10402 {
10403 tree ret = NULL_TREE;
10404 tree fndecl = get_callee_fndecl (exp);
10405 if (fndecl && fndecl_built_in_p (fndecl)
10406 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10407 yet. Defer folding until we see all the arguments
10408 (after inlining). */
10409 && !CALL_EXPR_VA_ARG_PACK (exp))
10410 {
10411 int nargs = call_expr_nargs (exp);
10412
10413 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set; instead
10414 the last argument is __builtin_va_arg_pack (). Defer folding
10415 even in that case, until arguments are finalized. */
10416 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10417 {
10418 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10419 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10420 return NULL_TREE;
10421 }
10422
10423 if (avoid_folding_inline_builtin (fndecl))
10424 return NULL_TREE;
10425
10426 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10427 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10428 CALL_EXPR_ARGP (exp), ignore);
10429 else
10430 {
10431 tree *args = CALL_EXPR_ARGP (exp);
10432 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10433 if (ret)
10434 return ret;
10435 }
10436 }
10437 return NULL_TREE;
10438 }
10439
10440 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10441 N arguments are passed in the array ARGARRAY. Return a folded
10442 expression or NULL_TREE if no simplification was possible. */
10443
10444 tree
10445 fold_builtin_call_array (location_t loc, tree,
10446 tree fn,
10447 int n,
10448 tree *argarray)
10449 {
10450 if (TREE_CODE (fn) != ADDR_EXPR)
10451 return NULL_TREE;
10452
10453 tree fndecl = TREE_OPERAND (fn, 0);
10454 if (TREE_CODE (fndecl) == FUNCTION_DECL
10455 && fndecl_built_in_p (fndecl))
10456 {
10457 /* If the last argument is __builtin_va_arg_pack (), arguments to this
10458 function are not finalized yet. Defer folding until they are. */
10459 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10460 {
10461 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10462 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10463 return NULL_TREE;
10464 }
10465 if (avoid_folding_inline_builtin (fndecl))
10466 return NULL_TREE;
10467 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10468 return targetm.fold_builtin (fndecl, n, argarray, false);
10469 else
10470 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10471 }
10472
10473 return NULL_TREE;
10474 }
10475
10476 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10477 along with N new arguments specified as the "..." parameters. SKIP
10478 is the number of arguments in EXP to be omitted. This function is used
10479 to do varargs-to-varargs transformations. */
10480
10481 static tree
10482 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10483 {
10484 va_list ap;
10485 tree t;
10486
10487 va_start (ap, n);
10488 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10489 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10490 va_end (ap);
10491
10492 return t;
10493 }
10494
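/* Illustrative sketch added for exposition (not part of the upstream file),
   using hypothetical names: SKIP drops that many leading arguments of EXP,
   and the N new "..." arguments are placed in front of whatever remains.
   A transform that replaces the first two arguments of a call with one
   precomputed argument while keeping the rest would be written as:

     tree new_call = rewrite_call_expr (loc, exp, /*skip=*/2, other_fndecl,
                                        /*n=*/1, new_arg);
*/
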
10495 /* Validate a single argument ARG against a tree code CODE representing
10496 a type. Return true when the argument is valid. */
10497
10498 static bool
10499 validate_arg (const_tree arg, enum tree_code code)
10500 {
10501 if (!arg)
10502 return false;
10503 else if (code == POINTER_TYPE)
10504 return POINTER_TYPE_P (TREE_TYPE (arg));
10505 else if (code == INTEGER_TYPE)
10506 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10507 return code == TREE_CODE (TREE_TYPE (arg));
10508 }
10509
10510 /* This function validates the types of a function call argument list
10511 against a specified list of tree_codes. If the last specifier is a 0,
10512 that represents an ellipsis, otherwise the last specifier must be a
10513 VOID_TYPE.
10514
10515 This is the GIMPLE version of validate_arglist. Eventually we want to
10516 completely convert builtins.c to work from GIMPLEs and the tree based
10517 validate_arglist will then be removed. */
10518
10519 bool
10520 validate_gimple_arglist (const gcall *call, ...)
10521 {
10522 enum tree_code code;
10523 bool res = false;
10524 va_list ap;
10525 const_tree arg;
10526 size_t i;
10527
10528 va_start (ap, call);
10529 i = 0;
10530
10531 do
10532 {
10533 code = (enum tree_code) va_arg (ap, int);
10534 switch (code)
10535 {
10536 case 0:
10537 /* This signifies an ellipsis; any further arguments are all ok. */
10538 res = true;
10539 goto end;
10540 case VOID_TYPE:
10541 /* This signifies an endlink; if no arguments remain, return
10542 true, otherwise return false. */
10543 res = (i == gimple_call_num_args (call));
10544 goto end;
10545 default:
10546 /* If no parameters remain or the parameter's code does not
10547 match the specified code, return false. Otherwise continue
10548 checking any remaining arguments. */
10549 arg = gimple_call_arg (call, i++);
10550 if (!validate_arg (arg, code))
10551 goto end;
10552 break;
10553 }
10554 }
10555 while (1);
10556
10557 /* We need gotos here since we can only call va_end once in a
10558 function. */
10559 end: ;
10560 va_end (ap);
10561
10562 return res;
10563 }
10564
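/* Illustrative sketch added for exposition (not part of the upstream file):
   typical uses of validate_gimple_arglist.  A trailing VOID_TYPE closes a
   fixed argument list, while a trailing 0 accepts any further arguments:

     // Exactly one pointer argument followed by one integral argument.
     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       return false;

     // At least one pointer argument; anything may follow.
     if (!validate_gimple_arglist (call, POINTER_TYPE, 0))
       return false;
*/
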
10565 /* Default target-specific builtin expander that does nothing. */
10566
10567 rtx
10568 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10569 rtx target ATTRIBUTE_UNUSED,
10570 rtx subtarget ATTRIBUTE_UNUSED,
10571 machine_mode mode ATTRIBUTE_UNUSED,
10572 int ignore ATTRIBUTE_UNUSED)
10573 {
10574 return NULL_RTX;
10575 }
10576
10577 /* Returns true if EXP represents data that would potentially reside
10578 in a readonly section. */
10579
10580 bool
10581 readonly_data_expr (tree exp)
10582 {
10583 STRIP_NOPS (exp);
10584
10585 if (TREE_CODE (exp) != ADDR_EXPR)
10586 return false;
10587
10588 exp = get_base_address (TREE_OPERAND (exp, 0));
10589 if (!exp)
10590 return false;
10591
10592 /* Make sure we call decl_readonly_section only for trees it
10593 can handle (since it returns true for everything it doesn't
10594 understand). */
10595 if (TREE_CODE (exp) == STRING_CST
10596 || TREE_CODE (exp) == CONSTRUCTOR
10597 || (VAR_P (exp) && TREE_STATIC (exp)))
10598 return decl_readonly_section (exp, 0);
10599 else
10600 return false;
10601 }
10602
10603 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10604 to the call, and TYPE is its return type.
10605
10606 Return NULL_TREE if no simplification was possible, otherwise return the
10607 simplified form of the call as a tree.
10608
10609 The simplified form may be a constant or other expression which
10610 computes the same value, but in a more efficient manner (including
10611 calls to other builtin functions).
10612
10613 The call may contain arguments which need to be evaluated, but
10614 which are not useful to determine the result of the call. In
10615 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10616 COMPOUND_EXPR will be an argument which must be evaluated.
10617 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10618 COMPOUND_EXPR in the chain will contain the tree for the simplified
10619 form of the builtin function call. */
10620
10621 static tree
10622 fold_builtin_strpbrk (location_t loc, tree expr, tree s1, tree s2, tree type)
10623 {
10624 if (!validate_arg (s1, POINTER_TYPE)
10625 || !validate_arg (s2, POINTER_TYPE))
10626 return NULL_TREE;
10627
10628 if (!check_nul_terminated_array (expr, s1)
10629 || !check_nul_terminated_array (expr, s2))
10630 return NULL_TREE;
10631
10632 tree fn;
10633 const char *p1, *p2;
10634
10635 p2 = c_getstr (s2);
10636 if (p2 == NULL)
10637 return NULL_TREE;
10638
10639 p1 = c_getstr (s1);
10640 if (p1 != NULL)
10641 {
10642 const char *r = strpbrk (p1, p2);
10643 tree tem;
10644
10645 if (r == NULL)
10646 return build_int_cst (TREE_TYPE (s1), 0);
10647
10648 /* Return an offset into the constant string argument. */
10649 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10650 return fold_convert_loc (loc, type, tem);
10651 }
10652
10653 if (p2[0] == '\0')
10654 /* strpbrk(x, "") == NULL.
10655 Evaluate and ignore s1 in case it had side-effects. */
10656 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10657
10658 if (p2[1] != '\0')
10659 return NULL_TREE; /* Really call strpbrk. */
10660
10661 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10662 if (!fn)
10663 return NULL_TREE;
10664
10665 /* New argument list transforming strpbrk(s1, s2) to
10666 strchr(s1, s2[0]). */
10667 return build_call_expr_loc (loc, fn, 2, s1,
10668 build_int_cst (integer_type_node, p2[0]));
10669 }
10670
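/* Illustrative sketch added for exposition (not part of the upstream file):
   the source-level effect of fold_builtin_strpbrk when the second argument
   is a string literal:

     const char *a = strpbrk (s, "");       // folds to a null pointer
     const char *b = strpbrk (s, "/");      // folds to strchr (s, '/')
     const char *c = strpbrk ("a/b", "/+"); // folds to the constant "/b"
*/
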
10671 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10672 to the call.
10673
10674 Return NULL_TREE if no simplification was possible, otherwise return the
10675 simplified form of the call as a tree.
10676
10677 The simplified form may be a constant or other expression which
10678 computes the same value, but in a more efficient manner (including
10679 calls to other builtin functions).
10680
10681 The call may contain arguments which need to be evaluated, but
10682 which are not useful to determine the result of the call. In
10683 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10684 COMPOUND_EXPR will be an argument which must be evaluated.
10685 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10686 COMPOUND_EXPR in the chain will contain the tree for the simplified
10687 form of the builtin function call. */
10688
10689 static tree
10690 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10691 {
10692 if (!validate_arg (s1, POINTER_TYPE)
10693 || !validate_arg (s2, POINTER_TYPE))
10694 return NULL_TREE;
10695
10696 if (!check_nul_terminated_array (expr, s1)
10697 || !check_nul_terminated_array (expr, s2))
10698 return NULL_TREE;
10699
10700 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10701
10702 /* If either argument is "", return NULL_TREE. */
10703 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10704 /* Evaluate and ignore both arguments in case either one has
10705 side-effects. */
10706 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10707 s1, s2);
10708 return NULL_TREE;
10709 }
10710
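/* Illustrative sketch added for exposition (not part of the upstream file):
   fold_builtin_strspn only handles the empty-string cases, both of which
   yield zero while still evaluating the arguments for side effects:

     size_t a = strspn (s, "");   // folds to 0
     size_t b = strspn ("", s);   // folds to 0
*/
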
10711 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10712 to the call.
10713
10714 Return NULL_TREE if no simplification was possible, otherwise return the
10715 simplified form of the call as a tree.
10716
10717 The simplified form may be a constant or other expression which
10718 computes the same value, but in a more efficient manner (including
10719 calls to other builtin functions).
10720
10721 The call may contain arguments which need to be evaluated, but
10722 which are not useful to determine the result of the call. In
10723 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10724 COMPOUND_EXPR will be an argument which must be evaluated.
10725 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10726 COMPOUND_EXPR in the chain will contain the tree for the simplified
10727 form of the builtin function call. */
10728
10729 static tree
10730 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10731 {
10732 if (!validate_arg (s1, POINTER_TYPE)
10733 || !validate_arg (s2, POINTER_TYPE))
10734 return NULL_TREE;
10735
10736 if (!check_nul_terminated_array (expr, s1)
10737 || !check_nul_terminated_array (expr, s2))
10738 return NULL_TREE;
10739
10740 /* If the first argument is "", return NULL_TREE. */
10741 const char *p1 = c_getstr (s1);
10742 if (p1 && *p1 == '\0')
10743 {
10744 /* Evaluate and ignore argument s2 in case it has
10745 side-effects. */
10746 return omit_one_operand_loc (loc, size_type_node,
10747 size_zero_node, s2);
10748 }
10749
10750 /* If the second argument is "", return __builtin_strlen(s1). */
10751 const char *p2 = c_getstr (s2);
10752 if (p2 && *p2 == '\0')
10753 {
10754 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10755
10756 /* If the replacement _DECL isn't initialized, don't do the
10757 transformation. */
10758 if (!fn)
10759 return NULL_TREE;
10760
10761 return build_call_expr_loc (loc, fn, 1, s1);
10762 }
10763 return NULL_TREE;
10764 }
10765
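/* Illustrative sketch added for exposition (not part of the upstream file):
   the two cases fold_builtin_strcspn handles:

     size_t a = strcspn ("", s);   // folds to 0 (s is still evaluated)
     size_t b = strcspn (s, "");   // folds to strlen (s)
*/
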
10766 /* Fold the next_arg or va_start call EXP. Return true if an error was
10767 produced, false otherwise. This is done so that we don't output the error
10768 or warning two or three times. */
10769
10770 bool
10771 fold_builtin_next_arg (tree exp, bool va_start_p)
10772 {
10773 tree fntype = TREE_TYPE (current_function_decl);
10774 int nargs = call_expr_nargs (exp);
10775 tree arg;
10776 /* There is a good chance the current input_location points inside the
10777 definition of the va_start macro (perhaps on the token for
10778 builtin) in a system header, so warnings will not be emitted.
10779 Use the location in real source code. */
10780 location_t current_location =
10781 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10782 NULL);
10783
10784 if (!stdarg_p (fntype))
10785 {
10786 error ("%<va_start%> used in function with fixed arguments");
10787 return true;
10788 }
10789
10790 if (va_start_p)
10791 {
10792 if (nargs != 2)
10793 {
10794 error ("wrong number of arguments to function %<va_start%>");
10795 return true;
10796 }
10797 arg = CALL_EXPR_ARG (exp, 1);
10798 }
10799 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10800 when we checked the arguments and if needed issued a warning. */
10801 else
10802 {
10803 if (nargs == 0)
10804 {
10805 /* Evidently an out of date version of <stdarg.h>; can't validate
10806 va_start's second argument, but can still work as intended. */
10807 warning_at (current_location,
10808 OPT_Wvarargs,
10809 "%<__builtin_next_arg%> called without an argument");
10810 return true;
10811 }
10812 else if (nargs > 1)
10813 {
10814 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10815 return true;
10816 }
10817 arg = CALL_EXPR_ARG (exp, 0);
10818 }
10819
10820 if (TREE_CODE (arg) == SSA_NAME)
10821 arg = SSA_NAME_VAR (arg);
10822
10823 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10824 or __builtin_next_arg (0) the first time we see it, after checking
10825 the arguments and if needed issuing a warning. */
10826 if (!integer_zerop (arg))
10827 {
10828 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10829
10830 /* Strip off all nops for the sake of the comparison. This
10831 is not quite the same as STRIP_NOPS. It does more.
10832 We must also strip off INDIRECT_EXPR for C++ reference
10833 parameters. */
10834 while (CONVERT_EXPR_P (arg)
10835 || TREE_CODE (arg) == INDIRECT_REF)
10836 arg = TREE_OPERAND (arg, 0);
10837 if (arg != last_parm)
10838 {
10839 /* FIXME: Sometimes with the tree optimizers we can end up with
10840 something other than the last argument even though the user used
10841 the last argument. We just warn and set the arg to be the last
10842 argument so that we will get wrong-code because of
10843 it. */
10844 warning_at (current_location,
10845 OPT_Wvarargs,
10846 "second parameter of %<va_start%> not last named argument");
10847 }
10848
10849 /* Undefined by C99 7.15.1.4p4 (va_start):
10850 "If the parameter parmN is declared with the register storage
10851 class, with a function or array type, or with a type that is
10852 not compatible with the type that results after application of
10853 the default argument promotions, the behavior is undefined."
10854 */
10855 else if (DECL_REGISTER (arg))
10856 {
10857 warning_at (current_location,
10858 OPT_Wvarargs,
10859 "undefined behavior when second parameter of "
10860 "%<va_start%> is declared with %<register%> storage");
10861 }
10862
10863 /* We want to verify the second parameter just once before the tree
10864 optimizers are run and then avoid keeping it in the tree,
10865 as otherwise we could warn even for correct code like:
10866 void foo (int i, ...)
10867 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10868 if (va_start_p)
10869 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10870 else
10871 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10872 }
10873 return false;
10874 }
10875
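/* Illustrative sketch added for exposition (not part of the upstream file):
   the diagnostics issued by fold_builtin_next_arg, shown on user code:

     #include <stdarg.h>

     void
     fixed_args (int a, int b)
     {
       va_list ap;
       va_start (ap, b);   // error: va_start used in function with fixed arguments
       va_end (ap);
     }

     void
     not_last (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // -Wvarargs: second parameter of va_start not
       va_end (ap);        // last named argument
     }
*/
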
10876
10877 /* Expand a call EXP to __builtin_object_size. */
10878
10879 static rtx
10880 expand_builtin_object_size (tree exp)
10881 {
10882 tree ost;
10883 int object_size_type;
10884 tree fndecl = get_callee_fndecl (exp);
10885
10886 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10887 {
10888 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10889 exp, fndecl);
10890 expand_builtin_trap ();
10891 return const0_rtx;
10892 }
10893
10894 ost = CALL_EXPR_ARG (exp, 1);
10895 STRIP_NOPS (ost);
10896
10897 if (TREE_CODE (ost) != INTEGER_CST
10898 || tree_int_cst_sgn (ost) < 0
10899 || compare_tree_int (ost, 3) > 0)
10900 {
10901 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10902 exp, fndecl);
10903 expand_builtin_trap ();
10904 return const0_rtx;
10905 }
10906
10907 object_size_type = tree_to_shwi (ost);
10908
10909 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10910 }
10911
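/* Illustrative sketch added for exposition (not part of the upstream file):
   a __builtin_object_size call only reaches this expander when its result
   could not be computed earlier, so it degrades to the documented "unknown"
   values:

     char buf[16];
     size_t a = __builtin_object_size (buf, 0);  // folded earlier to 16
     size_t b = __builtin_object_size (p, 0);    // unknown: expands to (size_t) -1
     size_t c = __builtin_object_size (p, 2);    // unknown: expands to 0
*/
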
10912 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10913 FCODE is the BUILT_IN_* to use.
10914 Return NULL_RTX if we failed; the caller should emit a normal call,
10915 otherwise try to get the result in TARGET, if convenient (and in
10916 mode MODE if that's convenient). */
10917
10918 static rtx
10919 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10920 enum built_in_function fcode)
10921 {
10922 if (!validate_arglist (exp,
10923 POINTER_TYPE,
10924 fcode == BUILT_IN_MEMSET_CHK
10925 ? INTEGER_TYPE : POINTER_TYPE,
10926 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10927 return NULL_RTX;
10928
10929 tree dest = CALL_EXPR_ARG (exp, 0);
10930 tree src = CALL_EXPR_ARG (exp, 1);
10931 tree len = CALL_EXPR_ARG (exp, 2);
10932 tree size = CALL_EXPR_ARG (exp, 3);
10933
10934 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10935 /*str=*/NULL_TREE, size);
10936
10937 if (!tree_fits_uhwi_p (size))
10938 return NULL_RTX;
10939
10940 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10941 {
10942 /* Avoid transforming the checking call to an ordinary one when
10943 an overflow has been detected or when the call couldn't be
10944 validated because the size is not constant. */
10945 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10946 return NULL_RTX;
10947
10948 tree fn = NULL_TREE;
10949 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10950 mem{cpy,pcpy,move,set} is available. */
10951 switch (fcode)
10952 {
10953 case BUILT_IN_MEMCPY_CHK:
10954 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10955 break;
10956 case BUILT_IN_MEMPCPY_CHK:
10957 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10958 break;
10959 case BUILT_IN_MEMMOVE_CHK:
10960 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10961 break;
10962 case BUILT_IN_MEMSET_CHK:
10963 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10964 break;
10965 default:
10966 break;
10967 }
10968
10969 if (! fn)
10970 return NULL_RTX;
10971
10972 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10973 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10974 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10975 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10976 }
10977 else if (fcode == BUILT_IN_MEMSET_CHK)
10978 return NULL_RTX;
10979 else
10980 {
10981 unsigned int dest_align = get_pointer_alignment (dest);
10982
10983 /* If DEST is not a pointer type, call the normal function. */
10984 if (dest_align == 0)
10985 return NULL_RTX;
10986
10987 /* If SRC and DEST are the same (and not volatile), do nothing. */
10988 if (operand_equal_p (src, dest, 0))
10989 {
10990 tree expr;
10991
10992 if (fcode != BUILT_IN_MEMPCPY_CHK)
10993 {
10994 /* Evaluate and ignore LEN in case it has side-effects. */
10995 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10996 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10997 }
10998
10999 expr = fold_build_pointer_plus (dest, len);
11000 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11001 }
11002
11003 /* __memmove_chk special case. */
11004 if (fcode == BUILT_IN_MEMMOVE_CHK)
11005 {
11006 unsigned int src_align = get_pointer_alignment (src);
11007
11008 if (src_align == 0)
11009 return NULL_RTX;
11010
11011 /* If src is categorized for a readonly section we can use
11012 normal __memcpy_chk. */
11013 if (readonly_data_expr (src))
11014 {
11015 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11016 if (!fn)
11017 return NULL_RTX;
11018 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11019 dest, src, len, size);
11020 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11021 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11022 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11023 }
11024 }
11025 return NULL_RTX;
11026 }
11027 }
11028
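/* Illustrative sketch added for exposition (not part of the upstream file):
   the effect of expand_builtin_memory_chk on a fortified call whose length
   is a compile-time constant that fits in the destination:

     char d[64];
     __builtin___memcpy_chk (d, s, 32, __builtin_object_size (d, 0));
     // The object size folds to 64 >= 32, so this expands to a plain
     // memcpy (d, s, 32).  A length known to overflow, or one that cannot
     // be verified, keeps the checking __memcpy_chk library call instead.
*/
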
11029 /* Emit warning if a buffer overflow is detected at compile time. */
11030
11031 static void
11032 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11033 {
11034 /* The source string. */
11035 tree srcstr = NULL_TREE;
11036 /* The size of the destination object. */
11037 tree objsize = NULL_TREE;
11038 /* The string that is being concatenated with (as in __strcat_chk)
11039 or null if it isn't. */
11040 tree catstr = NULL_TREE;
11041 /* The maximum length of the source sequence in a bounded operation
11042 (such as __strncat_chk) or null if the operation isn't bounded
11043 (such as __strcat_chk). */
11044 tree maxread = NULL_TREE;
11045 /* The exact size of the access (such as in __strncpy_chk). */
11046 tree size = NULL_TREE;
11047
11048 switch (fcode)
11049 {
11050 case BUILT_IN_STRCPY_CHK:
11051 case BUILT_IN_STPCPY_CHK:
11052 srcstr = CALL_EXPR_ARG (exp, 1);
11053 objsize = CALL_EXPR_ARG (exp, 2);
11054 break;
11055
11056 case BUILT_IN_STRCAT_CHK:
11057 /* For __strcat_chk the warning will be emitted only if overflowing
11058 by at least strlen (dest) + 1 bytes. */
11059 catstr = CALL_EXPR_ARG (exp, 0);
11060 srcstr = CALL_EXPR_ARG (exp, 1);
11061 objsize = CALL_EXPR_ARG (exp, 2);
11062 break;
11063
11064 case BUILT_IN_STRNCAT_CHK:
11065 catstr = CALL_EXPR_ARG (exp, 0);
11066 srcstr = CALL_EXPR_ARG (exp, 1);
11067 maxread = CALL_EXPR_ARG (exp, 2);
11068 objsize = CALL_EXPR_ARG (exp, 3);
11069 break;
11070
11071 case BUILT_IN_STRNCPY_CHK:
11072 case BUILT_IN_STPNCPY_CHK:
11073 srcstr = CALL_EXPR_ARG (exp, 1);
11074 size = CALL_EXPR_ARG (exp, 2);
11075 objsize = CALL_EXPR_ARG (exp, 3);
11076 break;
11077
11078 case BUILT_IN_SNPRINTF_CHK:
11079 case BUILT_IN_VSNPRINTF_CHK:
11080 maxread = CALL_EXPR_ARG (exp, 1);
11081 objsize = CALL_EXPR_ARG (exp, 3);
11082 break;
11083 default:
11084 gcc_unreachable ();
11085 }
11086
11087 if (catstr && maxread)
11088 {
11089 /* Check __strncat_chk. There is no way to determine the length
11090 of the string to which the source string is being appended so
11091 just warn when the length of the source string is not known. */
11092 check_strncat_sizes (exp, objsize);
11093 return;
11094 }
11095
11096 /* The destination argument is the first one for all built-ins above. */
11097 tree dst = CALL_EXPR_ARG (exp, 0);
11098
11099 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
11100 }
11101
11102 /* Emit warning if a buffer overflow is detected at compile time
11103 in __sprintf_chk/__vsprintf_chk calls. */
11104
11105 static void
11106 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11107 {
11108 tree size, len, fmt;
11109 const char *fmt_str;
11110 int nargs = call_expr_nargs (exp);
11111
11112 /* Verify the required arguments in the original call. */
11113
11114 if (nargs < 4)
11115 return;
11116 size = CALL_EXPR_ARG (exp, 2);
11117 fmt = CALL_EXPR_ARG (exp, 3);
11118
11119 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11120 return;
11121
11122 /* Check whether the format is a literal string constant. */
11123 fmt_str = c_getstr (fmt);
11124 if (fmt_str == NULL)
11125 return;
11126
11127 if (!init_target_chars ())
11128 return;
11129
11130 /* If the format doesn't contain % args or %%, we know its size. */
11131 if (strchr (fmt_str, target_percent) == 0)
11132 len = build_int_cstu (size_type_node, strlen (fmt_str));
11133 /* If the format is "%s" and the first ... argument is a string literal,
11134 we know it too. */
11135 else if (fcode == BUILT_IN_SPRINTF_CHK
11136 && strcmp (fmt_str, target_percent_s) == 0)
11137 {
11138 tree arg;
11139
11140 if (nargs < 5)
11141 return;
11142 arg = CALL_EXPR_ARG (exp, 4);
11143 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11144 return;
11145
11146 len = c_strlen (arg, 1);
11147 if (!len || ! tree_fits_uhwi_p (len))
11148 return;
11149 }
11150 else
11151 return;
11152
11153 /* Add one for the terminating nul. */
11154 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11155
11156 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
11157 /*maxread=*/NULL_TREE, len, size);
11158 }
11159
11160 /* Emit warning if free is called with the address of a variable. */
11161
11162 static void
11163 maybe_emit_free_warning (tree exp)
11164 {
11165 if (call_expr_nargs (exp) != 1)
11166 return;
11167
11168 tree arg = CALL_EXPR_ARG (exp, 0);
11169
11170 STRIP_NOPS (arg);
11171 if (TREE_CODE (arg) != ADDR_EXPR)
11172 return;
11173
11174 arg = get_base_address (TREE_OPERAND (arg, 0));
11175 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11176 return;
11177
11178 if (SSA_VAR_P (arg))
11179 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11180 "%Kattempt to free a non-heap object %qD", exp, arg);
11181 else
11182 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11183 "%Kattempt to free a non-heap object", exp);
11184 }
11185
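/* Illustrative sketch added for exposition (not part of the upstream file):
   the case maybe_emit_free_warning diagnoses:

     #include <stdlib.h>

     void
     free_example (void)
     {
       int x;
       free (&x);   // -Wfree-nonheap-object: attempt to free a non-heap object 'x'
     }
*/
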
11186 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11187 if possible. */
11188
11189 static tree
11190 fold_builtin_object_size (tree ptr, tree ost)
11191 {
11192 unsigned HOST_WIDE_INT bytes;
11193 int object_size_type;
11194
11195 if (!validate_arg (ptr, POINTER_TYPE)
11196 || !validate_arg (ost, INTEGER_TYPE))
11197 return NULL_TREE;
11198
11199 STRIP_NOPS (ost);
11200
11201 if (TREE_CODE (ost) != INTEGER_CST
11202 || tree_int_cst_sgn (ost) < 0
11203 || compare_tree_int (ost, 3) > 0)
11204 return NULL_TREE;
11205
11206 object_size_type = tree_to_shwi (ost);
11207
11208 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11209 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11210 and (size_t) 0 for types 2 and 3. */
11211 if (TREE_SIDE_EFFECTS (ptr))
11212 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11213
11214 if (TREE_CODE (ptr) == ADDR_EXPR)
11215 {
11216 compute_builtin_object_size (ptr, object_size_type, &bytes);
11217 if (wi::fits_to_tree_p (bytes, size_type_node))
11218 return build_int_cstu (size_type_node, bytes);
11219 }
11220 else if (TREE_CODE (ptr) == SSA_NAME)
11221 {
11222 /* If object size is not known yet, delay folding until
11223 later. Maybe subsequent passes will help determine
11224 it. */
11225 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11226 && wi::fits_to_tree_p (bytes, size_type_node))
11227 return build_int_cstu (size_type_node, bytes);
11228 }
11229
11230 return NULL_TREE;
11231 }
11232
11233 /* Builtins with folding operations that operate on "..." arguments
11234 need special handling; we need to store the arguments in a convenient
11235 data structure before attempting any folding. Fortunately there are
11236 only a few builtins that fall into this category. FNDECL is the
11237 function, EXP is the CALL_EXPR for the call. */
11238
11239 static tree
11240 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11241 {
11242 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11243 tree ret = NULL_TREE;
11244
11245 switch (fcode)
11246 {
11247 case BUILT_IN_FPCLASSIFY:
11248 ret = fold_builtin_fpclassify (loc, args, nargs);
11249 break;
11250
11251 default:
11252 break;
11253 }
11254 if (ret)
11255 {
11256 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11257 SET_EXPR_LOCATION (ret, loc);
11258 TREE_NO_WARNING (ret) = 1;
11259 return ret;
11260 }
11261 return NULL_TREE;
11262 }
11263
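/* Illustrative sketch added for exposition (not part of the upstream file):
   __builtin_fpclassify is the only "..." builtin folded here.  Its first
   five arguments are the values to return for each floating-point class,
   followed by the value being classified:

     #include <math.h>   // FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL, FP_ZERO

     int
     fpclassify_example (double x)
     {
       return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                                    FP_SUBNORMAL, FP_ZERO, x);
     }
*/
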
11264 /* Initialize format string characters in the target charset. */
11265
11266 bool
11267 init_target_chars (void)
11268 {
11269 static bool init;
11270 if (!init)
11271 {
11272 target_newline = lang_hooks.to_target_charset ('\n');
11273 target_percent = lang_hooks.to_target_charset ('%');
11274 target_c = lang_hooks.to_target_charset ('c');
11275 target_s = lang_hooks.to_target_charset ('s');
11276 if (target_newline == 0 || target_percent == 0 || target_c == 0
11277 || target_s == 0)
11278 return false;
11279
11280 target_percent_c[0] = target_percent;
11281 target_percent_c[1] = target_c;
11282 target_percent_c[2] = '\0';
11283
11284 target_percent_s[0] = target_percent;
11285 target_percent_s[1] = target_s;
11286 target_percent_s[2] = '\0';
11287
11288 target_percent_s_newline[0] = target_percent;
11289 target_percent_s_newline[1] = target_s;
11290 target_percent_s_newline[2] = target_newline;
11291 target_percent_s_newline[3] = '\0';
11292
11293 init = true;
11294 }
11295 return true;
11296 }
11297
11298 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11299 and no overflow/underflow occurred. INEXACT is true if M was not
11300 exactly calculated. TYPE is the tree type for the result. This
11301 function assumes that you cleared the MPFR flags and then
11302 calculated M to see if anything subsequently set a flag prior to
11303 entering this function. Return NULL_TREE if any checks fail. */
11304
11305 static tree
11306 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11307 {
11308 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11309 overflow/underflow occurred. If -frounding-math, proceed iff the
11310 result of calling FUNC was exact. */
11311 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11312 && (!flag_rounding_math || !inexact))
11313 {
11314 REAL_VALUE_TYPE rr;
11315
11316 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11317 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11318 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11319 but the mpfr_t is not, then we underflowed in the
11320 conversion. */
11321 if (real_isfinite (&rr)
11322 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11323 {
11324 REAL_VALUE_TYPE rmode;
11325
11326 real_convert (&rmode, TYPE_MODE (type), &rr);
11327 /* Proceed iff the specified mode can hold the value. */
11328 if (real_identical (&rmode, &rr))
11329 return build_real (type, rmode);
11330 }
11331 }
11332 return NULL_TREE;
11333 }
11334
11335 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11336 number and no overflow/underflow occurred. INEXACT is true if M
11337 was not exactly calculated. TYPE is the tree type for the result.
11338 This function assumes that you cleared the MPFR flags and then
11339 calculated M to see if anything subsequently set a flag prior to
11340 entering this function. Return NULL_TREE if any checks fail; if
11341 FORCE_CONVERT is true, bypass the checks. */
11342
11343 static tree
11344 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11345 {
11346 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11347 overflow/underflow occurred. If -frounding-math, proceed iff the
11348 result of calling FUNC was exact. */
11349 if (force_convert
11350 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11351 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11352 && (!flag_rounding_math || !inexact)))
11353 {
11354 REAL_VALUE_TYPE re, im;
11355
11356 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11357 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11358 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11359 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11360 but the mpfr_t is not, then we underflowed in the
11361 conversion. */
11362 if (force_convert
11363 || (real_isfinite (&re) && real_isfinite (&im)
11364 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11365 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11366 {
11367 REAL_VALUE_TYPE re_mode, im_mode;
11368
11369 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11370 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11371 /* Proceed iff the specified mode can hold the value. */
11372 if (force_convert
11373 || (real_identical (&re_mode, &re)
11374 && real_identical (&im_mode, &im)))
11375 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11376 build_real (TREE_TYPE (type), im_mode));
11377 }
11378 }
11379 return NULL_TREE;
11380 }
11381
11382 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11383 the pointer *(ARG_QUO) and return the result. The type is taken
11384 from the type of ARG0 and is used for setting the precision of the
11385 calculation and results. */
11386
11387 static tree
11388 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11389 {
11390 tree const type = TREE_TYPE (arg0);
11391 tree result = NULL_TREE;
11392
11393 STRIP_NOPS (arg0);
11394 STRIP_NOPS (arg1);
11395
11396 /* To proceed, MPFR must exactly represent the target floating point
11397 format, which only happens when the target base equals two. */
11398 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11399 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11400 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11401 {
11402 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11403 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11404
11405 if (real_isfinite (ra0) && real_isfinite (ra1))
11406 {
11407 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11408 const int prec = fmt->p;
11409 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11410 tree result_rem;
11411 long integer_quo;
11412 mpfr_t m0, m1;
11413
11414 mpfr_inits2 (prec, m0, m1, NULL);
11415 mpfr_from_real (m0, ra0, MPFR_RNDN);
11416 mpfr_from_real (m1, ra1, MPFR_RNDN);
11417 mpfr_clear_flags ();
11418 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11419 /* Remquo is independent of the rounding mode, so pass
11420 inexact=0 to do_mpfr_ckconv(). */
11421 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11422 mpfr_clears (m0, m1, NULL);
11423 if (result_rem)
11424 {
11425 /* MPFR calculates quo in the host's long so it may
11426 return more bits in quo than the target int can hold
11427 if sizeof(host long) > sizeof(target int). This can
11428 happen even for native compilers in LP64 mode. In
11429 these cases, modulo the quo value with the largest
11430 number that the target int can hold while leaving one
11431 bit for the sign. */
11432 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11433 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11434
11435 /* Dereference the quo pointer argument. */
11436 arg_quo = build_fold_indirect_ref (arg_quo);
11437 /* Proceed iff a valid pointer type was passed in. */
11438 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11439 {
11440 /* Set the value. */
11441 tree result_quo
11442 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11443 build_int_cst (TREE_TYPE (arg_quo),
11444 integer_quo));
11445 TREE_SIDE_EFFECTS (result_quo) = 1;
11446 /* Combine the quo assignment with the rem. */
11447 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11448 result_quo, result_rem));
11449 }
11450 }
11451 }
11452 }
11453 return result;
11454 }
11455
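/* Illustrative sketch added for exposition (not part of the upstream file):
   when both floating-point operands are constants, the remquo fold above
   completes at compile time:

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);
     // 5.0/3.0 rounds to the nearest integer 2, so this folds to the
     // compound expression (q = 2, -1.0): r is -1.0 and q is set to 2.
*/
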
11456 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11457 resulting value as a tree with type TYPE. The mpfr precision is
11458 set to the precision of TYPE. We assume that this mpfr function
11459 returns zero if the result could be calculated exactly within the
11460 requested precision. In addition, the integer pointer represented
11461 by ARG_SG will be dereferenced and set to the appropriate signgam
11462 (-1,1) value. */
11463
11464 static tree
11465 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11466 {
11467 tree result = NULL_TREE;
11468
11469 STRIP_NOPS (arg);
11470
11471 /* To proceed, MPFR must exactly represent the target floating point
11472 format, which only happens when the target base equals two. Also
11473 verify ARG is a constant and that ARG_SG is an int pointer. */
11474 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11475 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11476 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11477 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11478 {
11479 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11480
11481 /* In addition to NaN and Inf, the argument cannot be zero or a
11482 negative integer. */
11483 if (real_isfinite (ra)
11484 && ra->cl != rvc_zero
11485 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11486 {
11487 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11488 const int prec = fmt->p;
11489 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11490 int inexact, sg;
11491 mpfr_t m;
11492 tree result_lg;
11493
11494 mpfr_init2 (m, prec);
11495 mpfr_from_real (m, ra, MPFR_RNDN);
11496 mpfr_clear_flags ();
11497 inexact = mpfr_lgamma (m, &sg, m, rnd);
11498 result_lg = do_mpfr_ckconv (m, type, inexact);
11499 mpfr_clear (m);
11500 if (result_lg)
11501 {
11502 tree result_sg;
11503
11504 /* Dereference the arg_sg pointer argument. */
11505 arg_sg = build_fold_indirect_ref (arg_sg);
11506 /* Assign the signgam value into *arg_sg. */
11507 result_sg = fold_build2 (MODIFY_EXPR,
11508 TREE_TYPE (arg_sg), arg_sg,
11509 build_int_cst (TREE_TYPE (arg_sg), sg));
11510 TREE_SIDE_EFFECTS (result_sg) = 1;
11511 /* Combine the signgam assignment with the lgamma result. */
11512 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11513 result_sg, result_lg));
11514 }
11515 }
11516 }
11517
11518 return result;
11519 }
11520
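/* Illustrative sketch added for exposition (not part of the upstream file):
   constant folding performed by do_mpfr_lgamma_r:

     int sg;
     double l = __builtin_lgamma_r (0.5, &sg);
     // Gamma (0.5) is sqrt (pi), so this folds to roughly
     // (sg = 1, 0.5723649...), i.e. log (sqrt (pi)) with signgam set to 1.
*/
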
11521 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11522 mpc function FUNC on it and return the resulting value as a tree
11523 with type TYPE. The mpfr precision is set to the precision of
11524 TYPE. We assume that function FUNC returns zero if the result
11525 could be calculated exactly within the requested precision. If
11526 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11527 in the arguments and/or results. */
11528
11529 tree
11530 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11531 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11532 {
11533 tree result = NULL_TREE;
11534
11535 STRIP_NOPS (arg0);
11536 STRIP_NOPS (arg1);
11537
11538 /* To proceed, MPFR must exactly represent the target floating point
11539 format, which only happens when the target base equals two. */
11540 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11541 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11542 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11543 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11544 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11545 {
11546 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11547 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11548 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11549 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11550
11551 if (do_nonfinite
11552 || (real_isfinite (re0) && real_isfinite (im0)
11553 && real_isfinite (re1) && real_isfinite (im1)))
11554 {
11555 const struct real_format *const fmt =
11556 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11557 const int prec = fmt->p;
11558 const mpfr_rnd_t rnd = fmt->round_towards_zero
11559 ? MPFR_RNDZ : MPFR_RNDN;
11560 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11561 int inexact;
11562 mpc_t m0, m1;
11563
11564 mpc_init2 (m0, prec);
11565 mpc_init2 (m1, prec);
11566 mpfr_from_real (mpc_realref (m0), re0, rnd);
11567 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11568 mpfr_from_real (mpc_realref (m1), re1, rnd);
11569 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11570 mpfr_clear_flags ();
11571 inexact = func (m0, m0, m1, crnd);
11572 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11573 mpc_clear (m0);
11574 mpc_clear (m1);
11575 }
11576 }
11577
11578 return result;
11579 }
11580
11581 /* A wrapper function for builtin folding that prevents warnings for
11582 "statement without effect" and the like, caused by removing the
11583 call node before the warning is generated. */
11584
11585 tree
11586 fold_call_stmt (gcall *stmt, bool ignore)
11587 {
11588 tree ret = NULL_TREE;
11589 tree fndecl = gimple_call_fndecl (stmt);
11590 location_t loc = gimple_location (stmt);
11591 if (fndecl && fndecl_built_in_p (fndecl)
11592 && !gimple_call_va_arg_pack_p (stmt))
11593 {
11594 int nargs = gimple_call_num_args (stmt);
11595 tree *args = (nargs > 0
11596 ? gimple_call_arg_ptr (stmt, 0)
11597 : &error_mark_node);
11598
11599 if (avoid_folding_inline_builtin (fndecl))
11600 return NULL_TREE;
11601 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11602 {
11603 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11604 }
11605 else
11606 {
11607 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
11608 if (ret)
11609 {
11610 /* Propagate location information from original call to
11611 expansion of builtin. Otherwise things like
11612 maybe_emit_chk_warning, that operate on the expansion
11613 of a builtin, will use the wrong location information. */
11614 if (gimple_has_location (stmt))
11615 {
11616 tree realret = ret;
11617 if (TREE_CODE (ret) == NOP_EXPR)
11618 realret = TREE_OPERAND (ret, 0);
11619 if (CAN_HAVE_LOCATION_P (realret)
11620 && !EXPR_HAS_LOCATION (realret))
11621 SET_EXPR_LOCATION (realret, loc);
11622 return realret;
11623 }
11624 return ret;
11625 }
11626 }
11627 }
11628 return NULL_TREE;
11629 }
11630
11631 /* Look up the function in builtin_decl that corresponds to DECL
11632 and set ASMSPEC as its user assembler name. DECL must be a
11633 function decl that declares a builtin. */
11634
11635 void
11636 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11637 {
11638 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11639 && asmspec != 0);
11640
11641 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11642 set_user_assembler_name (builtin, asmspec);
11643
11644 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11645 && INT_TYPE_SIZE < BITS_PER_WORD)
11646 {
11647 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11648 set_user_assembler_libfunc ("ffs", asmspec);
11649 set_optab_libfunc (ffs_optab, mode, "ffs");
11650 }
11651 }
11652
11653 /* Return true if DECL is a builtin that expands to a constant or similarly
11654 simple code. */
11655 bool
11656 is_simple_builtin (tree decl)
11657 {
11658 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11659 switch (DECL_FUNCTION_CODE (decl))
11660 {
11661 /* Builtins that expand to constants. */
11662 case BUILT_IN_CONSTANT_P:
11663 case BUILT_IN_EXPECT:
11664 case BUILT_IN_OBJECT_SIZE:
11665 case BUILT_IN_UNREACHABLE:
11666 /* Simple register moves or loads from stack. */
11667 case BUILT_IN_ASSUME_ALIGNED:
11668 case BUILT_IN_RETURN_ADDRESS:
11669 case BUILT_IN_EXTRACT_RETURN_ADDR:
11670 case BUILT_IN_FROB_RETURN_ADDR:
11671 case BUILT_IN_RETURN:
11672 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11673 case BUILT_IN_FRAME_ADDRESS:
11674 case BUILT_IN_VA_END:
11675 case BUILT_IN_STACK_SAVE:
11676 case BUILT_IN_STACK_RESTORE:
11677 /* Exception state returns or moves registers around. */
11678 case BUILT_IN_EH_FILTER:
11679 case BUILT_IN_EH_POINTER:
11680 case BUILT_IN_EH_COPY_VALUES:
11681 return true;
11682
11683 default:
11684 return false;
11685 }
11686
11687 return false;
11688 }
11689
11690 /* Return true if DECL is a builtin that is not expensive, i.e., it is
11691 most probably expanded inline into reasonably simple code. This is a
11692 superset of is_simple_builtin. */
11693 bool
11694 is_inexpensive_builtin (tree decl)
11695 {
11696 if (!decl)
11697 return false;
11698 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11699 return true;
11700 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11701 switch (DECL_FUNCTION_CODE (decl))
11702 {
11703 case BUILT_IN_ABS:
11704 CASE_BUILT_IN_ALLOCA:
11705 case BUILT_IN_BSWAP16:
11706 case BUILT_IN_BSWAP32:
11707 case BUILT_IN_BSWAP64:
11708 case BUILT_IN_BSWAP128:
11709 case BUILT_IN_CLZ:
11710 case BUILT_IN_CLZIMAX:
11711 case BUILT_IN_CLZL:
11712 case BUILT_IN_CLZLL:
11713 case BUILT_IN_CTZ:
11714 case BUILT_IN_CTZIMAX:
11715 case BUILT_IN_CTZL:
11716 case BUILT_IN_CTZLL:
11717 case BUILT_IN_FFS:
11718 case BUILT_IN_FFSIMAX:
11719 case BUILT_IN_FFSL:
11720 case BUILT_IN_FFSLL:
11721 case BUILT_IN_IMAXABS:
11722 case BUILT_IN_FINITE:
11723 case BUILT_IN_FINITEF:
11724 case BUILT_IN_FINITEL:
11725 case BUILT_IN_FINITED32:
11726 case BUILT_IN_FINITED64:
11727 case BUILT_IN_FINITED128:
11728 case BUILT_IN_FPCLASSIFY:
11729 case BUILT_IN_ISFINITE:
11730 case BUILT_IN_ISINF_SIGN:
11731 case BUILT_IN_ISINF:
11732 case BUILT_IN_ISINFF:
11733 case BUILT_IN_ISINFL:
11734 case BUILT_IN_ISINFD32:
11735 case BUILT_IN_ISINFD64:
11736 case BUILT_IN_ISINFD128:
11737 case BUILT_IN_ISNAN:
11738 case BUILT_IN_ISNANF:
11739 case BUILT_IN_ISNANL:
11740 case BUILT_IN_ISNAND32:
11741 case BUILT_IN_ISNAND64:
11742 case BUILT_IN_ISNAND128:
11743 case BUILT_IN_ISNORMAL:
11744 case BUILT_IN_ISGREATER:
11745 case BUILT_IN_ISGREATEREQUAL:
11746 case BUILT_IN_ISLESS:
11747 case BUILT_IN_ISLESSEQUAL:
11748 case BUILT_IN_ISLESSGREATER:
11749 case BUILT_IN_ISUNORDERED:
11750 case BUILT_IN_VA_ARG_PACK:
11751 case BUILT_IN_VA_ARG_PACK_LEN:
11752 case BUILT_IN_VA_COPY:
11753 case BUILT_IN_TRAP:
11754 case BUILT_IN_SAVEREGS:
11755 case BUILT_IN_POPCOUNTL:
11756 case BUILT_IN_POPCOUNTLL:
11757 case BUILT_IN_POPCOUNTIMAX:
11758 case BUILT_IN_POPCOUNT:
11759 case BUILT_IN_PARITYL:
11760 case BUILT_IN_PARITYLL:
11761 case BUILT_IN_PARITYIMAX:
11762 case BUILT_IN_PARITY:
11763 case BUILT_IN_LABS:
11764 case BUILT_IN_LLABS:
11765 case BUILT_IN_PREFETCH:
11766 case BUILT_IN_ACC_ON_DEVICE:
11767 return true;
11768
11769 default:
11770 return is_simple_builtin (decl);
11771 }
11772
11773 return false;
11774 }
11775
11776 /* Return true if T is a constant and the value cast to a target char
11777 can be represented by a host char.
11778 If so, store the resulting char constant in *P. */
11779
11780 bool
11781 target_char_cst_p (tree t, char *p)
11782 {
11783 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11784 return false;
11785
11786 *p = (char)tree_to_uhwi (t);
11787 return true;
11788 }
11789
11790 /* Return true if the builtin DECL is implemented in a standard library.
11791 Otherwise return false, which doesn't guarantee it is not (thus the list of
11792 handled builtins below may be incomplete). */
11793
11794 bool
11795 builtin_with_linkage_p (tree decl)
11796 {
11797 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11798 switch (DECL_FUNCTION_CODE (decl))
11799 {
11800 CASE_FLT_FN (BUILT_IN_ACOS):
11801 CASE_FLT_FN (BUILT_IN_ACOSH):
11802 CASE_FLT_FN (BUILT_IN_ASIN):
11803 CASE_FLT_FN (BUILT_IN_ASINH):
11804 CASE_FLT_FN (BUILT_IN_ATAN):
11805 CASE_FLT_FN (BUILT_IN_ATANH):
11806 CASE_FLT_FN (BUILT_IN_ATAN2):
11807 CASE_FLT_FN (BUILT_IN_CBRT):
11808 CASE_FLT_FN (BUILT_IN_CEIL):
11809 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11810 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11811 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11812 CASE_FLT_FN (BUILT_IN_COS):
11813 CASE_FLT_FN (BUILT_IN_COSH):
11814 CASE_FLT_FN (BUILT_IN_ERF):
11815 CASE_FLT_FN (BUILT_IN_ERFC):
11816 CASE_FLT_FN (BUILT_IN_EXP):
11817 CASE_FLT_FN (BUILT_IN_EXP2):
11818 CASE_FLT_FN (BUILT_IN_EXPM1):
11819 CASE_FLT_FN (BUILT_IN_FABS):
11820 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11821 CASE_FLT_FN (BUILT_IN_FDIM):
11822 CASE_FLT_FN (BUILT_IN_FLOOR):
11823 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11824 CASE_FLT_FN (BUILT_IN_FMA):
11825 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11826 CASE_FLT_FN (BUILT_IN_FMAX):
11827 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11828 CASE_FLT_FN (BUILT_IN_FMIN):
11829 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11830 CASE_FLT_FN (BUILT_IN_FMOD):
11831 CASE_FLT_FN (BUILT_IN_FREXP):
11832 CASE_FLT_FN (BUILT_IN_HYPOT):
11833 CASE_FLT_FN (BUILT_IN_ILOGB):
11834 CASE_FLT_FN (BUILT_IN_LDEXP):
11835 CASE_FLT_FN (BUILT_IN_LGAMMA):
11836 CASE_FLT_FN (BUILT_IN_LLRINT):
11837 CASE_FLT_FN (BUILT_IN_LLROUND):
11838 CASE_FLT_FN (BUILT_IN_LOG):
11839 CASE_FLT_FN (BUILT_IN_LOG10):
11840 CASE_FLT_FN (BUILT_IN_LOG1P):
11841 CASE_FLT_FN (BUILT_IN_LOG2):
11842 CASE_FLT_FN (BUILT_IN_LOGB):
11843 CASE_FLT_FN (BUILT_IN_LRINT):
11844 CASE_FLT_FN (BUILT_IN_LROUND):
11845 CASE_FLT_FN (BUILT_IN_MODF):
11846 CASE_FLT_FN (BUILT_IN_NAN):
11847 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11848 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11849 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11850 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11851 CASE_FLT_FN (BUILT_IN_POW):
11852 CASE_FLT_FN (BUILT_IN_REMAINDER):
11853 CASE_FLT_FN (BUILT_IN_REMQUO):
11854 CASE_FLT_FN (BUILT_IN_RINT):
11855 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11856 CASE_FLT_FN (BUILT_IN_ROUND):
11857 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11858 CASE_FLT_FN (BUILT_IN_SCALBLN):
11859 CASE_FLT_FN (BUILT_IN_SCALBN):
11860 CASE_FLT_FN (BUILT_IN_SIN):
11861 CASE_FLT_FN (BUILT_IN_SINH):
11862 CASE_FLT_FN (BUILT_IN_SINCOS):
11863 CASE_FLT_FN (BUILT_IN_SQRT):
11864 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11865 CASE_FLT_FN (BUILT_IN_TAN):
11866 CASE_FLT_FN (BUILT_IN_TANH):
11867 CASE_FLT_FN (BUILT_IN_TGAMMA):
11868 CASE_FLT_FN (BUILT_IN_TRUNC):
11869 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11870 return true;
11871 default:
11872 break;
11873 }
11874 return false;
11875 }