/* Expand builtin functions.
- Copyright (C) 1988-2018 Free Software Foundation, Inc.
+ Copyright (C) 1988-2019 Free Software Foundation, Inc.
This file is part of GCC.
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
+#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
/* Non-zero if __builtin_constant_p should be folded right away. */
bool force_folding_builtin_constant_p;
-static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
+static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
- rtx target, tree exp, int endp);
+ rtx target, tree exp,
+ memop_ret retmode);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
-static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
+static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
-static rtx expand_builtin_strcpy_args (tree, tree, rtx);
+static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
+static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
return false;
}
-
-/* Return true if DECL is a function symbol representing a built-in. */
-
-bool
-is_builtin_fn (tree decl)
-{
- return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
-}
-
/* Return true if NODE should be considered for inline expansion regardless
of the optimization level. This means whenever a function is invoked with
its "internal" name, which normally contains the prefix "__builtin". */
return align;
}
-/* Return the number of non-zero elements in the sequence
+/* Return the number of leading non-zero elements in the sequence
[ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
-static unsigned
+unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
return n;
}
+/* For a call at LOC to a function FN that expects a string in the argument
+ ARG, issue a diagnostic due to it being called with an argument
+ declared at NONSTR that is a character array with no terminating NUL. */
+
+void
+warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
+{
+ if (TREE_NO_WARNING (arg))
+ return;
+
+ loc = expansion_point_location_if_in_system_header (loc);
+
+ if (warning_at (loc, OPT_Wstringop_overflow_,
+ "%qs argument missing terminating nul", fn))
+ {
+ inform (DECL_SOURCE_LOCATION (decl),
+ "referenced argument declared here");
+ TREE_NO_WARNING (arg) = 1;
+ }
+}
+
+/* If EXP refers to an unterminated constant character array return
+ the declaration of the object of which the array is a member or
+ element and if SIZE is not null, set *SIZE to the size of
+ the unterminated array and set *EXACT if the size is exact or
+ clear it otherwise. Otherwise return null. */
+
+tree
+unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
+{
+ /* C_STRLEN will return NULL and set DECL in the info
+ structure if EXP references an unterminated array. */
+ c_strlen_data lendata = { };
+ tree len = c_strlen (exp, 1, &lendata);
+ if (len == NULL_TREE && lendata.minlen && lendata.decl)
+ {
+ if (size)
+ {
+ len = lendata.minlen;
+ if (lendata.off)
+ {
+ /* Constant offsets are already accounted for in LENDATA.MINLEN,
+ but not in a SSA_NAME + CST expression. */
+ if (TREE_CODE (lendata.off) == INTEGER_CST)
+ *exact = true;
+ else if (TREE_CODE (lendata.off) == PLUS_EXPR
+ && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
+ {
+ /* Subtract the offset from the size of the array. */
+ *exact = false;
+ tree temp = TREE_OPERAND (lendata.off, 1);
+ temp = fold_convert (ssizetype, temp);
+ len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
+ }
+ else
+ *exact = false;
+ }
+ else
+ *exact = true;
+
+ *size = len;
+ }
+ return lendata.decl;
+ }
+
+ return NULL_TREE;
+}
+
/* Compute the length of a null-terminated character string or wide
character string handling character sizes of 1, 2, and 4 bytes.
TREE_STRING_LENGTH is not the right way because it evaluates to
accesses. Note that this implies the result is not going to be emitted
into the instruction stream.
- The value returned is of type `ssizetype'.
+ Additional information about the string accessed may be recorded
+ in DATA. For example, if SRC references an unterminated string,
+ then the declaration will be stored in the DECL field. If the
+ length of the unterminated string can be determined, it'll be
+ stored in the LEN field. Note this length could well be different
+ than what a C strlen call would return.
- Unfortunately, string_constant can't access the values of const char
- arrays with initializers, so neither can we do so here. */
+ ELTSIZE is 1 for normal single byte character strings, and 2 or
+ 4 for wide character strings. ELTSIZE is by default 1.
+
+ The value returned is of type `ssizetype'. */
tree
-c_strlen (tree src, int only_value)
+c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
+ /* If we were not passed a DATA pointer, then get one to a local
+ structure. That avoids having to check DATA for NULL before
+ each time we want to use it. */
+ c_strlen_data local_strlen_data = { };
+ if (!data)
+ data = &local_strlen_data;
+
+ gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
STRIP_NOPS (src);
if (TREE_CODE (src) == COND_EXPR
&& (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
{
tree len1, len2;
- len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
- len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
+ len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
+ len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
if (tree_int_cst_equal (len1, len2))
return len1;
}
if (TREE_CODE (src) == COMPOUND_EXPR
&& (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
- return c_strlen (TREE_OPERAND (src, 1), only_value);
+ return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
location_t loc = EXPR_LOC_OR_LOC (src, input_location);
/* Offset from the beginning of the string in bytes. */
tree byteoff;
- src = string_constant (src, &byteoff);
+ tree memsize;
+ tree decl;
+ src = string_constant (src, &byteoff, &memsize, &decl);
if (src == 0)
return NULL_TREE;
/* Determine the size of the string element. */
- unsigned eltsize
- = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
+ if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
+ return NULL_TREE;
/* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
- in case the latter is less than the size of the array. */
- HOST_WIDE_INT maxelts = TREE_STRING_LENGTH (src);
- tree type = TREE_TYPE (src);
- if (tree size = TYPE_SIZE_UNIT (type))
- if (tree_fits_shwi_p (size))
- maxelts = tree_to_uhwi (size);
+ in case the latter is less than the size of the array, such as when
+ SRC refers to a short string literal used to initialize a large array.
+ In that case, the elements of the array after the terminating NUL are
+ all NUL. */
+ HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
+ strelts = strelts / eltsize;
+
+ if (!tree_fits_uhwi_p (memsize))
+ return NULL_TREE;
- maxelts = maxelts / eltsize - 1;
+ HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
/* PTR can point to the byte representation of any string type, including
char* and wchar_t*. */
if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
{
- /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
- compute the offset to the following null if we don't know where to
+ /* The code below works only for single byte character types. */
+ if (eltsize != 1)
+ return NULL_TREE;
+
+ /* If the string has an internal NUL character followed by any
+ non-NUL characters (e.g., "foo\0bar"), we can't compute
+ the offset to the following NUL if we don't know where to
start searching for it. */
- if (string_length (ptr, eltsize, maxelts) < maxelts)
+ unsigned len = string_length (ptr, eltsize, strelts);
+
+ /* Return when an embedded null character is found or none at all.
+ In the latter case, set the DECL/LEN field in the DATA structure
+ so that callers may examine them. */
+ if (len + 1 < strelts)
+ return NULL_TREE;
+ else if (len >= maxelts)
{
- /* Return when an embedded null character is found. */
+ data->decl = decl;
+ data->off = byteoff;
+ data->minlen = ssize_int (len);
return NULL_TREE;
}
- if (!maxelts)
+ /* For empty strings the result should be zero. */
+ if (len == 0)
return ssize_int (0);
/* We don't know the starting offset, but we do know that the string
- has no internal zero bytes. We can assume that the offset falls
- within the bounds of the string; otherwise, the programmer deserves
- what he gets. Subtract the offset from the length of the string,
- and return that. This would perhaps not be valid if we were dealing
- with named arrays in addition to literal string constants. */
- return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
+ has no internal zero bytes. If the offset falls within the bounds
+ of the string subtract the offset from the length of the string,
+ and return that. Otherwise the length is zero. Take care to
+ use SAVE_EXPR in case the OFFSET has side-effects. */
+ tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
+ : byteoff;
+ offsave = fold_convert_loc (loc, sizetype, offsave);
+ tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
+ size_int (len));
+ tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
+ offsave);
+ lenexp = fold_convert_loc (loc, ssizetype, lenexp);
+ return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
+ build_zero_cst (ssizetype));
}
/* Offset from the beginning of the string in elements. */
a null character if we can represent it as a single HOST_WIDE_INT. */
if (byteoff == 0)
eltoff = 0;
- else if (! tree_fits_shwi_p (byteoff))
+ else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
eltoff = -1;
else
- eltoff = tree_to_shwi (byteoff) / eltsize;
+ eltoff = tree_to_uhwi (byteoff) / eltsize;
/* If the offset is known to be out of bounds, warn, and call strlen at
runtime. */
- if (eltoff < 0 || eltoff > maxelts)
+ if (eltoff < 0 || eltoff >= maxelts)
{
- /* Suppress multiple warnings for propagated constant strings. */
+ /* Suppress multiple warnings for propagated constant strings. */
if (only_value != 2
- && !TREE_NO_WARNING (src))
- {
- warning_at (loc, OPT_Warray_bounds,
- "offset %qwi outside bounds of constant string",
- eltoff);
- TREE_NO_WARNING (src) = 1;
- }
+ && !TREE_NO_WARNING (src)
+ && warning_at (loc, OPT_Warray_bounds,
+ "offset %qwi outside bounds of constant string",
+ eltoff))
+ TREE_NO_WARNING (src) = 1;
return NULL_TREE;
}
+ /* If eltoff is larger than strelts but less than maxelts the
+ string length is zero, since the excess memory will be zero. */
+ if (eltoff > strelts)
+ return ssize_int (0);
+
/* Use strlen to search for the first zero byte. Since any strings
constructed with build_string will have nulls appended, we win even
if we get handed something like (char[4])"abcd".
Since ELTOFF is our starting index into the string, no further
calculation is needed. */
unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
- maxelts - eltoff);
+ strelts - eltoff);
+
+ /* Don't know what to return if there was no zero termination.
+ Ideally this would turn into a gcc_checking_assert over time.
+ Set DECL/LEN so callers can examine them. */
+ if (len >= maxelts - eltoff)
+ {
+ data->decl = decl;
+ data->off = byteoff;
+ data->minlen = ssize_int (len);
+ return NULL_TREE;
+ }
return ssize_int (len);
}
/* Return a constant integer corresponding to target reading
- GET_MODE_BITSIZE (MODE) bits from string constant STR. */
+ GET_MODE_BITSIZE (MODE) bits from string constant STR. If
+ NULL_TERMINATED_P, reading stops after '\0' character, all further ones
+ are assumed to be zero, otherwise it reads as many characters
+ as needed. */
-static rtx
-c_readstr (const char *str, scalar_int_mode mode)
+rtx
+c_readstr (const char *str, scalar_int_mode mode,
+ bool null_terminated_p/*=true*/)
{
HOST_WIDE_INT ch;
unsigned int i, j;
j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
j *= BITS_PER_UNIT;
- if (ch)
+ if (ch || !null_terminated_p)
ch = (unsigned char) str[i];
tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
}
emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
- emit_move_insn (hard_frame_pointer_rtx, fp);
+ /* Restore the frame pointer and stack pointer. We must use a
+ temporary since the setjmp buffer may be a local. */
+ fp = copy_to_reg (fp);
emit_stack_restore (SAVE_NONLOCAL, stack);
+ emit_move_insn (hard_frame_pointer_rtx, fp);
emit_use (hard_frame_pointer_rtx);
emit_use (stack_pointer_rtx);
emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
- /* Restore frame pointer for containing function. */
- emit_move_insn (hard_frame_pointer_rtx, r_fp);
+ /* Restore the frame pointer and stack pointer. We must use a
+ temporary since the setjmp buffer may be a local. */
+ r_fp = copy_to_reg (r_fp);
emit_stack_restore (SAVE_NONLOCAL, r_sp);
+ emit_move_insn (hard_frame_pointer_rtx, r_fp);
/* USE of hard_frame_pointer_rtx added for consistency;
not clear if really needed. */
tree maxobjsize = max_object_size ();
tree func = get_callee_fndecl (exp);
- tree len = c_strlen (src, 0);
+ /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
+ so these conversions aren't necessary. */
+ c_strlen_data lendata = { };
+ tree len = c_strlen (src, 0, &lendata, 1);
+ if (len)
+ len = fold_convert_loc (loc, TREE_TYPE (bound), len);
if (TREE_CODE (bound) == INTEGER_CST)
{
"%K%qD specified bound %E "
"exceeds maximum object size %E",
exp, func, bound, maxobjsize))
- TREE_NO_WARNING (exp) = true;
+ TREE_NO_WARNING (exp) = true;
+ bool exact = true;
if (!len || TREE_CODE (len) != INTEGER_CST)
+ {
+ /* Clear EXACT if LEN may be less than SRC suggests,
+ such as in
+ strnlen (&a[i], sizeof a)
+ where the value of i is unknown. Unless i's value is
+ zero, the call is unsafe because the bound is greater. */
+ lendata.decl = unterminated_array (src, &len, &exact);
+ if (!lendata.decl)
+ return NULL_RTX;
+ }
+
+ if (lendata.decl
+ && !TREE_NO_WARNING (exp)
+ && ((tree_int_cst_lt (len, bound))
+ || !exact))
+ {
+ location_t warnloc
+ = expansion_point_location_if_in_system_header (loc);
+
+ if (warning_at (warnloc, OPT_Wstringop_overflow_,
+ exact
+ ? G_("%K%qD specified bound %E exceeds the size %E "
+ "of unterminated array")
+ : G_("%K%qD specified bound %E may exceed the size "
+ "of at most %E of unterminated array"),
+ exp, func, bound, len))
+ {
+ inform (DECL_SOURCE_LOCATION (lendata.decl),
+ "referenced argument declared here");
+ TREE_NO_WARNING (exp) = true;
+ return NULL_RTX;
+ }
+ }
+
+ if (!len)
return NULL_RTX;
- len = fold_convert_loc (loc, size_type_node, len);
len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
return NULL_RTX;
wide_int min, max;
- enum value_range_type rng = get_range_info (bound, &min, &max);
+ enum value_range_kind rng = get_range_info (bound, &min, &max);
if (rng != VR_RANGE)
return NULL_RTX;
"%K%qD specified bound [%wu, %wu] "
"exceeds maximum object size %E",
exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
- TREE_NO_WARNING (exp) = true;
+ TREE_NO_WARNING (exp) = true;
+ bool exact = true;
if (!len || TREE_CODE (len) != INTEGER_CST)
+ {
+ lendata.decl = unterminated_array (src, &len, &exact);
+ if (!lendata.decl)
+ return NULL_RTX;
+ }
+
+ if (lendata.decl
+ && !TREE_NO_WARNING (exp)
+ && (wi::ltu_p (wi::to_wide (len), min)
+ || !exact))
+ {
+ location_t warnloc
+ = expansion_point_location_if_in_system_header (loc);
+
+ if (warning_at (warnloc, OPT_Wstringop_overflow_,
+ exact
+ ? G_("%K%qD specified bound [%wu, %wu] exceeds "
+ "the size %E of unterminated array")
+ : G_("%K%qD specified bound [%wu, %wu] may exceed "
+ "the size of at most %E of unterminated array"),
+ exp, func, min.to_uhwi (), max.to_uhwi (), len))
+ {
+ inform (DECL_SOURCE_LOCATION (lendata.decl),
+ "referenced argument declared here");
+ TREE_NO_WARNING (exp) = true;
+ }
+ }
+
+ if (lendata.decl)
return NULL_RTX;
if (wi::gtu_p (min, wi::to_wide (len)))
else
{
wide_int min, max;
- enum value_range_type range_type = VR_UNDEFINED;
+ enum value_range_kind range_type = VR_UNDEFINED;
/* Determine bounds from the type. */
if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
the upper bound given by MAXREAD add one to it for
the terminating nul. Otherwise, set it to one for
the same reason, or to MAXREAD as appropriate. */
- get_range_strlen (srcstr, range);
+ c_strlen_data lendata = { };
+ get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
+ range[0] = lendata.minlen;
+ range[1] = lendata.maxbound;
if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
{
if (maxread && tree_int_cst_le (maxread, range[0]))
/* First check the number of bytes to be written against the maximum
object size. */
- if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
+ if (range[0]
+ && TREE_CODE (range[0]) == INTEGER_CST
+ && tree_int_cst_lt (maxobjsize, range[0]))
{
if (TREE_NO_WARNING (exp))
return false;
if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
{
if (range[0]
+ && TREE_CODE (range[0]) == INTEGER_CST
&& ((tree_fits_uhwi_p (dstsize)
&& tree_int_cst_lt (dstsize, range[0]))
- || (tree_fits_uhwi_p (dstwrite)
+ || (dstwrite
+ && tree_fits_uhwi_p (dstwrite)
&& tree_int_cst_lt (dstwrite, range[0]))))
{
if (TREE_NO_WARNING (exp))
&& INTEGRAL_TYPE_P (TREE_TYPE (off)))
{
wide_int min, max;
- enum value_range_type rng = get_range_info (off, &min, &max);
+ enum value_range_kind rng = get_range_info (off, &min, &max);
if (rng == VR_RANGE)
{
check_memop_access (exp, dest, src, len);
return expand_builtin_memory_copy_args (dest, src, len, target, exp,
- /*endp=*/ 0);
+ /*retmode=*/ RETURN_BEGIN);
}
/* Check a call EXP to the memmove built-in for validity.
/* Expand a call EXP to the mempcpy builtin.
Return NULL_RTX if we failed; the caller should emit a normal call,
otherwise try to get the result in TARGET, if convenient (and in
- mode MODE if that's convenient). If ENDP is 0 return the
- destination pointer, if ENDP is 1 return the end pointer ala
- mempcpy, and if ENDP is 2 return the end pointer minus one ala
- stpcpy. */
+ mode MODE if that's convenient). */
static rtx
expand_builtin_mempcpy (tree exp, rtx target)
return NULL_RTX;
return expand_builtin_mempcpy_args (dest, src, len,
- target, exp, /*endp=*/ 1);
+ target, exp, /*retmode=*/ RETURN_END);
}
/* Helper function to do the actual work for expand of memory copy family
functions (memcpy, mempcpy, stpcpy). Expansing should assign LEN bytes
- of memory from SRC to DEST and assign to TARGET if convenient.
- If ENDP is 0 return the
- destination pointer, if ENDP is 1 return the end pointer ala
- mempcpy, and if ENDP is 2 return the end pointer minus one ala
- stpcpy. */
+ of memory from SRC to DEST and assign to TARGET if convenient. Return
+ value is based on RETMODE argument. */
static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
- rtx target, tree exp, int endp)
+ rtx target, tree exp, memop_ret retmode)
{
const char *src_str;
unsigned int src_align = get_pointer_alignment (src);
dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
builtin_memcpy_read_str,
CONST_CAST (char *, src_str),
- dest_align, false, endp);
+ dest_align, false, retmode);
dest_mem = force_operand (XEXP (dest_mem, 0), target);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
/* Copy word part most expediently. */
enum block_op_methods method = BLOCK_OP_NORMAL;
- if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
+ if (CALL_EXPR_TAILCALL (exp)
+ && (retmode == RETURN_BEGIN || target == const0_rtx))
method = BLOCK_OP_TAILCALL;
- if (endp == 1 && target != const0_rtx)
+ if (retmode == RETURN_END && target != const0_rtx)
method = BLOCK_OP_NO_LIBCALL_RET;
dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
expected_align, expected_size,
dest_addr = convert_memory_address (ptr_mode, dest_addr);
}
- if (endp && target != const0_rtx)
+ if (retmode != RETURN_BEGIN && target != const0_rtx)
{
dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
/* stpcpy pointer to last byte. */
- if (endp == 2)
+ if (retmode == RETURN_END_MINUS_ONE)
dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
}
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
- rtx target, tree orig_exp, int endp)
+ rtx target, tree orig_exp, memop_ret retmode)
{
return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
- endp);
+ retmode);
}
/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
we failed, the caller should emit a normal call, otherwise try to
- get the result in TARGET, if convenient. If ENDP is 0 return the
- destination pointer, if ENDP is 1 return the end pointer ala
- mempcpy, and if ENDP is 2 return the end pointer minus one ala
- stpcpy. */
+ get the result in TARGET, if convenient.
+ Return value is based on RETMODE argument. */
static rtx
-expand_movstr (tree dest, tree src, rtx target, int endp)
+expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
struct expand_operand ops[3];
rtx dest_mem;
dest_mem = get_memory_rtx (dest, NULL);
src_mem = get_memory_rtx (src, NULL);
- if (!endp)
+ if (retmode == RETURN_BEGIN)
{
target = force_reg (Pmode, XEXP (dest_mem, 0));
dest_mem = replace_equiv_address (dest_mem, target);
}
- create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
+ create_output_operand (&ops[0],
+ retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
create_fixed_operand (&ops[1], dest_mem);
create_fixed_operand (&ops[2], src_mem);
if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
return NULL_RTX;
- if (endp && target != const0_rtx)
+ if (retmode != RETURN_BEGIN && target != const0_rtx)
{
target = ops[0].value;
/* movstr is supposed to set end to the address of the NUL
terminator. If the caller requested a mempcpy-like return value,
adjust it. */
- if (endp == 1)
+ if (retmode == RETURN_END)
{
rtx tem = plus_constant (GET_MODE (target),
gen_lowpart (GET_MODE (target), target), 1);
src, destsize);
}
- if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
+ if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
{
/* Check to see if the argument was declared attribute nonstring
and if so, issue a warning since at this point it's not known
expand_builtin_strcpy. */
static rtx
-expand_builtin_strcpy_args (tree dest, tree src, rtx target)
+expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
{
- return expand_movstr (dest, src, target, /*endp=*/0);
+ /* Detect strcpy calls with unterminated arrays. */
+ if (tree nonstr = unterminated_array (src))
+ {
+ /* NONSTR refers to the non-nul terminated constant array. */
+ if (!TREE_NO_WARNING (exp))
+ warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
+ return NULL_RTX;
+ }
+
+ return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
}
/* Expand a call EXP to the stpcpy builtin.
compile-time, not an expression containing a string. This is
because the latter will potentially produce pessimized code
when used to produce the return value. */
- if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
- return expand_movstr (dst, src, target, /*endp=*/2);
+ c_strlen_data lendata = { };
+ if (!c_getstr (src, NULL)
+ || !(len = c_strlen (src, 0, &lendata, 1)))
+ return expand_movstr (dst, src, target,
+ /*retmode=*/ RETURN_END_MINUS_ONE);
+
+ if (lendata.decl && !TREE_NO_WARNING (exp))
+ warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
ret = expand_builtin_mempcpy_args (dst, src, lenp1,
- target, exp, /*endp=*/2);
+ target, exp,
+ /*retmode=*/ RETURN_END_MINUS_ONE);
if (ret)
return ret;
if (CONST_INT_P (len_rtx))
{
- ret = expand_builtin_strcpy_args (dst, src, target);
+ ret = expand_builtin_strcpy_args (exp, dst, src, target);
if (ret)
{
}
}
- return expand_movstr (dst, src, target, /*endp=*/2);
+ return expand_movstr (dst, src, target,
+ /*retmode=*/ RETURN_END_MINUS_ONE);
}
}
/* Try to determine the range of lengths that the source expression
refers to. */
- tree lenrange[2];
- get_range_strlen (src, lenrange);
+ c_strlen_data lendata = { };
+ get_range_strlen (src, &lendata, /* eltsize = */ 1);
/* Try to verify that the destination is big enough for the shortest
string. */
}
/* Add one for the terminating nul. */
- tree srclen = (lenrange[0]
- ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
+ tree srclen = (lendata.minlen
+ ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
size_one_node)
: NULL_TREE);
tree slen = c_strlen (src, 1);
/* Try to determine the range of lengths that the source expression
- refers to. */
- tree lenrange[2];
- if (slen)
- lenrange[0] = lenrange[1] = slen;
- else
- get_range_strlen (src, lenrange);
+ refers to. Since the lengths are only used for warning and not
+ for code generation disable strict mode below. */
+ tree maxlen = slen;
+ if (!maxlen)
+ {
+ c_strlen_data lendata = { };
+ get_range_strlen (src, &lendata, /* eltsize = */ 1);
+ maxlen = lendata.maxbound;
+ }
/* Try to verify that the destination is big enough for the shortest
string. First try to determine the size of the destination object
tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
/* Add one for the terminating nul. */
- tree srclen = (lenrange[0]
- ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
+ tree srclen = (maxlen
+ ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
size_one_node)
: NULL_TREE);
dest_mem = get_memory_rtx (dest, len);
store_by_pieces (dest_mem, tree_to_uhwi (len),
builtin_strncpy_read_str,
- CONST_CAST (char *, p), dest_align, false, 0);
+ CONST_CAST (char *, p), dest_align, false,
+ RETURN_BEGIN);
dest_mem = force_operand (XEXP (dest_mem, 0), target);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
val_rtx = force_reg (val_mode, val_rtx);
store_by_pieces (dest_mem, tree_to_uhwi (len),
builtin_memset_gen_str, val_rtx, dest_align,
- true, 0);
+ true, RETURN_BEGIN);
}
else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
dest_align, expected_align,
builtin_memset_read_str, &c, dest_align,
true))
store_by_pieces (dest_mem, tree_to_uhwi (len),
- builtin_memset_read_str, &c, dest_align, true, 0);
+ builtin_memset_read_str, &c, dest_align, true,
+ RETURN_BEGIN);
else if (!set_storage_via_setmem (dest_mem, len_rtx,
gen_int_mode (c, val_mode),
dest_align, expected_align,
tree arg1 = CALL_EXPR_ARG (exp, 0);
tree arg2 = CALL_EXPR_ARG (exp, 1);
tree len = CALL_EXPR_ARG (exp, 2);
+ enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
+ bool no_overflow = true;
/* Diagnose calls where the specified length exceeds the size of either
object. */
- if (warn_stringop_overflow)
+ tree size = compute_objsize (arg1, 0);
+ no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
+ len, /*maxread=*/NULL_TREE, size,
+ /*objsize=*/NULL_TREE);
+ if (no_overflow)
{
- tree size = compute_objsize (arg1, 0);
- if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
- /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
- {
- size = compute_objsize (arg2, 0);
- check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
- /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
- }
+ size = compute_objsize (arg2, 0);
+ no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
+ len, /*maxread=*/NULL_TREE, size,
+ /*objsize=*/NULL_TREE);
+ }
+
+ /* If the specified length exceeds the size of either object,
+ call the function. */
+ if (!no_overflow)
+ return NULL_RTX;
+
+ /* Due to the performance benefit, always inline the calls first
+ when result_eq is false. */
+ rtx result = NULL_RTX;
+
+ if (!result_eq && fcode != BUILT_IN_BCMP)
+ {
+ result = inline_expand_builtin_string_cmp (exp, target);
+ if (result)
+ return result;
}
machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
&& (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
constfn = builtin_memcpy_read_str;
- rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
- TREE_TYPE (len), target,
- result_eq, constfn,
- CONST_CAST (char *, src_str));
+ result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
+ TREE_TYPE (len), target,
+ result_eq, constfn,
+ CONST_CAST (char *, src_str));
if (result)
{
if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return NULL_RTX;
+ /* Due to the performance benefit, always inline the calls first. */
+ rtx result = NULL_RTX;
+ result = inline_expand_builtin_string_cmp (exp, target);
+ if (result)
+ return result;
+
insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
rtx arg1_rtx = get_memory_rtx (arg1, NULL);
rtx arg2_rtx = get_memory_rtx (arg2, NULL);
- rtx result = NULL_RTX;
/* Try to call cmpstrsi. */
if (cmpstr_icode != CODE_FOR_nothing)
result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return NULL_RTX;
+ /* Due to the performance benefit, always inline the calls first. */
+ rtx result = NULL_RTX;
+ result = inline_expand_builtin_string_cmp (exp, target);
+ if (result)
+ return result;
+
/* If c_strlen can determine an expression for one of the string
lengths, and it doesn't have side effects, then emit cmpstrnsi
using length MIN(strlen(string)+1, arg3). */
/* If we are not using the given length, we must incorporate it here.
The actual new length parameter will be MIN(len,arg3) in this case. */
if (len != len3)
- len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
+ {
+ len = fold_convert_loc (loc, sizetype, len);
+ len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
+ }
rtx arg1_rtx = get_memory_rtx (arg1, len);
rtx arg2_rtx = get_memory_rtx (arg2, len);
rtx arg3_rtx = expand_normal (len);
- rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
- arg2_rtx, TREE_TYPE (len), arg3_rtx,
- MIN (arg1_align, arg2_align));
+ result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
+ arg2_rtx, TREE_TYPE (len), arg3_rtx,
+ MIN (arg1_align, arg2_align));
tree fndecl = get_callee_fndecl (exp);
if (result)
if (!valid_arglist)
return NULL_RTX;
- if ((alloca_for_var && !warn_vla_limit)
- || (!alloca_for_var && !warn_alloca_limit))
- {
- /* -Walloca-larger-than and -Wvla-larger-than settings override
- the more general -Walloc-size-larger-than so unless either of
- the former options is specified check the alloca arguments for
- overflow. */
+ if ((alloca_for_var
+ && warn_vla_limit >= HOST_WIDE_INT_MAX
+ && warn_alloc_size_limit < warn_vla_limit)
+ || (!alloca_for_var
+ && warn_alloca_limit >= HOST_WIDE_INT_MAX
+ && warn_alloc_size_limit < warn_alloca_limit
+ ))
+ {
+ /* -Walloca-larger-than and -Wvla-larger-than settings of
+ less than HOST_WIDE_INT_MAX override the more general
+ -Walloc-size-larger-than so unless either of the former
+     options is smaller than the last one (which would imply
+ that the call was already checked), check the alloca
+ arguments for overflow. */
tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
int idx[] = { 0, -1 };
maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
return target;
}
+/* Expand a call to __builtin_expect_with_probability.  We simply return
+   our first argument: the expectation and probability hints should
+   already have been consumed by the tree-level branch prediction pass.  */
+
+static rtx
+expand_builtin_expect_with_probability (tree exp, rtx target)
+{
+  tree arg;
+
+  /* The builtin takes (exp, expected, probability); with fewer arguments
+     the call is malformed, so expand to a harmless constant.  */
+  if (call_expr_nargs (exp) < 3)
+    return const0_rtx;
+  arg = CALL_EXPR_ARG (exp, 0);
+
+  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
+  /* When guessing was done, the hints should be already stripped away.  */
+  gcc_assert (!flag_guess_branch_prob
+	      || optimize == 0 || seen_error ());
+  return target;
+}
+
+
/* Expand a call to __builtin_assume_aligned. We just return our first
argument as the builtin_assume_aligned semantic should've been already
executed by CCP. */
get_builtin_sync_mem (tree loc, machine_mode mode)
{
rtx addr, mem;
+ int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
+ ? TREE_TYPE (TREE_TYPE (loc))
+ : TREE_TYPE (loc));
+ scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
- addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
- addr = convert_memory_address (Pmode, addr);
+ addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
+ addr = convert_memory_address (addr_mode, addr);
/* Note that we explicitly do not want any alias information for this
memory, so that we kill all other live memories. Otherwise we don't
satisfy the full barrier semantics of the intrinsic. */
- mem = validize_mem (gen_rtx_MEM (mode, addr));
+ mem = gen_rtx_MEM (mode, addr);
+
+ set_mem_addr_space (mem, addr_space);
+
+ mem = validize_mem (mem);
/* The alignment needs to be at least according to that of the mode. */
set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
{
rtx op;
unsigned HOST_WIDE_INT val;
- source_location loc
+ location_t loc
= expansion_point_location_if_in_system_header (input_location);
/* If the parameter is not a constant, it's a run time value so we'll just
enum memmodel success, failure;
tree weak;
bool is_weak;
- source_location loc
+ location_t loc
= expansion_point_location_if_in_system_header (input_location);
success = get_memmodel (CALL_EXPR_ARG (exp, 4));
enum memmodel success, failure;
tree lhs;
bool is_weak;
- source_location loc
+ location_t loc
= expansion_point_location_if_in_system_header (gimple_location (call));
success = get_memmodel (gimple_call_arg (call, 4));
model = get_memmodel (CALL_EXPR_ARG (exp, 1));
if (is_mm_release (model) || is_mm_acq_rel (model))
{
- source_location loc
+ location_t loc
= expansion_point_location_if_in_system_header (input_location);
warning_at (loc, OPT_Winvalid_memory_model,
"invalid memory model for %<__atomic_load%>");
if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
|| is_mm_release (model)))
{
- source_location loc
+ location_t loc
= expansion_point_location_if_in_system_header (input_location);
warning_at (loc, OPT_Winvalid_memory_model,
"invalid memory model for %<__atomic_store%>");
gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
- /* If we will emit code after the call, the call can not be a tail call.
+ /* If we will emit code after the call, the call cannot be a tail call.
If it is emitted as a tail call, a barrier is emitted after it, and
then all trailing code is removed. */
if (!ignore)
if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
{
- source_location loc
+ location_t loc
= expansion_point_location_if_in_system_header (input_location);
warning_at (loc, OPT_Winvalid_memory_model,
"invalid memory model for %<__atomic_store%>");
expand_insn (icode, 1, &op);
return target;
}
- error ("__builtin_thread_pointer is not supported on this target");
+ error ("%<__builtin_thread_pointer%> is not supported on this target");
return const0_rtx;
}
expand_insn (icode, 1, &op);
return;
}
- error ("__builtin_set_thread_pointer is not supported on this target");
+ error ("%<__builtin_set_thread_pointer%> is not supported on this target");
}
\f
return target;
}
+/* Expand a string compare operation using a sequence of char comparisons
+   to get rid of the calling overhead, with result going to TARGET if
+   that's convenient.
+
+   VAR_STR is the variable string source;
+   CONST_STR is the constant string source;
+   LENGTH is the number of chars to compare;
+   CONST_STR_N indicates which source string is the constant string
+   (1 means the first call argument, 2 the second);
+   MODE is the mode of the comparison result (the type of the call).
+
+   The emitted sequence is equivalent
+   to: (assume const_str_n is 2, i.e., arg2 is a constant string)
+
+   target = (int) (unsigned char) var_str[0]
+	    - (int) (unsigned char) const_str[0];
+   if (target != 0)
+     goto ne_label;
+     ...
+   target = (int) (unsigned char) var_str[length - 2]
+	    - (int) (unsigned char) const_str[length - 2];
+   if (target != 0)
+     goto ne_label;
+   target = (int) (unsigned char) var_str[length - 1]
+	    - (int) (unsigned char) const_str[length - 1];
+   ne_label:
+  */
+
+static rtx
+inline_string_cmp (rtx target, tree var_str, const char *const_str,
+		   unsigned HOST_WIDE_INT length,
+		   int const_str_n, machine_mode mode)
+{
+  HOST_WIDE_INT offset = 0;
+  rtx var_rtx_array
+    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
+  rtx var_rtx = NULL_RTX;
+  rtx const_rtx = NULL_RTX;
+  rtx result = target ? target : gen_reg_rtx (mode);
+  rtx_code_label *ne_label = gen_label_rtx ();
+  tree unit_type_node = unsigned_char_type_node;
+  scalar_int_mode unit_mode
+    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
+
+  start_sequence ();
+
+  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
+    {
+      /* Load the next byte of the variable string and materialize the
+	 corresponding constant byte.  */
+      var_rtx
+	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
+      const_rtx = c_readstr (const_str + offset, unit_mode);
+      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
+      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
+
+      /* Zero-extend both chars to MODE and subtract; the library
+	 functions compare as unsigned char.  */
+      op0 = convert_modes (mode, unit_mode, op0, 1);
+      op1 = convert_modes (mode, unit_mode, op1, 1);
+      result = expand_simple_binop (mode, MINUS, op0, op1,
+				    result, 1, OPTAB_WIDEN);
+      /* Any difference before the last byte short-circuits to NE_LABEL.  */
+      if (i < length - 1)
+	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
+				 mode, true, ne_label);
+      offset += GET_MODE_SIZE (unit_mode);
+    }
+
+  emit_label (ne_label);
+  rtx_insn *insns = get_insns ();
+  end_sequence ();
+  emit_insn (insns);
+
+  return result;
+}
+
+/* Inline-expand a call to str(n)cmp or memcmp as a sequence of char
+   comparisons, with result going to TARGET if that's convenient.
+   Return NULL_RTX if the call was not inlined.  */
+static rtx
+inline_expand_builtin_string_cmp (tree exp, rtx target)
+{
+  tree fndecl = get_callee_fndecl (exp);
+  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
+  unsigned HOST_WIDE_INT length = 0;
+  /* memcmp is length-bounded like strncmp, so treat it as the "n" flavor.  */
+  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
+
+  /* Do NOT apply this inlining expansion when optimizing for size or
+     optimization level below 2.  */
+  if (optimize < 2 || optimize_insn_for_size_p ())
+    return NULL_RTX;
+
+  gcc_checking_assert (fcode == BUILT_IN_STRCMP
+		       || fcode == BUILT_IN_STRNCMP
+		       || fcode == BUILT_IN_MEMCMP);
+
+  /* On a target where the type of the call (int) has the same or narrower
+     precision than unsigned char, give up the inlining expansion.  */
+  if (TYPE_PRECISION (unsigned_char_type_node)
+      >= TYPE_PRECISION (TREE_TYPE (exp)))
+    return NULL_RTX;
+
+  tree arg1 = CALL_EXPR_ARG (exp, 0);
+  tree arg2 = CALL_EXPR_ARG (exp, 1);
+  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
+
+  unsigned HOST_WIDE_INT len1 = 0;
+  unsigned HOST_WIDE_INT len2 = 0;
+  unsigned HOST_WIDE_INT len3 = 0;
+
+  const char *src_str1 = c_getstr (arg1, &len1);
+  const char *src_str2 = c_getstr (arg2, &len2);
+
+  /* If neither of the strings is a constant, the call does not qualify.  */
+  if (!src_str1 && !src_str2)
+    return NULL_RTX;
+
+  /* For strncmp, if the length is not a constant, do not inline.  */
+  if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
+    return NULL_RTX;
+
+  /* Pick the constant string; when both are constant, pick the shorter
+     one so fewer byte comparisons are emitted below.
+     NOTE(review): a zero LEN1/LEN2 is taken here to mean "not constant";
+     presumably c_getstr never returns non-NULL with a zero length for
+     these builtins -- confirm.  */
+  int const_str_n = 0;
+  if (!len1)
+    const_str_n = 2;
+  else if (!len2)
+    const_str_n = 1;
+  else if (len2 > len1)
+    const_str_n = 1;
+  else
+    const_str_n = 2;
+
+  gcc_checking_assert (const_str_n > 0);
+  length = (const_str_n == 1) ? len1 : len2;
+
+  /* Clamp to the explicit strncmp/memcmp bound when it is smaller.  */
+  if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
+    length = len3;
+
+  /* If the length of the comparison is larger than the threshold,
+     do nothing.  */
+  if (length > (unsigned HOST_WIDE_INT)
+	       PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
+    return NULL_RTX;
+
+  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+
+  /* Now expand the call inline.  */
+  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
+			    (const_str_n == 1) ? src_str1 : src_str2, length,
+			    const_str_n, mode);
+}
+
+/* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
+   represents the size of the first argument to that call, or VOIDmode
+   if the argument is a pointer.  IGNORE will be true if the result
+   isn't used.  */
+static rtx
+expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
+			       bool ignore)
+{
+  rtx val, failsafe;
+  unsigned nargs = call_expr_nargs (exp);
+
+  tree arg0 = CALL_EXPR_ARG (exp, 0);
+
+  if (mode == VOIDmode)
+    {
+      /* Pointer variant: take the mode from the pointer argument itself.  */
+      mode = TYPE_MODE (TREE_TYPE (arg0));
+      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
+    }
+
+  val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
+
+  /* An optional second argument can be used as a failsafe value on
+     some machines.  If it isn't present, then the failsafe value is
+     assumed to be 0.  */
+  if (nargs > 1)
+    {
+      tree arg1 = CALL_EXPR_ARG (exp, 1);
+      failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
+    }
+  else
+    failsafe = const0_rtx;
+
+  /* If the result isn't used, the behavior is undefined.  It would be
+     nice to emit a warning here, but path splitting means this might
+     happen with legitimate code.  So simply drop the builtin
+     expansion in that case; we've handled any side-effects above.  */
+  if (ignore)
+    return const0_rtx;
+
+  /* If we don't have a suitable target, create one to hold the result.  */
+  if (target == NULL || GET_MODE (target) != mode)
+    target = gen_reg_rtx (mode);
+
+  if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
+    val = convert_modes (mode, VOIDmode, val, false);
+
+  /* Delegate to the target hook, which emits the machine-specific
+     speculation barrier or equivalent sequence.  */
+  return targetm.speculation_safe_value (mode, target, val, failsafe);
+}
+
/* Expand an expression EXP that calls a built-in function,
with result going to TARGET if that's convenient
(and in mode MODE if that's convenient).
return target;
break;
- /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
+ /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
when changing it to a strcmp call. */
case BUILT_IN_STRCMP_EQ:
return target;
/* Change this call back to a BUILT_IN_STRCMP. */
- TREE_OPERAND (exp, 1)
+ TREE_OPERAND (exp, 1)
= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
/* Delete the last parameter. */
return target;
/* Change it back to a BUILT_IN_STRNCMP. */
- TREE_OPERAND (exp, 1)
+ TREE_OPERAND (exp, 1)
= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
/* FALLTHROUGH */
return expand_builtin_va_copy (exp);
case BUILT_IN_EXPECT:
return expand_builtin_expect (exp, target);
+ case BUILT_IN_EXPECT_WITH_PROBABILITY:
+ return expand_builtin_expect_with_probability (exp, target);
case BUILT_IN_ASSUME_ALIGNED:
return expand_builtin_assume_aligned (exp, target);
case BUILT_IN_PREFETCH:
case BUILT_IN_GOACC_PARLEVEL_SIZE:
return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
+ case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
+ return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
+
+ case BUILT_IN_SPECULATION_SAFE_VALUE_1:
+ case BUILT_IN_SPECULATION_SAFE_VALUE_2:
+ case BUILT_IN_SPECULATION_SAFE_VALUE_4:
+ case BUILT_IN_SPECULATION_SAFE_VALUE_8:
+ case BUILT_IN_SPECULATION_SAFE_VALUE_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
+ return expand_speculation_safe_value (mode, exp, target, ignore);
+
default: /* just do library call, if unknown builtin */
break;
}
return END_BUILTINS;
fndecl = get_callee_fndecl (t);
- if (fndecl == NULL_TREE
- || TREE_CODE (fndecl) != FUNCTION_DECL
- || ! DECL_BUILT_IN (fndecl)
- || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
- return END_BUILTINS;
+ if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
+ return END_BUILTINS;
parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
init_const_call_expr_arg_iterator (t, &iter);
return NULL_TREE;
}
-/* Create builtin_expect with PRED and EXPECTED as its arguments and
- return it as a truthvalue. */
+/* Create builtin_expect or builtin_expect_with_probability
+ with PRED and EXPECTED as its arguments and return it as a truthvalue.
+ Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
+ builtin_expect_with_probability instead uses third argument as PROBABILITY
+ value. */
static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
- tree predictor)
+ tree predictor, tree probability)
{
tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
- fn = builtin_decl_explicit (BUILT_IN_EXPECT);
+ fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
+ : BUILT_IN_EXPECT_WITH_PROBABILITY);
arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
ret_type = TREE_TYPE (TREE_TYPE (fn));
pred_type = TREE_VALUE (arg_types);
pred = fold_convert_loc (loc, pred_type, pred);
expected = fold_convert_loc (loc, expected_type, expected);
- call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
- predictor);
+
+ if (probability)
+ call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
+ else
+ call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
+ predictor);
return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
build_int_cst (ret_type, 0));
}
-/* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
+/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
NULL_TREE if no simplification is possible. */
tree
-fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
+fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
+ tree arg3)
{
tree inner, fndecl, inner_arg0;
enum tree_code code;
if (TREE_CODE (inner) == CALL_EXPR
&& (fndecl = get_callee_fndecl (inner))
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
+ && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
+ || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
return arg0;
inner = inner_arg0;
tree op1 = TREE_OPERAND (inner, 1);
arg1 = save_expr (arg1);
- op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
- op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
+ op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
+ op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
inner = build2 (code, TREE_TYPE (inner), op0, op1);
return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
return NULL_TREE;
else
{
- tree len = c_strlen (arg, 0);
+ c_strlen_data lendata = { };
+ tree len = c_strlen (arg, 0, &lendata);
if (len)
return fold_convert_loc (loc, type, len);
+ if (!lendata.decl)
+ c_strlen (arg, 1, &lendata);
+
+ if (lendata.decl)
+ {
+ if (EXPR_HAS_LOCATION (arg))
+ loc = EXPR_LOCATION (arg);
+ else if (loc == UNKNOWN_LOCATION)
+ loc = input_location;
+ warn_string_no_nul (loc, "strlen", arg, lendata.decl);
+ }
+
return NULL_TREE;
}
}
tree arg0, tree arg1, tree arg2)
{
enum internal_fn ifn = IFN_LAST;
- /* The code of the expression corresponding to the type-generic
- built-in, or ERROR_MARK for the type-specific ones. */
+ /* The code of the expression corresponding to the built-in. */
enum tree_code opcode = ERROR_MARK;
bool ovf_only = false;
ovf_only = true;
/* FALLTHRU */
case BUILT_IN_ADD_OVERFLOW:
- opcode = PLUS_EXPR;
- /* FALLTHRU */
case BUILT_IN_SADD_OVERFLOW:
case BUILT_IN_SADDL_OVERFLOW:
case BUILT_IN_SADDLL_OVERFLOW:
case BUILT_IN_UADD_OVERFLOW:
case BUILT_IN_UADDL_OVERFLOW:
case BUILT_IN_UADDLL_OVERFLOW:
+ opcode = PLUS_EXPR;
ifn = IFN_ADD_OVERFLOW;
break;
case BUILT_IN_SUB_OVERFLOW_P:
ovf_only = true;
/* FALLTHRU */
case BUILT_IN_SUB_OVERFLOW:
- opcode = MINUS_EXPR;
- /* FALLTHRU */
case BUILT_IN_SSUB_OVERFLOW:
case BUILT_IN_SSUBL_OVERFLOW:
case BUILT_IN_SSUBLL_OVERFLOW:
case BUILT_IN_USUB_OVERFLOW:
case BUILT_IN_USUBL_OVERFLOW:
case BUILT_IN_USUBLL_OVERFLOW:
+ opcode = MINUS_EXPR;
ifn = IFN_SUB_OVERFLOW;
break;
case BUILT_IN_MUL_OVERFLOW_P:
ovf_only = true;
/* FALLTHRU */
case BUILT_IN_MUL_OVERFLOW:
- opcode = MULT_EXPR;
- /* FALLTHRU */
case BUILT_IN_SMUL_OVERFLOW:
case BUILT_IN_SMULL_OVERFLOW:
case BUILT_IN_SMULLL_OVERFLOW:
case BUILT_IN_UMUL_OVERFLOW:
case BUILT_IN_UMULL_OVERFLOW:
case BUILT_IN_UMULLL_OVERFLOW:
+ opcode = MULT_EXPR;
ifn = IFN_MUL_OVERFLOW;
break;
default:
? boolean_true_node : boolean_false_node,
arg2);
- tree ctype = build_complex_type (type);
- tree call = build_call_expr_internal_loc (loc, ifn, ctype,
- 2, arg0, arg1);
- tree tgt = save_expr (call);
- tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
- tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
- ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
+ tree intres, ovfres;
+ if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+ {
+ intres = fold_binary_loc (loc, opcode, type,
+ fold_convert_loc (loc, type, arg0),
+ fold_convert_loc (loc, type, arg1));
+ if (TREE_OVERFLOW (intres))
+ intres = drop_tree_overflow (intres);
+ ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
+ ? boolean_true_node : boolean_false_node);
+ }
+ else
+ {
+ tree ctype = build_complex_type (type);
+ tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
+ arg0, arg1);
+ tree tgt = save_expr (call);
+ intres = build1_loc (loc, REALPART_EXPR, type, tgt);
+ ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
+ ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
+ }
if (ovf_only)
return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
return fold_builtin_strpbrk (loc, arg0, arg1, type);
case BUILT_IN_EXPECT:
- return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
+ return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
case BUILT_IN_ISGREATER:
return fold_builtin_unordered_cmp (loc, fndecl,
return fold_builtin_memcmp (loc, arg0, arg1, arg2);
case BUILT_IN_EXPECT:
- return fold_builtin_expect (loc, arg0, arg1, arg2);
+ return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
+
+ case BUILT_IN_EXPECT_WITH_PROBABILITY:
+ return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
case BUILT_IN_ADD_OVERFLOW:
case BUILT_IN_SUB_OVERFLOW:
{
tree ret = NULL_TREE;
tree fndecl = get_callee_fndecl (exp);
- if (fndecl
- && TREE_CODE (fndecl) == FUNCTION_DECL
- && DECL_BUILT_IN (fndecl)
+ if (fndecl && fndecl_built_in_p (fndecl)
/* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
yet. Defer folding until we see all the arguments
(after inlining). */
if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
{
tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
- if (fndecl2
- && TREE_CODE (fndecl2) == FUNCTION_DECL
- && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
+ if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
return NULL_TREE;
}
tree fndecl = TREE_OPERAND (fn, 0);
if (TREE_CODE (fndecl) == FUNCTION_DECL
- && DECL_BUILT_IN (fndecl))
+ && fndecl_built_in_p (fndecl))
{
/* If last argument is __builtin_va_arg_pack (), arguments to this
function are not finalized yet. Defer folding until they are. */
if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
{
tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
- if (fndecl2
- && TREE_CODE (fndecl2) == FUNCTION_DECL
- && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
+ if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
return NULL_TREE;
}
if (avoid_folding_inline_builtin (fndecl))
definition of the va_start macro (perhaps on the token for
builtin) in a system header, so warnings will not be emitted.
Use the location in real source code. */
- source_location current_location =
+ location_t current_location =
linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
NULL);
static void
maybe_emit_free_warning (tree exp)
{
+ if (call_expr_nargs (exp) != 1)
+ return;
+
tree arg = CALL_EXPR_ARG (exp, 0);
STRIP_NOPS (arg);
tree ret = NULL_TREE;
tree fndecl = gimple_call_fndecl (stmt);
location_t loc = gimple_location (stmt);
- if (fndecl
- && TREE_CODE (fndecl) == FUNCTION_DECL
- && DECL_BUILT_IN (fndecl)
+ if (fndecl && fndecl_built_in_p (fndecl)
&& !gimple_call_va_arg_pack_p (stmt))
{
int nargs = gimple_call_num_args (stmt);
void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
- gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
- && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
+ gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
&& asmspec != 0);
tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
bool
is_simple_builtin (tree decl)
{
- if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
+ if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
switch (DECL_FUNCTION_CODE (decl))
{
/* Builtins that expand to constants. */
*p = (char)tree_to_uhwi (t);
return true;
}
-
-/* Return the maximum object size. */
-
-tree
-max_object_size (void)
-{
- /* To do: Make this a configurable parameter. */
- return TYPE_MAX_VALUE (ptrdiff_type_node);
-}