Correct a function pre/postcondition [PR102403].

diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index b2cddc223120406f748e004d123837df55d674c5..5e50e8013e2af3b1ab6f9b45f6421a8933f23dc2 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -1,5 +1,5 @@
 /* Tree inlining.
-   Copyright (C) 2001-2018 Free Software Foundation, Inc.
+   Copyright (C) 2001-2021 Free Software Foundation, Inc.
    Contributed by Alexandre Oliva <aoliva@redhat.com>
 
 This file is part of GCC.
@@ -53,13 +53,18 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-ssa.h"
 #include "except.h"
 #include "debug.h"
-#include "params.h"
 #include "value-prof.h"
 #include "cfgloop.h"
 #include "builtins.h"
 #include "stringpool.h"
 #include "attribs.h"
 #include "sreal.h"
+#include "tree-cfgcleanup.h"
+#include "tree-ssa-live.h"
+#include "alloc-pool.h"
+#include "symbol-summary.h"
+#include "symtab-thunks.h"
+#include "symtab-clones.h"
 
 /* I'm not real happy about this, but we need to handle gimple and
    non-gimple trees.  */
@@ -128,11 +133,9 @@ static void copy_bind_expr (tree *, int *, copy_body_data *);
 static void declare_inline_vars (tree, tree);
 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
 static void prepend_lexical_block (tree current_block, tree new_block);
-static tree copy_decl_to_var (tree, copy_body_data *);
 static tree copy_result_decl_to_var (tree, copy_body_data *);
 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
-static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
 
 /* Insert a tree->tree mapping for ID.  Despite what the name suggests
@@ -149,30 +152,6 @@ insert_decl_map (copy_body_data *id, tree key, tree value)
     id->decl_map->put (value, value);
 }
 
-/* Insert a tree->tree mapping for ID.  This is only used for
-   variables.  */
-
-static void
-insert_debug_decl_map (copy_body_data *id, tree key, tree value)
-{
-  if (!gimple_in_ssa_p (id->src_cfun))
-    return;
-
-  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
-    return;
-
-  if (!target_for_debug_bind (key))
-    return;
-
-  gcc_assert (TREE_CODE (key) == PARM_DECL);
-  gcc_assert (VAR_P (value));
-
-  if (!id->debug_map)
-    id->debug_map = new hash_map<tree, tree>;
-
-  id->debug_map->put (key, value);
-}
-
 /* If nonzero, we're remapping the contents of inlined debug
    statements.  If negative, an error has occurred, such as a
    reference to a variable that isn't available in the inlined
@@ -191,7 +170,21 @@ remap_ssa_name (tree name, copy_body_data *id)
 
   n = id->decl_map->get (name);
   if (n)
-    return unshare_expr (*n);
+    {
+      /* When we perform edge redirection as part of CFG copy, IPA-SRA can
+        remove an unused LHS from a call statement.  Such an LHS can however
+        still appear in debug statements, but its value is lost in this
+        function and we do not want to map it.  */
+      if (id->killed_new_ssa_names
+         && id->killed_new_ssa_names->contains (*n))
+       {
+         gcc_assert (processing_debug_stmt);
+         processing_debug_stmt = -1;
+         return name;
+       }
+
+      return unshare_expr (*n);
+    }
 
   if (processing_debug_stmt)
     {
@@ -258,6 +251,11 @@ remap_ssa_name (tree name, copy_body_data *id)
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
+      /* So can range-info.  */
+      if (!POINTER_TYPE_P (TREE_TYPE (name))
+         && SSA_NAME_RANGE_INFO (name))
+       duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
+                                      SSA_NAME_RANGE_INFO (name));
       return new_tree;
     }
 
@@ -291,6 +289,11 @@ remap_ssa_name (tree name, copy_body_data *id)
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
+      /* So can range-info.  */
+      if (!POINTER_TYPE_P (TREE_TYPE (name))
+         && SSA_NAME_RANGE_INFO (name))
+       duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
+                                      SSA_NAME_RANGE_INFO (name));
       if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* By inlining function having uninitialized variable, we might
@@ -523,11 +526,28 @@ remap_type_1 (tree type, copy_body_data *id)
 
       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
-         gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
+         gcc_checking_assert (TYPE_DOMAIN (type)
+                              == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
          TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
        }
       else
-       TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
+        {
+         TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
+         /* For array bounds where we have decided not to copy over the
+            bounds variable, which isn't used in the OpenMP/OpenACC region,
+            change them to an uninitialized VAR_DECL temporary.  */
+         if (id->adjust_array_error_bounds
+             && TYPE_DOMAIN (new_tree)
+             && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
+             && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
+           {
+             tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
+             DECL_ATTRIBUTES (v)
+               = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
+                            DECL_ATTRIBUTES (v));
+             TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
+           }
+        }
       break;
 
     case RECORD_TYPE:
@@ -582,6 +602,92 @@ remap_type_1 (tree type, copy_body_data *id)
   return new_tree;
 }
 
+/* Helper function for remap_type_2, called through walk_tree.  */
+
+static tree
+remap_type_3 (tree *tp, int *walk_subtrees, void *data)
+{
+  copy_body_data *id = (copy_body_data *) data;
+
+  if (TYPE_P (*tp))
+    *walk_subtrees = 0;
+
+  else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
+    return *tp;
+
+  return NULL_TREE;
+}
+
+/* Return true if TYPE needs to be remapped because remap_decl on any
+   needed embedded decl returns something other than that decl.  */
+
+static bool
+remap_type_2 (tree type, copy_body_data *id)
+{
+  tree t;
+
+#define RETURN_TRUE_IF_VAR(T) \
+  do                                                           \
+    {                                                          \
+      tree _t = (T);                                           \
+      if (_t)                                                  \
+       {                                                       \
+         if (DECL_P (_t) && remap_decl (_t, id) != _t)         \
+           return true;                                        \
+         if (!TYPE_SIZES_GIMPLIFIED (type)                     \
+             && walk_tree (&_t, remap_type_3, id, NULL))       \
+           return true;                                        \
+       }                                                       \
+    }                                                          \
+  while (0)
+
+  switch (TREE_CODE (type))
+    {
+    case POINTER_TYPE:
+    case REFERENCE_TYPE:
+    case FUNCTION_TYPE:
+    case METHOD_TYPE:
+      return remap_type_2 (TREE_TYPE (type), id);
+
+    case INTEGER_TYPE:
+    case REAL_TYPE:
+    case FIXED_POINT_TYPE:
+    case ENUMERAL_TYPE:
+    case BOOLEAN_TYPE:
+      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
+      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
+      return false;
+
+    case ARRAY_TYPE:
+      if (remap_type_2 (TREE_TYPE (type), id)
+         || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
+       return true;
+      break;
+
+    case RECORD_TYPE:
+    case UNION_TYPE:
+    case QUAL_UNION_TYPE:
+      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
+       if (TREE_CODE (t) == FIELD_DECL)
+         {
+           RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
+           RETURN_TRUE_IF_VAR (DECL_SIZE (t));
+           RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
+           if (TREE_CODE (type) == QUAL_UNION_TYPE)
+             RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
+         }
+      break;
+
+    default:
+      return false;
+    }
+
+  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
+  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
+  return false;
+#undef RETURN_TRUE_IF_VAR
+}
+
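
remap_type_2 exists to answer one question cheaply: does this variably
modified type actually mention a decl that the body copy will rename?  A
minimal sketch of the situation, with invented names (not from the patch):

  /* Illustrative only.  */
  void
  f (int n)
  {
    typedef char buf_t[n];   /* The array domain's bound refers to a
                                compiler temporary derived from n.  */
    buf_t b;
    /* If copying the body remaps that temporary to a fresh decl,
       remap_type_2 returns true and buf_t must be remapped as well;
       if every embedded decl maps to itself, the old type is reused.  */
  }
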
 tree
 remap_type (tree type, copy_body_data *id)
 {
@@ -597,7 +703,10 @@ remap_type (tree type, copy_body_data *id)
     return *node;
 
   /* The type only needs remapping if it's variably modified.  */
-  if (! variably_modified_type_p (type, id->src_fn))
+  if (! variably_modified_type_p (type, id->src_fn)
+      /* Don't remap if the copy_decl method doesn't always return a new
+        decl and, for all embedded decls, returns the passed-in decl.  */
+      || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
     {
       insert_decl_map (id, type, type);
       return type;
@@ -615,7 +724,7 @@ remap_type (tree type, copy_body_data *id)
 static bool
 can_be_nonlocal (tree decl, copy_body_data *id)
 {
-  /* We can not duplicate function decls.  */
+  /* We cannot duplicate function decls.  */
   if (TREE_CODE (decl) == FUNCTION_DECL)
     return true;
 
@@ -704,7 +813,7 @@ remap_block (tree *block, copy_body_data *id)
   old_block = *block;
   new_block = make_node (BLOCK);
   TREE_USED (new_block) = TREE_USED (old_block);
-  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
+  BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
   BLOCK_NONLOCALIZED_VARS (new_block)
     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
@@ -895,7 +1004,12 @@ remap_dependence_clique (copy_body_data *id, unsigned short clique)
   bool existed;
   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
   if (!existed)
-    newc = ++cfun->last_clique;
+    {
+      /* Clique 1 is reserved for local ones set by PTA.  */
+      if (cfun->last_clique == 0)
+       cfun->last_clique = 1;
+      newc = ++cfun->last_clique;
+    }
   return newc;
 }
 
@@ -990,7 +1104,7 @@ remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
       /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
 
-      if (TREE_CODE (*tp) == MEM_REF)
+      if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
@@ -1002,7 +1116,7 @@ remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
-         TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
+         copy_warning (*tp, old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
@@ -1216,11 +1330,11 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
              tree type = TREE_TYPE (*tp);
              tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
              tree old = *tp;
-             *tp = gimple_fold_indirect_ref (ptr);
+             *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
              if (! *tp)
                {
                  type = remap_type (type, id);
-                 if (TREE_CODE (ptr) == ADDR_EXPR)
+                 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
                    {
                      *tp
                        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
@@ -1249,7 +1363,7 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
              return NULL;
            }
        }
-      else if (TREE_CODE (*tp) == MEM_REF)
+      else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
@@ -1261,7 +1375,7 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
-         TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
+         copy_warning (*tp, old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
@@ -1321,7 +1435,8 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
 
          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
-         if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
+         if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
+             && !id->do_not_fold)
            {
              tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
              if (TREE_TYPE (t) != TREE_TYPE (*tp))
@@ -1338,6 +1453,28 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
 
          *walk_subtrees = 0;
        }
+      else if (TREE_CODE (*tp) == OMP_CLAUSE
+              && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
+                  || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
+       {
+         tree t = OMP_CLAUSE_DECL (*tp);
+         if (t
+             && TREE_CODE (t) == TREE_LIST
+             && TREE_PURPOSE (t)
+             && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
+           {
+             *walk_subtrees = 0;
+             OMP_CLAUSE_DECL (*tp) = copy_node (t);
+             t = OMP_CLAUSE_DECL (*tp);
+             TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
+             for (int i = 0; i <= 4; i++)
+               walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
+                          copy_tree_body_r, id, NULL);
+             if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
+               remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
+             walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
+           }
+       }
     }
 
   /* Keep iterating.  */
@@ -1389,6 +1526,11 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
          : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
     return NULL;
 
+  if (!is_gimple_debug (stmt)
+      && id->param_body_adjs
+      && id->param_body_adjs->m_dead_stmts.contains (stmt))
+    return NULL;
+
   /* Begin by recognizing trees that we'll completely rewrite for the
      inlining context.  Our output for these trees is completely
      different from our input (e.g. RETURN_EXPR is deleted and morphs
@@ -1407,9 +1549,12 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
         assignment to the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
-        just toss the entire GIMPLE_RETURN.  */
+        just toss the entire GIMPLE_RETURN.  Likewise for when the
+        call doesn't want the return value.  */
       if (retval
          && (TREE_CODE (retval) != RESULT_DECL
+             && (!id->call_stmt
+                 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
              && (TREE_CODE (retval) != SSA_NAME
                  || ! SSA_NAME_VAR (retval)
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
@@ -1513,9 +1658,22 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
          copy = gimple_build_omp_master (s1);
          break;
 
+       case GIMPLE_OMP_MASKED:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_masked
+                  (s1, gimple_omp_masked_clauses (stmt));
+         break;
+
+       case GIMPLE_OMP_SCOPE:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_scope
+                  (s1, gimple_omp_scope_clauses (stmt));
+         break;
+
        case GIMPLE_OMP_TASKGROUP:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
-         copy = gimple_build_omp_taskgroup (s1);
+         copy = gimple_build_omp_taskgroup
+                  (s1, gimple_omp_taskgroup_clauses (stmt));
          break;
 
        case GIMPLE_OMP_ORDERED:
@@ -1525,6 +1683,12 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
                    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
          break;
 
+       case GIMPLE_OMP_SCAN:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_scan
+                  (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
+         break;
+
        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
@@ -1628,13 +1792,30 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
                return NULL;
            }
        }
+     
+      /* We do not allow CLOBBERs of handled components.  In case the
+        returned value is stored via such a handled component, remove
+        the clobber so the stmt verifier is happy.  */
+      if (gimple_clobber_p (stmt)
+         && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
+       {
+         tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
+         if (!DECL_P (remapped)
+             && TREE_CODE (remapped) != MEM_REF)
+           return NULL;
+       }
 
       if (gimple_debug_bind_p (stmt))
        {
+         tree value;
+         if (id->param_body_adjs
+             && id->param_body_adjs->m_dead_stmts.contains (stmt))
+           value = NULL_TREE;
+         else
+           value = gimple_debug_bind_get_value (stmt);
          gdebug *copy
            = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
-                                      gimple_debug_bind_get_value (stmt),
-                                      stmt);
+                                      value, stmt);
          if (id->reset_location)
            gimple_set_location (copy, input_location);
          id->debug_stmts.safe_push (copy);
@@ -1658,12 +1839,11 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
          /* If the inlined function has too many debug markers,
             don't copy them.  */
          if (id->src_cfun->debug_marker_count
-             > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
+             > param_max_debug_marker_count
+             || id->reset_location)
            return stmts;
 
          gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
-         if (id->reset_location)
-           gimple_set_location (copy, input_location);
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
@@ -1754,13 +1934,28 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
     }
 
   /* If STMT has a block defined, map it to the newly constructed block.  */
-  if (gimple_block (copy))
+  if (tree block = gimple_block (copy))
     {
       tree *n;
-      n = id->decl_map->get (gimple_block (copy));
+      n = id->decl_map->get (block);
       gcc_assert (n);
       gimple_set_block (copy, *n);
     }
+  if (id->param_body_adjs)
+    {
+      gimple_seq extra_stmts = NULL;
+      id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts, stmt);
+      if (!gimple_seq_empty_p (extra_stmts))
+       {
+         memset (&wi, 0, sizeof (wi));
+         wi.info = id;
+         for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
+              !gsi_end_p (egsi);
+              gsi_next (&egsi))
+           walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
+         gimple_seq_add_seq (&stmts, extra_stmts);
+       }
+    }
 
   if (id->reset_location)
     gimple_set_location (copy, input_location);
@@ -1784,6 +1979,26 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
       gimple_set_vuse (copy, NULL_TREE);
     }
 
+  if (cfun->can_throw_non_call_exceptions)
+    {
+      /* When inlining a function which does not have non-call exceptions
+        enabled into a function that has them enabled (which only happens
+        with always-inline) we have to fix up stmts that cannot throw.  */
+      if (gcond *cond = dyn_cast <gcond *> (copy))
+       if (gimple_could_trap_p (cond))
+         {
+           gassign *cmp
+             = gimple_build_assign (make_ssa_name (boolean_type_node),
+                                    gimple_cond_code (cond),
+                                    gimple_cond_lhs (cond),
+                                    gimple_cond_rhs (cond));
+           gimple_seq_add_stmt (&stmts, cmp);
+           gimple_cond_set_code (cond, NE_EXPR);
+           gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
+           gimple_cond_set_rhs (cond, boolean_false_node);
+         }
+    }
+
   gimple_seq_add_stmt (&stmts, copy);
   return stmts;
 }
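
The can_throw_non_call_exceptions fixup above splits a possibly-trapping
comparison out of the GIMPLE_COND so that only a plain assignment carries
the trapping operation.  A rough before/after sketch in GIMPLE dump style,
with invented SSA names:

  /* Before: the FP comparison itself may trap in the caller, which has
     non-call exceptions enabled.  */
  if (x_1 < y_2) goto <bb 3>; else goto <bb 4>;

  /* After: the comparison gets its own statement; the condition is a
     trivial test that cannot throw.  */
  _5 = x_1 < y_2;
  if (_5 != 0) goto <bb 3>; else goto <bb 4>;
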
@@ -1898,27 +2113,29 @@ copy_bb (copy_body_data *id, basic_block bb,
              tree p;
              gcall *new_call;
              vec<tree> argarray;
-             size_t nargs = gimple_call_num_args (id->call_stmt);
-             size_t n;
+             size_t nargs_caller = gimple_call_num_args (id->call_stmt);
+             size_t nargs = nargs_caller;
 
              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;
 
              /* Create the new array of arguments.  */
-             n = nargs + gimple_call_num_args (call_stmt);
+             size_t nargs_callee = gimple_call_num_args (call_stmt);
+             size_t n = nargs + nargs_callee;
              argarray.create (n);
-             argarray.safe_grow_cleared (n);
+             argarray.safe_grow_cleared (n, true);
 
              /* Copy all the arguments before '...'  */
-             memcpy (argarray.address (),
-                     gimple_call_arg_ptr (call_stmt, 0),
-                     gimple_call_num_args (call_stmt) * sizeof (tree));
+             if (nargs_callee)
+               memcpy (argarray.address (),
+                       gimple_call_arg_ptr (call_stmt, 0),
+                       nargs_callee * sizeof (tree));
 
              /* Append the arguments passed in '...'  */
-             memcpy (argarray.address () + gimple_call_num_args (call_stmt),
-                     gimple_call_arg_ptr (id->call_stmt, 0)
-                     + (gimple_call_num_args (id->call_stmt) - nargs),
-                     nargs * sizeof (tree));
+             if (nargs)
+               memcpy (argarray.address () + nargs_callee,
+                       gimple_call_arg_ptr (id->call_stmt, 0)
+                       + (nargs_caller - nargs), nargs * sizeof (tree));
 
              new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
                                                argarray);
@@ -1929,8 +2146,9 @@ copy_bb (copy_body_data *id, basic_block bb,
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, call_stmt);
              gimple_call_set_va_arg_pack (new_call, false);
+             gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
+             /* location includes block.  */
              gimple_set_location (new_call, gimple_location (stmt));
-             gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
 
              gsi_replace (&copy_gsi, new_call, false);
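
The argument merging above implements __builtin_va_arg_pack, which GCC
only accepts in always_inline varargs functions.  A minimal sketch of the
source pattern being handled (log_err is an invented name):

  #include <stdio.h>

  static inline __attribute__ ((always_inline)) int
  log_err (const char *fmt, ...)
  {
    /* On inlining, the builtin stands for the caller's trailing
       arguments; copy_bb splices them into the new call vector.  */
    return fprintf (stderr, fmt, __builtin_va_arg_pack ());
  }

so that log_err ("%d:%s", 42, msg) inlines to
fprintf (stderr, "%d:%s", 42, msg).
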
@@ -2010,38 +2228,73 @@ copy_bb (copy_body_data *id, basic_block bb,
                  if (edge)
                    {
                      struct cgraph_edge *old_edge = edge;
-                     profile_count old_cnt = edge->count;
-                     edge = edge->clone (id->dst_node, call_stmt,
-                                         gimple_uid (stmt),
-                                         num, den,
-                                         true);
-
-                     /* Speculative calls consist of two edges - direct and
-                        indirect.  Duplicate the whole thing and distribute
-                        frequencies accordingly.  */
+
+                     /* A speculative call consists of multiple edges - the
+                        indirect edge and one or more direct edges.
+                        Duplicate the whole thing and distribute frequencies
+                        accordingly.  */
                      if (edge->speculative)
                        {
-                         struct cgraph_edge *direct, *indirect;
-                         struct ipa_ref *ref;
+                         int n = 0;
+                         profile_count direct_cnt
+                                = profile_count::zero ();
+
+                         /* First figure out the distribution of counts
+                            so we can re-scale BB profile accordingly.  */
+                         for (cgraph_edge *e = old_edge; e;
+                              e = e->next_speculative_call_target ())
+                           direct_cnt = direct_cnt + e->count;
+
+                         cgraph_edge *indirect
+                                = old_edge->speculative_call_indirect_edge ();
+                         profile_count indir_cnt = indirect->count;
 
-                         gcc_assert (!edge->indirect_unknown_callee);
-                         old_edge->speculative_call_info (direct, indirect, ref);
+                         /* Next iterate all direct edges, clone it and its
+                            corresponding reference and update profile.  */
+                         for (cgraph_edge *e = old_edge;
+                              e;
+                              e = e->next_speculative_call_target ())
+                           {
+                             profile_count cnt = e->count;
+
+                             id->dst_node->clone_reference
+                                (e->speculative_call_target_ref (), stmt);
+                             edge = e->clone (id->dst_node, call_stmt,
+                                              gimple_uid (stmt), num, den,
+                                              true);
+                             profile_probability prob
+                                = cnt.probability_in (direct_cnt
+                                                      + indir_cnt);
+                             edge->count
+                                = copy_basic_block->count.apply_probability
+                                        (prob);
+                             n++;
+                           }
+                         gcc_checking_assert
+                                (indirect->num_speculative_call_targets_p ()
+                                 == n);
 
-                         profile_count indir_cnt = indirect->count;
+                         /* Duplicate the indirect edge after all direct edges
+                            cloned.  */
                          indirect = indirect->clone (id->dst_node, call_stmt,
                                                      gimple_uid (stmt),
                                                      num, den,
                                                      true);
 
                          profile_probability prob
-                            = indir_cnt.probability_in (old_cnt + indir_cnt);
+                            = indir_cnt.probability_in (direct_cnt
+                                                        + indir_cnt);
                          indirect->count
                             = copy_basic_block->count.apply_probability (prob);
-                         edge->count = copy_basic_block->count - indirect->count;
-                         id->dst_node->clone_reference (ref, stmt);
                        }
                      else
-                       edge->count = copy_basic_block->count;
+                       {
+                         edge = edge->clone (id->dst_node, call_stmt,
+                                             gimple_uid (stmt),
+                                             num, den,
+                                             true);
+                         edge->count = copy_basic_block->count;
+                       }
                    }
                  break;
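
Reading the rescaling above: every speculative target (and the indirect
remainder) receives the share of the copied block's count that its
original count represents, i.e.

  prob        = e->count / (direct_cnt + indir_cnt)
  edge->count = copy_basic_block->count * prob

For example, with two direct targets counted 60 and 30 and an indirect
count of 10, the clones in a copied block of count 50 would get 30, 15
and 5 respectively (a worked illustration, not from the patch).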
 
@@ -2054,7 +2307,7 @@ copy_bb (copy_body_data *id, basic_block bb,
                case CB_CGE_MOVE:
                  edge = id->dst_node->get_edge (orig_stmt);
                  if (edge)
-                   edge->set_call_stmt (call_stmt);
+                   edge = cgraph_edge::set_call_stmt (edge, call_stmt);
                  break;
 
                default:
@@ -2092,7 +2345,7 @@ copy_bb (copy_body_data *id, basic_block bb,
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s\n",
-                              dest->name ());
+                              dest->dump_name ());
                    }
                }
 
@@ -2173,6 +2426,79 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
       }
 }
 
+/* Insert clobbers for automatic variables of inlined ID->src_fn
+   function at the start of basic block ID->eh_landing_pad_dest.  */
+
+static void
+add_clobbers_to_eh_landing_pad (copy_body_data *id)
+{
+  tree var;
+  basic_block bb = id->eh_landing_pad_dest;
+  live_vars_map *vars = NULL;
+  unsigned int cnt = 0;
+  unsigned int i;
+  FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
+    if (VAR_P (var)
+       && !DECL_HARD_REGISTER (var)
+       && !TREE_THIS_VOLATILE (var)
+       && !DECL_HAS_VALUE_EXPR_P (var)
+       && !is_gimple_reg (var)
+       && auto_var_in_fn_p (var, id->src_fn)
+       && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
+      {
+       tree *t = id->decl_map->get (var);
+       if (!t)
+         continue;
+       tree new_var = *t;
+       if (VAR_P (new_var)
+           && !DECL_HARD_REGISTER (new_var)
+           && !TREE_THIS_VOLATILE (new_var)
+           && !DECL_HAS_VALUE_EXPR_P (new_var)
+           && !is_gimple_reg (new_var)
+           && auto_var_in_fn_p (new_var, id->dst_fn))
+         {
+           if (vars == NULL)
+             vars = new live_vars_map;
+            vars->put (DECL_UID (var), cnt++);
+         }
+      }
+  if (vars == NULL)
+    return;
+
+  vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
+  FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
+    if (VAR_P (var))
+      {
+       edge e;
+       edge_iterator ei;
+       bool needed = false;
+       unsigned int *v = vars->get (DECL_UID (var));
+       if (v == NULL)
+         continue;
+       FOR_EACH_EDGE (e, ei, bb->preds)
+         if ((e->flags & EDGE_EH) != 0
+             && e->src->index >= id->add_clobbers_to_eh_landing_pads)
+           {
+             basic_block src_bb = (basic_block) e->src->aux;
+
+             if (bitmap_bit_p (&live[src_bb->index], *v))
+               {
+                 needed = true;
+                 break;
+               }
+           }
+       if (needed)
+         {
+           tree new_var = *id->decl_map->get (var);
+           gimple_stmt_iterator gsi = gsi_after_labels (bb);
+           tree clobber = build_clobber (TREE_TYPE (new_var));
+           gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
+           gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
+         }
+      }
+  destroy_live_vars (live);
+  delete vars;
+}
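
The intended effect, sketched in GIMPLE dump notation with invented names:
an automatic of the inlined body that is live at a throwing statement gets
an explicit clobber on the caller's shared EH landing pad, so later passes
know its lifetime ends there:

  <bb 7> :                 /* id->eh_landing_pad_dest */
  buf ={v} {CLOBBER};      /* inserted for the inlined local 'buf' */
  resx 1;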
 
 /* Copy edges from BB into its copy constructed earlier, scale profile
    accordingly.  Edges will be taken care of later.  Assume aux
@@ -2215,7 +2541,7 @@ copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
     return false;
 
-  /* When doing function splitting, we must decreate count of the return block
+  /* When doing function splitting, we must decrease count of the return block
      which was previously reachable by block we did not copy.  */
   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
     FOR_EACH_EDGE (old_edge, ei, bb->preds)
@@ -2246,7 +2572,7 @@ copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
          propagation can change an INDIRECT_REF which throws
          into a COMPONENT_REF which doesn't.  If the copy
          can throw, the original could also throw.  */
-      can_throw = stmt_can_throw_internal (copy_stmt);
+      can_throw = stmt_can_throw_internal (cfun, copy_stmt);
       nonlocal_goto
        = (stmt_can_make_abnormal_goto (copy_stmt)
           && !computed_goto_p (copy_stmt));
@@ -2300,8 +2626,18 @@ copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
              e->probability = old_edge->probability;
            
           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
-           if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
-             e->probability = profile_probability::never ();
+           if (e->flags & EDGE_EH)
+             {
+               if (!e->probability.initialized_p ())
+                 e->probability = profile_probability::never ();
+               if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
+                 {
+                   if (id->eh_landing_pad_dest == NULL)
+                     id->eh_landing_pad_dest = e->dest;
+                   else
+                     gcc_assert (id->eh_landing_pad_dest == e->dest);
+                 }
+             }
         }
 
 
@@ -2361,7 +2697,9 @@ copy_phis_for_bb (basic_block bb, copy_body_data *id)
       phi = si.phi ();
       res = PHI_RESULT (phi);
       new_res = res;
-      if (!virtual_operand_p (res))
+      if (!virtual_operand_p (res)
+         && (!id->param_body_adjs
+             || !id->param_body_adjs->m_dead_stmts.contains (phi)))
        {
          walk_tree (&new_res, copy_tree_body_r, id, NULL);
          if (EDGE_COUNT (new_bb->preds) == 0)
@@ -2466,6 +2804,7 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
+  cfun->calls_eh_return = src_cfun->calls_eh_return;
   cfun->stdarg = src_cfun->stdarg;
   cfun->after_inlining = src_cfun->after_inlining;
   cfun->can_throw_non_call_exceptions
@@ -2514,7 +2853,7 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
 
   if (gsi_end_p (si)
       || gsi_one_before_end_p (si)
-      || !(stmt_can_throw_internal (gsi_stmt (si))
+      || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
           || stmt_can_make_abnormal_goto (gsi_stmt (si))))
     return;
 
@@ -2541,7 +2880,7 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
                  gimple_set_location (stmt, UNKNOWN_LOCATION);
                }
              gsi_remove (&si, false);
-             gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
+             gsi_insert_before (&dsi, stmt, GSI_NEW_STMT);
              continue;
            }
 
@@ -2567,7 +2906,7 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
            new_stmt = as_a <gdebug *> (gimple_copy (stmt));
          else
            gcc_unreachable ();
-         gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
+         gsi_insert_before (&dsi, new_stmt, GSI_NEW_STMT);
          id->debug_stmts.safe_push (new_stmt);
          gsi_prev (&ssi);
        }
@@ -2579,15 +2918,15 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
 
 static void
 copy_loops (copy_body_data *id,
-           struct loop *dest_parent, struct loop *src_parent)
+           class loop *dest_parent, class loop *src_parent)
 {
-  struct loop *src_loop = src_parent->inner;
+  class loop *src_loop = src_parent->inner;
   while (src_loop)
     {
       if (!id->blocks_to_copy
          || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
        {
-         struct loop *dest_loop = alloc_loop ();
+         class loop *dest_loop = alloc_loop ();
 
          /* Assign the new loop its header and latch and associate
             those with the new loop.  */
@@ -2601,23 +2940,20 @@ copy_loops (copy_body_data *id,
 
          /* Copy loop meta-data.  */
          copy_loop_info (src_loop, dest_loop);
+         if (dest_loop->unroll)
+           cfun->has_unroll = true;
+         if (dest_loop->force_vectorize)
+           cfun->has_force_vectorize_loops = true;
+         if (id->src_cfun->last_clique != 0)
+           dest_loop->owned_clique
+             = remap_dependence_clique (id,
+                                        src_loop->owned_clique
+                                        ? src_loop->owned_clique : 1);
 
          /* Finally place it into the loop array and the loop tree.  */
          place_new_loop (cfun, dest_loop);
          flow_loop_tree_node_add (dest_parent, dest_loop);
 
-         dest_loop->safelen = src_loop->safelen;
-         if (src_loop->unroll)
-           {
-             dest_loop->unroll = src_loop->unroll;
-             cfun->has_unroll = true;
-           }
-         dest_loop->dont_vectorize = src_loop->dont_vectorize;
-         if (src_loop->force_vectorize)
-           {
-             dest_loop->force_vectorize = true;
-             cfun->has_force_vectorize_loops = true;
-           }
          if (src_loop->simduid)
            {
              dest_loop->simduid = remap_decl (src_loop->simduid, id);
@@ -2631,7 +2967,7 @@ copy_loops (copy_body_data *id,
     }
 }
 
-/* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
+/* Call redirect_call_stmt_to_callee on all calls in BB.  */
 
 void
 redirect_all_calls (copy_body_data * id, basic_block bb)
@@ -2643,10 +2979,25 @@ redirect_all_calls (copy_body_data * id, basic_block bb)
       gimple *stmt = gsi_stmt (si);
       if (is_gimple_call (stmt))
        {
+         tree old_lhs = gimple_call_lhs (stmt);
          struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
          if (edge)
            {
-             edge->redirect_call_stmt_to_callee ();
+             gimple *new_stmt
+               = cgraph_edge::redirect_call_stmt_to_callee (edge);
+             /* If the IPA-SRA transformation, run as part of edge
+                redirection, removed the LHS because it is unused, save it
+                to killed_new_ssa_names so that we can prune it from debug
+                statements.  */
+             if (old_lhs
+                 && TREE_CODE (old_lhs) == SSA_NAME
+                 && !gimple_call_lhs (new_stmt))
+               {
+                 if (!id->killed_new_ssa_names)
+                   id->killed_new_ssa_names = new hash_set<tree> (16);
+                 id->killed_new_ssa_names->add (old_lhs);
+               }
+
              if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
                gimple_purge_dead_eh_edges (bb);
            }
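
A sketch of the case this handles, with invented SSA names and clone
suffix: IPA-SRA's redirection drops an LHS that only debug info still
uses, and the recorded name later makes remap_ssa_name reset the bind
rather than map a dead value:

  /* Before redirection:  */
  x_5 = bar (a_2);
  # DEBUG d => x_5

  /* After IPA-SRA removes the unused LHS:  */
  bar.isra (a_2);
  # DEBUG d => NULL
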
@@ -2742,6 +3093,12 @@ copy_cfg_body (copy_body_data * id,
       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
                                               abnormal_goto_dest, id);
 
+  if (id->eh_landing_pad_dest)
+    {
+      add_clobbers_to_eh_landing_pad (id);
+      id->eh_landing_pad_dest = NULL;
+    }
+
   if (new_entry)
     {
       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
@@ -2777,7 +3134,7 @@ copy_cfg_body (copy_body_data * id,
            && bb->index != ENTRY_BLOCK
            && bb->index != EXIT_BLOCK)
          maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
-       /* Update call edge destinations.  This can not be done before loop
+       /* Update call edge destinations.  This cannot be done before loop
           info is updated, because we may split basic blocks.  */
        if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
            && bb->index != ENTRY_BLOCK
@@ -2795,7 +3152,7 @@ copy_cfg_body (copy_body_data * id,
        maybe_move_debug_stmts_to_successors (id,
                                              BASIC_BLOCK_FOR_FN (cfun, last));
       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
-      /* Update call edge destinations.  This can not be done before loop
+      /* Update call edge destinations.  This cannot be done before loop
         info is updated, because we may split basic blocks.  */
       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
        redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
@@ -2829,14 +3186,21 @@ copy_debug_stmt (gdebug *stmt, copy_body_data *id)
   tree t, *n;
   struct walk_stmt_info wi;
 
-  if (gimple_block (stmt))
+  if (tree block = gimple_block (stmt))
     {
-      n = id->decl_map->get (gimple_block (stmt));
+      n = id->decl_map->get (block);
       gimple_set_block (stmt, n ? *n : id->block);
     }
 
   if (gimple_debug_nonbind_marker_p (stmt))
-    return;
+    {
+      if (id->call_stmt && !gimple_block (stmt))
+       {
+         gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
+         gsi_remove (&gsi, true);
+       }
+      return;
+    }
 
   /* Remap all the operands in COPY.  */
   memset (&wi, 0, sizeof (wi));
@@ -2851,7 +3215,8 @@ copy_debug_stmt (gdebug *stmt, copy_body_data *id)
   else
     gcc_unreachable ();
 
-  if (TREE_CODE (t) == PARM_DECL && id->debug_map
+  if (TREE_CODE (t) == PARM_DECL
+      && id->debug_map
       && (n = id->debug_map->get (t)))
     {
       gcc_assert (VAR_P (*n));
@@ -2919,13 +3284,10 @@ copy_debug_stmt (gdebug *stmt, copy_body_data *id)
 static void
 copy_debug_stmts (copy_body_data *id)
 {
-  size_t i;
-  gdebug *stmt;
-
   if (!id->debug_stmts.exists ())
     return;
 
-  FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
+  for (gdebug *stmt : id->debug_stmts)
     copy_debug_stmt (stmt, id);
 
   id->debug_stmts.release ();
@@ -2961,6 +3323,8 @@ copy_body (copy_body_data *id,
   body = copy_cfg_body (id, entry_block_map, exit_block_map,
                        new_entry);
   copy_debug_stmts (id);
+  delete id->killed_new_ssa_names;
+  id->killed_new_ssa_names = NULL;
 
   return body;
 }
@@ -3011,7 +3375,10 @@ insert_init_debug_bind (copy_body_data *id,
        base_stmt = gsi_stmt (gsi);
     }
 
-  note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
+  note = gimple_build_debug_bind (tracked_var,
+                                 value == error_mark_node
+                                 ? NULL_TREE : unshare_expr (value),
+                                 base_stmt);
 
   if (bb)
     {
@@ -3043,7 +3410,7 @@ insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
          && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
        {
          tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
-                            gimple_expr_type (init_stmt),
+                            TREE_TYPE (gimple_assign_lhs (init_stmt)),
                             gimple_assign_rhs1 (init_stmt));
          rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
                                          GSI_NEW_STMT);
@@ -3051,16 +3418,41 @@ insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
          gimple_assign_set_rhs1 (init_stmt, rhs);
        }
       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
-      gimple_regimplify_operands (init_stmt, &si);
-
       if (!is_gimple_debug (init_stmt))
        {
+         gimple_regimplify_operands (init_stmt, &si);
+
          tree def = gimple_assign_lhs (init_stmt);
          insert_init_debug_bind (id, bb, def, def, init_stmt);
        }
     }
 }
 
+/* Deal with mismatched formal/actual parameters, in a rather brute-force way
+   if need be (which should only be necessary for invalid programs).  Attempt
+   to convert VAL to TYPE and return the result if it is possible, just return
+   a zero constant of the given type if it fails.  */
+
+tree
+force_value_to_type (tree type, tree value)
+{
+  /* If we can match up types by promotion/demotion do so.  */
+  if (fold_convertible_p (type, value))
+    return fold_convert (type, value);
+
+  /* ???  For valid programs we should not end up here.
+     Still if we end up with truly mismatched types here, fall back
+     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
+     GIMPLE to the following passes.  */
+  if (TREE_CODE (value) == WITH_SIZE_EXPR)
+    return error_mark_node;
+  else if (!is_gimple_reg_type (TREE_TYPE (value))
+          || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
+    return fold_build1 (VIEW_CONVERT_EXPR, type, value);
+  else
+    return build_zero_cst (type);
+}
+
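
A minimal sketch of an (invalid) caller that can reach this fallback; the
names are invented and the exact path taken depends on the target's type
sizes:

  static inline int use (int i) { return i + 1; }

  int
  caller (void)
  {
    int (*fp) (float) = (int (*) (float)) &use;
    /* The float actual is not fold_convertible_p to the int formal;
       where int and float have the same size this falls back to a
       VIEW_CONVERT_EXPR, otherwise to a zero constant.  */
    return fp (1.0f);
  }
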
 /* Initialize parameter P with VALUE.  If needed, produce init statement
    at the end of BB.  When BB is NULL, we return init statement to be
    output later.  */
@@ -3070,31 +3462,9 @@ setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
 {
   gimple *init_stmt = NULL;
   tree var;
-  tree rhs = value;
   tree def = (gimple_in_ssa_p (cfun)
              ? ssa_default_def (id->src_cfun, p) : NULL);
 
-  if (value
-      && value != error_mark_node
-      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
-    {
-      /* If we can match up types by promotion/demotion do so.  */
-      if (fold_convertible_p (TREE_TYPE (p), value))
-       rhs = fold_convert (TREE_TYPE (p), value);
-      else
-       {
-         /* ???  For valid programs we should not end up here.
-            Still if we end up with truly mismatched types here, fall back
-            to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
-            GIMPLE to the following passes.  */
-         if (!is_gimple_reg_type (TREE_TYPE (value))
-             || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
-           rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
-         else
-           rhs = build_zero_cst (TREE_TYPE (p));
-       }
-    }
-
   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
      here since the type of this decl must be visible to the calling
      function.  */
@@ -3113,16 +3483,18 @@ setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
      value.  */
   if (TREE_READONLY (p)
       && !TREE_ADDRESSABLE (p)
-      && value && !TREE_SIDE_EFFECTS (value)
+      && value
+      && !TREE_SIDE_EFFECTS (value)
       && !def)
     {
-      /* We may produce non-gimple trees by adding NOPs or introduce
-        invalid sharing when operand is not really constant.
-        It is not big deal to prohibit constant propagation here as
-        we will constant propagate in DOM1 pass anyway.  */
-      if (is_gimple_min_invariant (value)
-         && useless_type_conversion_p (TREE_TYPE (p),
-                                                TREE_TYPE (value))
+      /* We may produce non-gimple trees by adding NOPs or introduce invalid
+        sharing when the value is not a constant or a DECL.  We also need to
+        make sure it cannot be modified from another path in the callee.  */
+      if ((is_gimple_min_invariant (value)
+          || (DECL_P (value) && TREE_READONLY (value))
+          || (auto_var_in_fn_p (value, id->dst_fn)
+              && !TREE_ADDRESSABLE (value)))
+         && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
@@ -3131,7 +3503,9 @@ setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
-         insert_debug_decl_map (id, p, var);
+         if (!id->debug_map)
+           id->debug_map = new hash_map<tree, tree>;
+         id->debug_map->put (p, var);
          return insert_init_debug_bind (id, bb, var, value, NULL);
        }
     }
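
A small sketch of this fast path (invented names): a read-only,
non-addressable formal whose actual argument is a gimple invariant is
mapped straight to the value, so no initialization statement is needed:

  static inline int sq (const int p) { return p * p; }

  int f (void) { return sq (5); }
  /* p is TREE_READONLY and 5 is is_gimple_min_invariant, so inlining
     records p -> 5 in the decl map and only emits a debug bind.  */
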
@@ -3141,17 +3515,17 @@ setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
      automatically replaced by the VAR_DECL.  */
   insert_decl_map (id, p, var);
 
-  /* Even if P was TREE_READONLY, the new VAR should not be.
-     In the original code, we would have constructed a
-     temporary, and then the function body would have never
-     changed the value of P.  However, now, we will be
-     constructing VAR directly.  The constructor body may
-     change its value multiple times as it is being
-     constructed.  Therefore, it must not be TREE_READONLY;
-     the back-end assumes that TREE_READONLY variable is
-     assigned to only once.  */
-  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
-    TREE_READONLY (var) = 0;
+  /* Even if P was TREE_READONLY, the new VAR should not be.  In the original
+     code, we would have constructed a temporary, and then the function body
+     would have never changed the value of P.  However, now, we will be
+     constructing VAR directly.  Therefore, it must not be TREE_READONLY.  */
+  TREE_READONLY (var) = 0;
+
+  tree rhs = value;
+  if (value
+      && value != error_mark_node
+      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
+    rhs = force_value_to_type (TREE_TYPE (p), value);
 
   /* If there is no setup required and we are in SSA, take the easy route
      replacing all SSA names representing the function parameter by the
@@ -3314,7 +3688,9 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
      vs. the call expression.  */
   if (modify_dest)
     caller_type = TREE_TYPE (modify_dest);
-  else
+  else if (return_slot)
+    caller_type = TREE_TYPE (return_slot);
+  else /* No LHS on the call.  */
     caller_type = TREE_TYPE (TREE_TYPE (callee));
 
   /* We don't need to do anything for functions that don't return anything.  */
@@ -3341,6 +3717,7 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
             taken by alias analysis.  */
          gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
          var = return_slot_addr;
+         mark_addressable (return_slot);
        }
       else
        {
@@ -3349,11 +3726,13 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
          if (TREE_ADDRESSABLE (result))
            mark_addressable (var);
        }
-      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
-           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
-         && !DECL_GIMPLE_REG_P (result)
+      if (DECL_NOT_GIMPLE_REG_P (result)
          && DECL_P (var))
-       DECL_GIMPLE_REG_P (var) = 0;
+       DECL_NOT_GIMPLE_REG_P (var) = 1;
+
+      if (!useless_type_conversion_p (callee_type, caller_type))
+       var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
+
       use = NULL;
       goto done;
     }
@@ -3374,7 +3753,7 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
       /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
-      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
+      else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
        use_it = true;
 
       /* If the callee cannot possibly modify MODIFY_DEST, then we can
@@ -3392,10 +3771,8 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
-         else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
-                   || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
-                  && !DECL_GIMPLE_REG_P (result)
-                  && DECL_GIMPLE_REG_P (base_m))
+         else if (DECL_NOT_GIMPLE_REG_P (result)
+                  && !DECL_NOT_GIMPLE_REG_P (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
@@ -3409,14 +3786,14 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
        }
     }
 
-  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
+  gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
 
   var = copy_result_decl_to_var (result, id);
   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
 
   /* Do not have the rest of GCC warn about this variable as it should
      not be visible to the user.  */
-  TREE_NO_WARNING (var) = 1;
+  suppress_warning (var /* OPT_Wuninitialized? */);
 
   declare_inline_vars (id->block, var);
 
@@ -3435,11 +3812,8 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
             to using a MEM_REF to not leak invalid GIMPLE to the following
             passes.  */
          /* Prevent var from being written into SSA form.  */
-         if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
-             || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
-           DECL_GIMPLE_REG_P (var) = false;
-         else if (is_gimple_reg_type (TREE_TYPE (var)))
-           TREE_ADDRESSABLE (var) = true;
+         if (is_gimple_reg_type (TREE_TYPE (var)))
+           DECL_NOT_GIMPLE_REG_P (var) = true;
          use = fold_build2 (MEM_REF, caller_type,
                             build_fold_addr_expr (var),
                             build_int_cst (ptr_type_node, 0));
@@ -3526,7 +3900,7 @@ copy_forbidden (struct function *fun)
 static const char *inline_forbidden_reason;
 
 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
-   iff a function can not be inlined.  Also sets the reason why. */
+   iff a function cannot be inlined.  Also sets the reason why. */
 
 static tree
 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
@@ -3612,7 +3986,7 @@ inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
               return from the function the inline has been inlined into.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
-                  "it uses __builtin_return or __builtin_apply_args");
+                  "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
            *handled_ops_p = true;
            return t;
 
@@ -3669,6 +4043,19 @@ inline_forbidden_p (tree fndecl)
   wi.info = (void *) fndecl;
   wi.pset = &visited_nodes;
 
+  /* We cannot inline a function with a variable-sized parameter because we
+     cannot materialize a temporary of such a type in the caller if need be.
+     Note that the return case is not symmetrical because we can guarantee
+     that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT.  */
+  for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
+    if (!poly_int_tree_p (DECL_SIZE (parm)))
+      {
+       inline_forbidden_reason
+         = G_("function %q+F can never be inlined because "
+              "it has a VLA argument");
+       return true;
+      }
+
   FOR_EACH_BB_FN (bb, fun)
     {
       gimple *ret;
@@ -3693,7 +4080,7 @@ function_attribute_inlinable_p (const_tree fndecl)
 
       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
        {
-         const_tree name = TREE_PURPOSE (a);
+         const_tree name = get_attribute_name (a);
          int i;
 
          for (i = 0; targetm.attribute_table[i].name != NULL; i++)
@@ -3721,7 +4108,7 @@ tree_inlinable_function_p (tree fn)
     return false;
 
   /* We only warn for functions declared `inline' by the user.  */
-  do_warning = (warn_inline
+  do_warning = (opt_for_fn (fn, warn_inline)
                && DECL_DECLARED_INLINE_P (fn)
                && !DECL_NO_INLINE_WARNING_P (fn)
                && !DECL_IN_SYSTEM_HEADER (fn));
@@ -3732,8 +4119,8 @@ tree_inlinable_function_p (tree fn)
       && always_inline == NULL)
     {
       if (do_warning)
-        warning (OPT_Winline, "function %q+F can never be inlined because it "
-                 "is suppressed using -fno-inline", fn);
+       warning (OPT_Winline, "function %q+F can never be inlined because it "
+                "is suppressed using %<-fno-inline%>", fn);
       inlinable = false;
     }
 
@@ -3882,6 +4269,8 @@ estimate_operator_cost (enum tree_code code, eni_weights *weights,
 
     case REALIGN_LOAD_EXPR:
 
+    case WIDEN_PLUS_EXPR:
+    case WIDEN_MINUS_EXPR:
     case WIDEN_SUM_EXPR:
     case WIDEN_MULT_EXPR:
     case DOT_PROD_EXPR:
@@ -3890,6 +4279,10 @@ estimate_operator_cost (enum tree_code code, eni_weights *weights,
     case WIDEN_MULT_MINUS_EXPR:
     case WIDEN_LSHIFT_EXPR:
 
+    case VEC_WIDEN_PLUS_HI_EXPR:
+    case VEC_WIDEN_PLUS_LO_EXPR:
+    case VEC_WIDEN_MINUS_HI_EXPR:
+    case VEC_WIDEN_MINUS_LO_EXPR:
     case VEC_WIDEN_MULT_HI_EXPR:
     case VEC_WIDEN_MULT_LO_EXPR:
     case VEC_WIDEN_MULT_EVEN_EXPR:
@@ -4043,8 +4436,8 @@ estimate_num_insns (gimple *stmt, eni_weights *weights)
            /* Do not special case builtins where we see the body.
              This just confuses the inliner.  */
            struct cgraph_node *node;
-           if (!(node = cgraph_node::get (decl))
-               || node->definition)
+           if ((node = cgraph_node::get (decl))
+               && node->definition)
              ;
           /* For builtins that are likely expanded to nothing or
              inlined, do not account operand costs.  */
@@ -4108,6 +4501,9 @@ estimate_num_insns (gimple *stmt, eni_weights *weights)
           with very long asm statements.  */
        if (count > 1000)
          count = 1000;
+       /* If this asm is asm inline, count it as minimum size.  */
+       if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
+         count = MIN (1, count);
        return MAX (1, count);
       }
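For reference, a minimal GNU C sketch of the asm inline case counted above; touch_watchdog is a hypothetical helper:

   /* 'asm inline' asks size estimation to treat the asm as minimal,
      so a long template no longer pushes callers over the inline
      limits; the statement still counts as at least one insn.  */
   static inline void
   touch_watchdog (void)
   {
     asm inline ("nop; nop; nop; nop; nop; nop; nop; nop" ::: "memory");
   }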
 
@@ -4160,8 +4556,11 @@ estimate_num_insns (gimple *stmt, eni_weights *weights)
     case GIMPLE_OMP_TASK:
     case GIMPLE_OMP_CRITICAL:
     case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_MASKED:
+    case GIMPLE_OMP_SCOPE:
     case GIMPLE_OMP_TASKGROUP:
     case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_SCAN:
     case GIMPLE_OMP_SECTION:
     case GIMPLE_OMP_SECTIONS:
     case GIMPLE_OMP_SINGLE:
@@ -4334,7 +4733,8 @@ reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
 
 static bool
-expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
+expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
+                   bitmap to_purge)
 {
   tree use_retvar;
   tree fn;
@@ -4404,7 +4804,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
             be to be able to keep both bodies and use extern inline body
             for inlining, but we can't do that because frontends overwrite
             the body.  */
-         && !cg_edge->callee->local.redefined_extern_inline
+         && !cg_edge->callee->redefined_extern_inline
          /* During early inline pass, report only when optimization is
             not turned on.  */
          && (symtab->global_info_ready
@@ -4413,7 +4813,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
          /* PR 20090218-1_0.c. Body can be provided by another module. */
          && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
        {
-         error ("inlining failed in call to always_inline %q+F: %s", fn,
+         error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
                 cgraph_inline_failed_string (reason));
          if (gimple_location (stmt) != UNKNOWN_LOCATION)
            inform (gimple_location (stmt), "called from here");
@@ -4421,7 +4821,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
            inform (DECL_SOURCE_LOCATION (cfun->decl),
                    "called from this function");
        }
-      else if (warn_inline
+      else if (opt_for_fn (fn, warn_inline)
               && DECL_DECLARED_INLINE_P (fn)
               && !DECL_NO_INLINE_WARNING_P (fn)
               && !DECL_IN_SYSTEM_HEADER (fn)
@@ -4449,39 +4849,42 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
 
   /* If callee is thunk, all we need is to adjust the THIS pointer
      and redirect to function being thunked.  */
-  if (id->src_node->thunk.thunk_p)
+  if (id->src_node->thunk)
     {
       cgraph_edge *edge;
       tree virtual_offset = NULL;
       profile_count count = cg_edge->count;
       tree op;
       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
+      thunk_info *info = thunk_info::get (id->src_node);
 
-      cg_edge->remove ();
+      cgraph_edge::remove (cg_edge);
       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
                                           gimple_uid (stmt),
                                           profile_count::one (),
                                           profile_count::one (),
                                           true);
       edge->count = count;
-      if (id->src_node->thunk.virtual_offset_p)
-        virtual_offset = size_int (id->src_node->thunk.virtual_value);
+      if (info->virtual_offset_p)
+       virtual_offset = size_int (info->virtual_value);
       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
                              NULL);
       gsi_insert_before (&iter, gimple_build_assign (op,
                                                    gimple_call_arg (stmt, 0)),
                         GSI_NEW_STMT);
-      gcc_assert (id->src_node->thunk.this_adjusting);
-      op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
-                        virtual_offset);
+      gcc_assert (info->this_adjusting);
+      op = thunk_adjust (&iter, op, 1, info->fixed_offset,
+                        virtual_offset, info->indirect_offset);
 
       gimple_call_set_arg (stmt, 0, op);
       gimple_call_set_fndecl (stmt, edge->callee->decl);
       update_stmt (stmt);
       id->src_node->remove ();
-      expand_call_inline (bb, stmt, id);
+      successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
       maybe_remove_unused_call_args (cfun, stmt);
-      return true;
+      /* This used to return true even though we do fail to inline in
+        some cases.  See PR98525.  */
+      goto egress;
     }
   fn = cg_edge->callee->decl;
   cg_edge->callee->get_untransformed_body ();
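A small C++ sketch of the thunk situation handled above; the class names are hypothetical:

   struct A { virtual ~A (); long a; };
   struct B { virtual long g (); long b; };
   struct C : A, B { long g () override; };  /* B-in-C 'g' needs a thunk */
   /* A call through B's vtable reaches a thunk whose 'this' points at
      the B subobject; when inlined, thunk_adjust emits the fixed-offset
      (and, for virtual bases, vtable-indexed) arithmetic inline before
      the call is redirected to the real C::g.  */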
@@ -4491,7 +4894,6 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
 
   /* We will be inlining this callee.  */
   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
-  id->assign_stmts.create (0);
 
   /* Update the callers EH personality.  */
   if (DECL_FUNCTION_PERSONALITY (fn))
@@ -4525,7 +4927,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
      artificial decls inserted by the compiler itself.  We need to
      either link the inlined blocks into the caller block tree or
      not refer to them in any way to not break GC for locations.  */
-  if (gimple_block (stmt))
+  if (tree block = gimple_block (stmt))
     {
      /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION
          to make inlined_function_outer_scope_p return true on this BLOCK.  */
@@ -4535,9 +4937,9 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
       if (loc == UNKNOWN_LOCATION)
        loc = BUILTINS_LOCATION;
       id->block = make_node (BLOCK);
-      BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
+      BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
       BLOCK_SOURCE_LOCATION (id->block) = loc;
-      prepend_lexical_block (gimple_block (stmt), id->block);
+      prepend_lexical_block (block, id->block);
     }
 
   /* Local declarations will be replaced by their equivalents in this map.  */
@@ -4545,6 +4947,8 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
   id->decl_map = new hash_map<tree, tree>;
   dst = id->debug_map;
   id->debug_map = NULL;
+  if (flag_stack_reuse != SR_NONE)
+    id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
 
   /* Record the function we are about to inline.  */
   id->src_fn = fn;
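A short note on the field set above; the GIMPLE shown is schematic:

   /* Recording the pre-inlining block count tells the body copier
      which EH landing pads already existed in the caller; when the
      inlined body can throw, it appends

        var = {CLOBBER};

      for the callee's locals on those pads, so their stack slots can
      be reused.  Under -fstack-reuse=none this is skipped and the
      slots stay live into the handler.  */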
@@ -4575,6 +4979,9 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
   src_properties = id->src_cfun->curr_properties & prop_mask;
   if (src_properties != prop_mask)
     dst_cfun->curr_properties &= src_properties | ~prop_mask;
+  dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
+  id->dst_node->calls_declare_variant_alt
+    |= id->src_node->calls_declare_variant_alt;
 
   gcc_assert (!id->src_cfun->after_inlining);
 
@@ -4592,7 +4999,8 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
     {
       gimple_stmt_iterator si = gsi_last_bb (bb);
       gsi_insert_after (&si, gimple_build_debug_inline_entry
-                       (id->block, input_location), GSI_NEW_STMT);
+                       (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
+                       GSI_NEW_STMT);
     }
 
   if (DECL_INITIAL (fn))
@@ -4644,7 +5052,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
         initialized.  We do not want to issue a warning about that
         uninitialized variable.  */
       if (DECL_P (modify_dest))
-       TREE_NO_WARNING (modify_dest) = 1;
+       suppress_warning (modify_dest, OPT_Wuninitialized);
 
       if (gimple_call_return_slot_opt_p (call_stmt))
        {
@@ -4660,7 +5068,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
      we may get confused if the compiler sees that the inlined new
      function returns a pointer which was just deleted.  See bug
      33407.  */
-  if (DECL_IS_OPERATOR_NEW (fn))
+  if (DECL_IS_OPERATOR_NEW_P (fn))
     {
       return_slot = NULL;
       modify_dest = NULL;
@@ -4672,14 +5080,20 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
   /* Add local vars in this inlined callee to caller.  */
   add_local_variables (id->src_cfun, cfun, id);
 
-  if (dump_file && (dump_flags & TDF_DETAILS))
+  if (dump_enabled_p ())
     {
-      fprintf (dump_file, "Inlining %s to %s with frequency %4.2f\n",
-              id->src_node->dump_name (),
-              id->dst_node->dump_name (),
-              cg_edge->sreal_frequency ().to_double ());
-      id->src_node->dump (dump_file);
-      id->dst_node->dump (dump_file);
+      char buf[128];
+      snprintf (buf, sizeof (buf), "%4.2f",
+               cg_edge->sreal_frequency ().to_double ());
+      dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
+                      call_stmt,
+                      "Inlining %C to %C with frequency %s\n",
+                      id->src_node, id->dst_node, buf);
+      if (dump_file && (dump_flags & TDF_DETAILS))
+       {
+         id->src_node->dump (dump_file);
+         id->dst_node->dump (dump_file);
+       }
     }
 
   /* This is it.  Duplicate the callee body.  Assume callee is
@@ -4695,12 +5109,16 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
       if (!TREE_THIS_VOLATILE (p))
        {
+         /* The value associated with P is a local temporary only if
+            there is no value associated with P in the debug map.  */
          tree *varp = id->decl_map->get (p);
-         if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
+         if (varp
+             && VAR_P (*varp)
+             && !is_gimple_reg (*varp)
+             && !(id->debug_map && id->debug_map->get (p)))
            {
-             tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
+             tree clobber = build_clobber (TREE_TYPE (*varp));
              gimple *clobber_stmt;
-             TREE_THIS_VOLATILE (clobber) = 1;
              clobber_stmt = gimple_build_assign (*varp, clobber);
              gimple_set_location (clobber_stmt, gimple_location (stmt));
              gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
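The same idiom in isolation, using only APIs visible in this patch (VAR and GSI stand in for the locals above):

   tree clobber = build_clobber (TREE_TYPE (var));
   gimple *clobber_stmt = gimple_build_assign (var, clobber);
   gsi_insert_before (&gsi, clobber_stmt, GSI_SAME_STMT);
   /* build_clobber returns the {CLOBBER} constructor with
      TREE_THIS_VOLATILE already set, replacing the removed two-step
      build_constructor + TREE_THIS_VOLATILE sequence.  */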
@@ -4768,9 +5186,8 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
          && !is_gimple_reg (id->retvar)
          && !stmt_ends_bb_p (stmt))
        {
-         tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
+         tree clobber = build_clobber (TREE_TYPE (id->retvar));
          gimple *clobber_stmt;
-         TREE_THIS_VOLATILE (clobber) = 1;
          clobber_stmt = gimple_build_assign (id->retvar, clobber);
          gimple_set_location (clobber_stmt, gimple_location (old_stmt));
          gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
@@ -4816,9 +5233,8 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
               && !TREE_THIS_VOLATILE (id->retvar)
               && !is_gimple_reg (id->retvar))
        {
-         tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
+         tree clobber = build_clobber (TREE_TYPE (id->retvar));
          gimple *clobber_stmt;
-         TREE_THIS_VOLATILE (clobber) = 1;
          clobber_stmt = gimple_build_assign (id->retvar, clobber);
          gimple_set_location (clobber_stmt, gimple_location (stmt));
          gsi_replace (&stmt_gsi, clobber_stmt, false);
@@ -4829,10 +5245,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
     }
 
   if (purge_dead_abnormal_edges)
-    {
-      gimple_purge_dead_eh_edges (return_block);
-      gimple_purge_dead_abnormal_call_edges (return_block);
-    }
+    bitmap_set_bit (to_purge, return_block->index);
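End to end, the deferred purge looks roughly like this; the snippet only rearranges calls that appear elsewhere in this patch:

   auto_bitmap to_purge;                 /* owned by the outermost caller */
   bitmap_set_bit (to_purge, return_block->index);   /* per call site */
   /* ... after all inlining and folding are done ... */
   unsigned i;
   bitmap_iterator bi;
   EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
     if (basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i))
       {
         gimple_purge_dead_eh_edges (bb);
         gimple_purge_dead_abnormal_call_edges (bb);
       }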
 
   /* If the value of the new expression is ignored, that's OK.  We
      don't warn about this for CALL_EXPRs, so we shouldn't warn about
@@ -4844,7 +5257,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
     }
 
-  id->assign_stmts.release ();
+  id->add_clobbers_to_eh_landing_pads = 0;
 
   /* Output the inlining info for this abstract function, since it has been
      inlined.  If we don't do this now, we can lose the information about the
@@ -4870,7 +5283,8 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
    in a MODIFY_EXPR.  */
 
 static bool
-gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
+gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
+                           bitmap to_purge)
 {
   gimple_stmt_iterator gsi;
   bool inlined = false;
@@ -4882,7 +5296,7 @@ gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
 
       if (is_gimple_call (stmt)
          && !gimple_call_internal_p (stmt))
-       inlined |= expand_call_inline (bb, stmt, id);
+       inlined |= expand_call_inline (bb, stmt, id, to_purge);
     }
 
   return inlined;
@@ -4895,87 +5309,119 @@ gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
 static void
 fold_marked_statements (int first, hash_set<gimple *> *statements)
 {
-  for (; first < n_basic_blocks_for_fn (cfun); first++)
-    if (BASIC_BLOCK_FOR_FN (cfun, first))
-      {
-        gimple_stmt_iterator gsi;
+  auto_bitmap to_purge;
 
-       for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
-            !gsi_end_p (gsi);
-            gsi_next (&gsi))
-         if (statements->contains (gsi_stmt (gsi)))
-           {
-             gimple *old_stmt = gsi_stmt (gsi);
-             tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
+  auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
+  auto_sbitmap visited (last_basic_block_for_fn (cfun));
+  bitmap_clear (visited);
 
-             if (old_decl && fndecl_built_in_p (old_decl))
-               {
-                 /* Folding builtins can create multiple instructions,
-                    we need to look at all of them.  */
-                 gimple_stmt_iterator i2 = gsi;
-                 gsi_prev (&i2);
-                 if (fold_stmt (&gsi))
-                   {
-                     gimple *new_stmt;
-                     /* If a builtin at the end of a bb folded into nothing,
-                        the following loop won't work.  */
-                     if (gsi_end_p (gsi))
-                       {
-                         cgraph_update_edges_for_call_stmt (old_stmt,
-                                                            old_decl, NULL);
-                         break;
-                       }
-                     if (gsi_end_p (i2))
-                       i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
-                     else
+  stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
+  while (!stack.is_empty ())
+    {
+      /* Look at the edge on the top of the stack.  */
+      edge e = stack.pop ();
+      basic_block dest = e->dest;
+
+      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
+         || bitmap_bit_p (visited, dest->index))
+       continue;
+
+      bitmap_set_bit (visited, dest->index);
+
+      if (dest->index >= first)
+       for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
+            !gsi_end_p (gsi); gsi_next (&gsi))
+         {
+           if (!statements->contains (gsi_stmt (gsi)))
+             continue;
+
+           gimple *old_stmt = gsi_stmt (gsi);
+           tree old_decl = (is_gimple_call (old_stmt)
+                            ? gimple_call_fndecl (old_stmt) : 0);
+           if (old_decl && fndecl_built_in_p (old_decl))
+             {
+               /* Folding builtins can create multiple instructions;
+                  we need to look at all of them.  */
+               gimple_stmt_iterator i2 = gsi;
+               gsi_prev (&i2);
+               if (fold_stmt (&gsi))
+                 {
+                   gimple *new_stmt;
+                   /* If a builtin at the end of a bb folded into nothing,
+                      the following loop won't work.  */
+                   if (gsi_end_p (gsi))
+                     {
+                       cgraph_update_edges_for_call_stmt (old_stmt,
+                                                          old_decl, NULL);
+                       break;
+                     }
+                   if (gsi_end_p (i2))
+                     i2 = gsi_start_bb (dest);
+                   else
+                     gsi_next (&i2);
+                   while (1)
+                     {
+                       new_stmt = gsi_stmt (i2);
+                       update_stmt (new_stmt);
+                       cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
+                                                          new_stmt);
+
+                       if (new_stmt == gsi_stmt (gsi))
+                         {
+                           /* It is okay to check only the very last
+                              of these statements.  If it is a throwing
+                              statement nothing will change.  If it isn't,
+                              this can remove EH edges.  That is only
+                              incorrect if some intermediate statements
+                              throw while the last one does not; then we
+                              would have to split the block, which we
+                              cannot do here, and we would lose anyway.
+                              And as builtins probably never throw, this
+                              is all moot anyway.  */
+                           if (maybe_clean_or_replace_eh_stmt (old_stmt,
+                                                               new_stmt))
+                             bitmap_set_bit (to_purge, dest->index);
+                           break;
+                         }
                        gsi_next (&i2);
-                     while (1)
-                       {
-                         new_stmt = gsi_stmt (i2);
-                         update_stmt (new_stmt);
-                         cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
-                                                            new_stmt);
+                     }
+                 }
+             }
+           else if (fold_stmt (&gsi))
+             {
+               /* Re-read the statement from GSI as fold_stmt() may
+                  have changed it.  */
+               gimple *new_stmt = gsi_stmt (gsi);
+               update_stmt (new_stmt);
+
+               if (is_gimple_call (old_stmt)
+                   || is_gimple_call (new_stmt))
+                 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
+                                                    new_stmt);
+
+               if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
+                 bitmap_set_bit (to_purge, dest->index);
+             }
+         }
 
-                         if (new_stmt == gsi_stmt (gsi))
-                           {
-                             /* It is okay to check only for the very last
-                                of these statements.  If it is a throwing
-                                statement nothing will change.  If it isn't
-                                this can remove EH edges.  If that weren't
-                                correct then because some intermediate stmts
-                                throw, but not the last one.  That would mean
-                                we'd have to split the block, which we can't
-                                here and we'd loose anyway.  And as builtins
-                                probably never throw, this all
-                                is mood anyway.  */
-                             if (maybe_clean_or_replace_eh_stmt (old_stmt,
-                                                                 new_stmt))
-                               gimple_purge_dead_eh_edges (
-                                 BASIC_BLOCK_FOR_FN (cfun, first));
-                             break;
-                           }
-                         gsi_next (&i2);
-                       }
-                   }
-               }
-             else if (fold_stmt (&gsi))
-               {
-                 /* Re-read the statement from GSI as fold_stmt() may
-                    have changed it.  */
-                 gimple *new_stmt = gsi_stmt (gsi);
-                 update_stmt (new_stmt);
-
-                 if (is_gimple_call (old_stmt)
-                     || is_gimple_call (new_stmt))
-                   cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
-                                                      new_stmt);
-
-                 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
-                   gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
-                                                                   first));
-               }
+      if (EDGE_COUNT (dest->succs) > 0)
+       {
+         /* Avoid warnings emitted from folding statements that
+            became unreachable because of inlined function parameter
+            propagation.  */
+         e = find_taken_edge (dest, NULL_TREE);
+         if (e)
+           stack.quick_push (e);
+         else
+           {
+             edge_iterator ei;
+             FOR_EACH_EDGE (e, ei, dest->succs)
+               stack.safe_push (e);
            }
-      }
+       }
+    }
+
+  gimple_purge_all_dead_eh_edges (to_purge);
 }
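A schematic user-level illustration of why the walk prefers find_taken_edge; do_risky is a hypothetical function whose folding could produce a diagnostic:

   extern int do_risky (int);   /* hypothetical */

   static inline int
   callee (int flag, int x)
   {
     if (flag)
       return do_risky (x);     /* folding this might warn */
     return x;
   }

   int
   caller (int x)
   {
     return callee (0, x);      /* guard folds to 'if (0)'; only the
                                   taken edge is followed, so the dead
                                   arm is never folded or warned on */
   }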
 
 /* Expand calls to inline functions in the body of FN.  */
@@ -5021,8 +5467,9 @@ optimize_inline_calls (tree fn)
      will split id->current_basic_block, and the new blocks will
      follow it; we'll trudge through them, processing their CALL_EXPRs
      along the way.  */
+  auto_bitmap to_purge;
   FOR_EACH_BB_FN (bb, cfun)
-    inlined_p |= gimple_expand_calls_inline (bb, &id);
+    inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
 
   pop_gimplify_context (NULL);
 
@@ -5037,21 +5484,41 @@ optimize_inline_calls (tree fn)
        gcc_assert (e->inline_failed);
     }
 
+  /* If we didn't inline into the function there is nothing to do.  */
+  if (!inlined_p)
+    {
+      delete id.statements_to_fold;
+      return 0;
+    }
+
   /* Fold queued statements.  */
   update_max_bb_count ();
   fold_marked_statements (last, id.statements_to_fold);
   delete id.statements_to_fold;
 
-  gcc_assert (!id.debug_stmts.exists ());
+  /* Finally purge EH and abnormal edges from the call stmts we inlined.
+     We need to do this after fold_marked_statements since that may walk
+     the SSA use-def chain.  */
+  unsigned i;
+  bitmap_iterator bi;
+  EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
+    {
+      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
+      if (bb)
+       {
+         gimple_purge_dead_eh_edges (bb);
+         gimple_purge_dead_abnormal_call_edges (bb);
+       }
+    }
 
-  /* If we didn't inline into the function there is nothing to do.  */
-  if (!inlined_p)
-    return 0;
+  gcc_assert (!id.debug_stmts.exists ());
 
   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
   number_blocks (fn);
 
-  delete_unreachable_blocks_update_callgraph (&id);
+  delete_unreachable_blocks_update_callgraph (id.dst_node, false);
+  id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
+
   if (flag_checking)
     id.dst_node->verify ();
 
@@ -5452,6 +5919,10 @@ copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
     SET_DECL_RTL (copy, 0);
+  /* For vector typed decls make sure to update DECL_MODE according
+     to the new function context.  */
+  if (VECTOR_TYPE_P (TREE_TYPE (copy)))
+    SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
 
   /* These args would always appear unused, if not for this.  */
   TREE_USED (copy) = 1;
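A sketch of why the vector mode must be refreshed; the x86/AVX pairing is an assumption for illustration:

   typedef float v8sf __attribute__ ((vector_size (32)));

   __attribute__ ((target ("avx"))) static inline v8sf
   twice (v8sf a)               /* hypothetical callee */
   {
     return a + a;              /* v8sf has a vector mode under AVX */
   }
   /* When a v8sf decl is copied into a function context without AVX,
      TYPE_MODE evaluates differently there (typically BLKmode), so
      the copy's DECL_MODE is recomputed for the new context.  */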
@@ -5486,7 +5957,11 @@ copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
   return copy;
 }
 
-static tree
+/* Create a new VAR_DECL that is identical in all respects to DECL, which may
+   be a VAR_DECL, a PARM_DECL or a RESULT_DECL.  The original DECL must come
+   from ID->src_fn and the copy will be part of ID->dst_fn.  */
+
+tree
 copy_decl_to_var (tree decl, copy_body_data *id)
 {
   tree copy, type;
@@ -5503,7 +5978,8 @@ copy_decl_to_var (tree decl, copy_body_data *id)
   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
   TREE_READONLY (copy) = TREE_READONLY (decl);
   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
-  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
+  DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
+  DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
 
   return copy_decl_for_dup_finish (id, decl, copy);
 }
@@ -5532,7 +6008,12 @@ copy_result_decl_to_var (tree decl, copy_body_data *id)
   if (!DECL_BY_REFERENCE (decl))
     {
       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
-      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
+      DECL_NOT_GIMPLE_REG_P (copy)
+       = (DECL_NOT_GIMPLE_REG_P (decl)
+          /* RESULT_DECLs are treated specially by needs_to_live_in_memory,
+             mirror that to the created VAR_DECL.  */
+          || (TREE_CODE (decl) == RESULT_DECL
+              && aggregate_value_p (decl, id->src_fn)));
     }
 
   return copy_decl_for_dup_finish (id, decl, copy);
@@ -5569,38 +6050,24 @@ copy_decl_maybe_to_var (tree decl, copy_body_data *id)
     return copy_decl_no_change (decl, id);
 }
 
-/* Return a copy of the function's argument tree.  */
+/* Return a copy of the function's argument tree without any modifications.  */
+
 static tree
-copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
-                              bitmap args_to_skip, tree *vars)
+copy_arguments_nochange (tree orig_parm, copy_body_data * id)
 {
   tree arg, *parg;
   tree new_parm = NULL;
-  int i = 0;
 
   parg = &new_parm;
-
-  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
-    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
-      {
-        tree new_tree = remap_decl (arg, id);
-       if (TREE_CODE (new_tree) != PARM_DECL)
-         new_tree = id->copy_decl (arg, id);
-        lang_hooks.dup_lang_specific_decl (new_tree);
-        *parg = new_tree;
-       parg = &DECL_CHAIN (new_tree);
-      }
-    else if (!id->decl_map->get (arg))
-      {
-       /* Make an equivalent VAR_DECL.  If the argument was used
-          as temporary variable later in function, the uses will be
-          replaced by local variable.  */
-       tree var = copy_decl_to_var (arg, id);
-       insert_decl_map (id, arg, var);
-        /* Declare this new variable.  */
-        DECL_CHAIN (var) = *vars;
-        *vars = var;
-      }
+  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
+    {
+      tree new_tree = remap_decl (arg, id);
+      if (TREE_CODE (new_tree) != PARM_DECL)
+       new_tree = id->copy_decl (arg, id);
+      lang_hooks.dup_lang_specific_decl (new_tree);
+      *parg = new_tree;
+      parg = &DECL_CHAIN (new_tree);
+    }
   return new_parm;
 }
 
@@ -5631,101 +6098,29 @@ tree_versionable_function_p (tree fndecl)
          && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
 }
 
-/* Delete all unreachable basic blocks and update callgraph.
-   Doing so is somewhat nontrivial because we need to update all clones and
-   remove inline function that become unreachable.  */
-
-static bool
-delete_unreachable_blocks_update_callgraph (copy_body_data *id)
-{
-  bool changed = false;
-  basic_block b, next_bb;
-
-  find_unreachable_blocks ();
-
-  /* Delete all unreachable basic blocks.  */
-
-  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
-       != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
-    {
-      next_bb = b->next_bb;
-
-      if (!(b->flags & BB_REACHABLE))
-       {
-          gimple_stmt_iterator bsi;
-
-          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
-           {
-             struct cgraph_edge *e;
-             struct cgraph_node *node;
-
-             id->dst_node->remove_stmt_references (gsi_stmt (bsi));
-
-             if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
-                 &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
-               {
-                 if (!e->inline_failed)
-                   e->callee->remove_symbol_and_inline_clones (id->dst_node);
-                 else
-                   e->remove ();
-               }
-             if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
-                 && id->dst_node->clones)
-               for (node = id->dst_node->clones; node != id->dst_node;)
-                 {
-                   node->remove_stmt_references (gsi_stmt (bsi));
-                   if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
-                       && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
-                     {
-                       if (!e->inline_failed)
-                         e->callee->remove_symbol_and_inline_clones (id->dst_node);
-                       else
-                         e->remove ();
-                     }
-
-                   if (node->clones)
-                     node = node->clones;
-                   else if (node->next_sibling_clone)
-                     node = node->next_sibling_clone;
-                   else
-                     {
-                       while (node != id->dst_node && !node->next_sibling_clone)
-                         node = node->clone_of;
-                       if (node != id->dst_node)
-                         node = node->next_sibling_clone;
-                     }
-                 }
-           }
-         delete_basic_block (b);
-         changed = true;
-       }
-    }
-
-  return changed;
-}
-
 /* Update clone info after duplication.  */
 
 static void
 update_clone_info (copy_body_data * id)
 {
-  struct cgraph_node *node;
-  if (!id->dst_node->clones)
+  struct cgraph_node *this_node = id->dst_node;
+  if (!this_node->clones)
     return;
-  for (node = id->dst_node->clones; node != id->dst_node;)
+  for (cgraph_node *node = this_node->clones; node != this_node;)
     {
       /* First update replace maps to match the new body.  */
-      if (node->clone.tree_map)
-        {
+      clone_info *info = clone_info::get (node);
+      if (info && info->tree_map)
+       {
          unsigned int i;
-          for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
+         for (i = 0; i < vec_safe_length (info->tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
-             replace_info = (*node->clone.tree_map)[i];
-             walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
+             replace_info = (*info->tree_map)[i];
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
+
       if (node->clones)
        node = node->clones;
       else if (node->next_sibling_clone)
@@ -5747,19 +6142,18 @@ update_clone_info (copy_body_data * id)
    tree with another tree while duplicating the function's
    body, TREE_MAP represents the mapping between these
    trees. If UPDATE_CLONES is set, the call_stmt fields
-   of edges of clones of the function will be updated.  
+   of edges of clones of the function will be updated.
 
-   If non-NULL ARGS_TO_SKIP determine function parameters to remove
-   from new version.
-   If SKIP_RETURN is true, the new version will return void.
-   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
+   If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype
+   (i.e. the function parameters and return value) should be modified.
+   If non-NULL, BLOCKS_TO_COPY determines what basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
 */
 void
 tree_function_versioning (tree old_decl, tree new_decl,
                          vec<ipa_replace_map *, va_gc> *tree_map,
-                         bool update_clones, bitmap args_to_skip,
-                         bool skip_return, bitmap blocks_to_copy,
+                         ipa_param_adjustments *param_adjustments,
+                         bool update_clones, bitmap blocks_to_copy,
                          basic_block new_entry)
 {
   struct cgraph_node *old_version_node;
@@ -5771,7 +6165,12 @@ tree_function_versioning (tree old_decl, tree new_decl,
   basic_block old_entry_block, bb;
   auto_vec<gimple *, 10> init_stmts;
   tree vars = NULL_TREE;
-  bitmap debug_args_to_skip = args_to_skip;
+
+  /* We can get called recursively from expand_call_inline via clone
+     materialization.  While expand_call_inline maintains input_location,
+     we must not let it leak into the materialized clone.  */
+  location_t saved_location = input_location;
+  input_location = UNKNOWN_LOCATION;
 
   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
@@ -5837,6 +6236,8 @@ tree_function_versioning (tree old_decl, tree new_decl,
   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
   initialize_cfun (new_decl, old_decl,
                   new_entry ? new_entry->count : old_entry_block->count);
+  new_version_node->calls_declare_variant_alt
+    = old_version_node->calls_declare_variant_alt;
   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
       = id.src_cfun->gimple_df->ipa_pta;
@@ -5847,96 +6248,47 @@ tree_function_versioning (tree old_decl, tree new_decl,
     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
       = copy_static_chain (p, &id);
 
+  auto_vec<int, 16> new_param_indices;
+  clone_info *info = clone_info::get (old_version_node);
+  ipa_param_adjustments *old_param_adjustments
+    = info ? info->param_adjustments : NULL;
+  if (old_param_adjustments)
+    old_param_adjustments->get_updated_indices (&new_param_indices);
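+  /* Illustrative mapping (hypothetical): if IPA-SRA dropped the
+     second of three original parameters, get_updated_indices yields
+     { 0, -1, 1 }; a tree_map entry for original parm 2 is therefore
+     looked up at new index 1 below.  */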
+
   /* If there's a tree_map, prepare for substitution.  */
   if (tree_map)
     for (i = 0; i < tree_map->length (); i++)
       {
        gimple *init;
        replace_info = (*tree_map)[i];
-       if (replace_info->replace_p)
-         {
-           int parm_num = -1;
-           if (!replace_info->old_tree)
-             {
-               int p = replace_info->parm_num;
-               tree parm;
-               tree req_type, new_type;
-
-               for (parm = DECL_ARGUMENTS (old_decl); p;
-                    parm = DECL_CHAIN (parm))
-                 p--;
-               replace_info->old_tree = parm;
-               parm_num = replace_info->parm_num;
-               req_type = TREE_TYPE (parm);
-               new_type = TREE_TYPE (replace_info->new_tree);
-               if (!useless_type_conversion_p (req_type, new_type))
-                 {
-                   if (fold_convertible_p (req_type, replace_info->new_tree))
-                     replace_info->new_tree
-                       = fold_build1 (NOP_EXPR, req_type,
-                                      replace_info->new_tree);
-                   else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
-                     replace_info->new_tree
-                       = fold_build1 (VIEW_CONVERT_EXPR, req_type,
-                                      replace_info->new_tree);
-                   else
-                     {
-                       if (dump_file)
-                         {
-                           fprintf (dump_file, "    const ");
-                           print_generic_expr (dump_file,
-                                               replace_info->new_tree);
-                           fprintf (dump_file,
-                                    "  can't be converted to param ");
-                           print_generic_expr (dump_file, parm);
-                           fprintf (dump_file, "\n");
-                         }
-                       replace_info->old_tree = NULL;
-                     }
-                 }
-             }
-           else
-             gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
-           if (replace_info->old_tree)
-             {
-               init = setup_one_parameter (&id, replace_info->old_tree,
-                                           replace_info->new_tree, id.src_fn,
-                                           NULL,
-                                           &vars);
-               if (init)
-                 init_stmts.safe_push (init);
-               if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
-                 {
-                   if (parm_num == -1)
-                     {
-                       tree parm;
-                       int p;
-                       for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
-                            parm = DECL_CHAIN (parm), p++)
-                         if (parm == replace_info->old_tree)
-                           {
-                             parm_num = p;
-                             break;
-                           }
-                     }
-                   if (parm_num != -1)
-                     {
-                       if (debug_args_to_skip == args_to_skip)
-                         {
-                           debug_args_to_skip = BITMAP_ALLOC (NULL);
-                           bitmap_copy (debug_args_to_skip, args_to_skip);
-                         }
-                       bitmap_clear_bit (debug_args_to_skip, parm_num);
-                     }
-                 }
-             }
-         }
+
+       int p = replace_info->parm_num;
+       if (old_param_adjustments)
+         p = new_param_indices[p];
+
+       tree parm;
+       for (parm = DECL_ARGUMENTS (old_decl); p;
+            parm = DECL_CHAIN (parm))
+         p--;
+       gcc_assert (parm);
+       init = setup_one_parameter (&id, parm, replace_info->new_tree,
+                                   id.src_fn, NULL, &vars);
+       if (init)
+         init_stmts.safe_push (init);
       }
-  /* Copy the function's arguments.  */
-  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
+
+  ipa_param_body_adjustments *param_body_adjs = NULL;
+  if (param_adjustments)
+    {
+      param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
+                                                       new_decl, old_decl,
+                                                       &id, &vars, tree_map);
+      id.param_body_adjs = param_body_adjs;
+      DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
+    }
+  else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
     DECL_ARGUMENTS (new_decl)
-      = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
-                                      args_to_skip, &vars);
+      = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
 
   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
@@ -5949,12 +6301,21 @@ tree_function_versioning (tree old_decl, tree new_decl,
 
   if (DECL_RESULT (old_decl) == NULL_TREE)
     ;
-  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
+  else if (param_adjustments && param_adjustments->m_skip_return
+          && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
     {
+      tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
+                                                  &id);
+      declare_inline_vars (NULL, resdecl_repl);
+      if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
+       resdecl_repl = build_fold_addr_expr (resdecl_repl);
+      insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
+
       DECL_RESULT (new_decl)
        = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
                      RESULT_DECL, NULL_TREE, void_type_node);
       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
+      DECL_IS_MALLOC (new_decl) = false;
       cfun->returns_struct = 0;
       cfun->returns_pcc_struct = 0;
     }
@@ -6017,7 +6378,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
   update_max_bb_count ();
   fold_marked_statements (0, id.statements_to_fold);
   delete id.statements_to_fold;
-  delete_unreachable_blocks_update_callgraph (&id);
+  delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
   if (id.dst_node->definition)
     cgraph_edge::rebuild_references ();
   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
@@ -6047,29 +6408,30 @@ tree_function_versioning (tree old_decl, tree new_decl,
        }
     }
 
-  if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
+  if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
     {
-      tree parm;
       vec<tree, va_gc> **debug_args = NULL;
       unsigned int len = 0;
-      for (parm = DECL_ARGUMENTS (old_decl), i = 0;
-          parm; parm = DECL_CHAIN (parm), i++)
-       if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
-         {
-           tree ddecl;
+      unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
 
-           if (debug_args == NULL)
-             {
-               debug_args = decl_debug_args_insert (new_decl);
-               len = vec_safe_length (*debug_args);
-             }
-           ddecl = make_node (DEBUG_EXPR_DECL);
-           DECL_ARTIFICIAL (ddecl) = 1;
-           TREE_TYPE (ddecl) = TREE_TYPE (parm);
-           SET_DECL_MODE (ddecl, DECL_MODE (parm));
-           vec_safe_push (*debug_args, DECL_ORIGIN (parm));
-           vec_safe_push (*debug_args, ddecl);
-         }
+      for (i = 0; i < reset_len; i++)
+       {
+         tree parm = param_body_adjs->m_reset_debug_decls[i];
+         gcc_assert (is_gimple_reg (parm));
+         tree ddecl;
+
+         if (debug_args == NULL)
+           {
+             debug_args = decl_debug_args_insert (new_decl);
+             len = vec_safe_length (*debug_args);
+           }
+         ddecl = make_node (DEBUG_EXPR_DECL);
+         DECL_ARTIFICIAL (ddecl) = 1;
+         TREE_TYPE (ddecl) = TREE_TYPE (parm);
+         SET_DECL_MODE (ddecl, DECL_MODE (parm));
+         vec_safe_push (*debug_args, DECL_ORIGIN (parm));
+         vec_safe_push (*debug_args, ddecl);
+       }
       if (debug_args != NULL)
        {
          /* On the callee side, add
@@ -6080,11 +6442,11 @@ tree_function_versioning (tree old_decl, tree new_decl,
             in the debug info that var (whole DECL_ORIGIN is the parm
             PARM_DECL) is optimized away, but could be looked up at the
             call site as value of D#X there.  */
-         tree var = vars, vexpr;
+         tree vexpr;
          gimple_stmt_iterator cgsi
            = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gimple *def_temp;
-         var = vars;
+         tree var = vars;
          i = vec_safe_length (*debug_args);
          do
            {
@@ -6095,7 +6457,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
              if (var == NULL_TREE)
                break;
              vexpr = make_node (DEBUG_EXPR_DECL);
-             parm = (**debug_args)[i];
+             tree parm = (**debug_args)[i];
              DECL_ARTIFICIAL (vexpr) = 1;
              TREE_TYPE (vexpr) = TREE_TYPE (parm);
              SET_DECL_MODE (vexpr, DECL_MODE (parm));
@@ -6107,14 +6469,13 @@ tree_function_versioning (tree old_decl, tree new_decl,
          while (i > len);
        }
     }
-
-  if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
-    BITMAP_FREE (debug_args_to_skip);
+  delete param_body_adjs;
   free_dominance_info (CDI_DOMINATORS);
   free_dominance_info (CDI_POST_DOMINATORS);
 
   gcc_assert (!id.debug_stmts.exists ());
   pop_cfun ();
+  input_location = saved_location;
   return;
 }
 
@@ -6229,6 +6590,7 @@ copy_fn (tree fn, tree& parms, tree& result)
      since front-end specific mechanisms may rely on sharing.  */
   id.regimplify = false;
   id.do_not_unshare = true;
+  id.do_not_fold = true;
 
   /* We're not inside any EH region.  */
   id.eh_lp_nr = 0;